hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9e152a2a8510826ba8ae2c65efbe3d72c2c9fd21 | 2,996 | ex | Elixir | clients/network_services/lib/google_api/network_services/v1/model/audit_config.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/network_services/lib/google_api/network_services/v1/model/audit_config.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/network_services/lib/google_api/network_services/v1/model/audit_config.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkServices.V1.Model.AuditConfig do
  @moduledoc """
  Specifies the audit configuration for a service. The configuration determines which permission types are logged, and what identities, if any, are exempted from logging. An AuditConfig must have one or more AuditLogConfigs. If there are AuditConfigs for both `allServices` and a specific service, the union of the two AuditConfigs is used for that service: the log_types specified in each AuditConfig are enabled, and the exempted_members in each AuditLogConfig are exempted. Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:[email protected]" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:[email protected]" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts [email protected] from DATA_READ logging, and [email protected] from DATA_WRITE logging.

  ## Attributes

  * `auditLogConfigs` (*type:* `list(GoogleApi.NetworkServices.V1.Model.AuditLogConfig.t)`, *default:* `nil`) - The configuration for logging of each type of permission.
  * `service` (*type:* `String.t`, *default:* `nil`) - Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
  """

  use GoogleApi.Gax.ModelBase

  alias GoogleApi.NetworkServices.V1.Model.AuditLogConfig

  @type t :: %__MODULE__{
          :auditLogConfigs => list(AuditLogConfig.t()) | nil,
          :service => String.t() | nil
        }

  # Per-permission-type logging configuration entries.
  field :auditLogConfigs, as: AuditLogConfig, type: :list
  # Service this audit configuration applies to ("allServices" covers all).
  field :service
end
defimpl Poison.Decoder, for: GoogleApi.NetworkServices.V1.Model.AuditConfig do
  alias GoogleApi.NetworkServices.V1.Model.AuditConfig

  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options), do: AuditConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.NetworkServices.V1.Model.AuditConfig do
  # Encoding is handled generically by the shared model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 59.92 | 1,106 | 0.748665 |
9e1554cf967b66b1e430e3146a1293bde6becd32 | 359 | ex | Elixir | lib/ua_inspector/util/short_code_map.ex | elixytics/ua_inspector | 11fd98f69b7853b70529ee73355ef57851248572 | [
"Apache-2.0"
] | 57 | 2015-04-07T03:10:45.000Z | 2019-03-11T01:01:40.000Z | lib/ua_inspector/util/short_code_map.ex | elixytics/ua_inspector | 11fd98f69b7853b70529ee73355ef57851248572 | [
"Apache-2.0"
] | 16 | 2015-03-09T19:56:17.000Z | 2019-03-16T14:24:02.000Z | lib/ua_inspector/util/short_code_map.ex | elixytics/ua_inspector | 11fd98f69b7853b70529ee73355ef57851248572 | [
"Apache-2.0"
] | 15 | 2015-02-02T23:14:00.000Z | 2019-03-16T13:15:05.000Z | defmodule UAInspector.Util.ShortCodeMap do
@moduledoc false
@doc """
Extracts the short version for an expanded short code.
"""
@spec to_short([{String.t(), String.t()}], String.t()) :: String.t()
def to_short([], long), do: long
def to_short([{short, long} | _], long), do: short
def to_short([_ | rest], long), do: to_short(rest, long)
end
| 29.916667 | 70 | 0.651811 |
9e1569edbb80aad2b83e65682cd6408b0a0282bb | 15,852 | ex | Elixir | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/api/debugger.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/api/debugger.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/api/debugger.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudDebugger.V2.Api.Debugger do
  @moduledoc """
  API calls for all endpoints tagged `Debugger`.
  """

  alias GoogleApi.CloudDebugger.V2.Connection
  import GoogleApi.CloudDebugger.V2.RequestBuilder

  # Query parameters accepted by every Debugger endpoint. Each endpoint below
  # merges its endpoint-specific parameters on top of this map, removing the
  # 14-entry map that was previously duplicated verbatim in all five functions.
  @base_optional_params %{
    :pp => :query,
    :oauth_token => :query,
    :bearer_token => :query,
    :upload_protocol => :query,
    :prettyPrint => :query,
    :uploadType => :query,
    :fields => :query,
    :callback => :query,
    :"$.xgafv" => :query,
    :alt => :query,
    :access_token => :query,
    :key => :query,
    :quotaUser => :query,
    :clientVersion => :query
  }

  @doc """
  Deletes the breakpoint from the debuggee.

  ## Parameters

  - connection (GoogleApi.CloudDebugger.V2.Connection): Connection to server
  - debuggee_id (String): ID of the debuggee whose breakpoint to delete.
  - breakpoint_id (String): ID of the breakpoint to delete.
  - opts (KeywordList): [optional] Optional parameters
    - :pp (Boolean): Pretty-print response.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :bearer_token (String): OAuth bearer token.
    - :upload_protocol (String): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :upload_type (String): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :callback (String): JSONP
    - :__/xgafv (String): V1 error format.
    - :alt (String): Data format for response.
    - :access_token (String): OAuth access token.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :client_version (String): The client version making the call. Schema: `domain/type/version` (e.g., `google.com/intellij/v1`).

  ## Returns

  {:ok, %GoogleApi.CloudDebugger.V2.Model.Empty{}} on success
  {:error, info} on failure
  """
  @spec clouddebugger_debugger_debuggees_breakpoints_delete(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, GoogleApi.CloudDebugger.V2.Model.Empty.t} | {:error, Tesla.Env.t}
  def clouddebugger_debugger_debuggees_breakpoints_delete(connection, debuggee_id, breakpoint_id, opts \\ []) do
    %{}
    |> method(:delete)
    |> url("/v2/debugger/debuggees/#{debuggee_id}/breakpoints/#{breakpoint_id}")
    |> add_optional_params(@base_optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.CloudDebugger.V2.Model.Empty{})
  end

  @doc """
  Gets breakpoint information.

  ## Parameters

  - connection (GoogleApi.CloudDebugger.V2.Connection): Connection to server
  - debuggee_id (String): ID of the debuggee whose breakpoint to get.
  - breakpoint_id (String): ID of the breakpoint to get.
  - opts (KeywordList): [optional] Optional parameters
    - :pp (Boolean): Pretty-print response.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :bearer_token (String): OAuth bearer token.
    - :upload_protocol (String): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :upload_type (String): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :callback (String): JSONP
    - :__/xgafv (String): V1 error format.
    - :alt (String): Data format for response.
    - :access_token (String): OAuth access token.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :client_version (String): The client version making the call. Schema: `domain/type/version` (e.g., `google.com/intellij/v1`).

  ## Returns

  {:ok, %GoogleApi.CloudDebugger.V2.Model.GetBreakpointResponse{}} on success
  {:error, info} on failure
  """
  @spec clouddebugger_debugger_debuggees_breakpoints_get(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, GoogleApi.CloudDebugger.V2.Model.GetBreakpointResponse.t} | {:error, Tesla.Env.t}
  def clouddebugger_debugger_debuggees_breakpoints_get(connection, debuggee_id, breakpoint_id, opts \\ []) do
    %{}
    |> method(:get)
    |> url("/v2/debugger/debuggees/#{debuggee_id}/breakpoints/#{breakpoint_id}")
    |> add_optional_params(@base_optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.CloudDebugger.V2.Model.GetBreakpointResponse{})
  end

  @doc """
  Lists all breakpoints for the debuggee.

  ## Parameters

  - connection (GoogleApi.CloudDebugger.V2.Connection): Connection to server
  - debuggee_id (String): ID of the debuggee whose breakpoints to list.
  - opts (KeywordList): [optional] Optional parameters
    - :pp (Boolean): Pretty-print response.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :bearer_token (String): OAuth bearer token.
    - :upload_protocol (String): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :upload_type (String): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :callback (String): JSONP
    - :__/xgafv (String): V1 error format.
    - :alt (String): Data format for response.
    - :access_token (String): OAuth access token.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :wait_token (String): A wait token that, if specified, blocks the call until the breakpoints list has changed, or a server selected timeout has expired. The value should be set from the last response. The error code `google.rpc.Code.ABORTED` (RPC) is returned on wait timeout, which should be called again with the same `wait_token`.
    - :client_version (String): The client version making the call. Schema: `domain/type/version` (e.g., `google.com/intellij/v1`).
    - :action/value (String): Only breakpoints with the specified action will pass the filter.
    - :include_inactive (Boolean): When set to `true`, the response includes active and inactive breakpoints. Otherwise, it includes only active breakpoints.
    - :include_all_users (Boolean): When set to `true`, the response includes the list of breakpoints set by any user. Otherwise, it includes only breakpoints set by the caller.
    - :strip_results (Boolean): This field is deprecated. The following fields are always stripped out of the result: `stack_frames`, `evaluated_expressions` and `variable_table`.

  ## Returns

  {:ok, %GoogleApi.CloudDebugger.V2.Model.ListBreakpointsResponse{}} on success
  {:error, info} on failure
  """
  @spec clouddebugger_debugger_debuggees_breakpoints_list(Tesla.Env.client, String.t, keyword()) :: {:ok, GoogleApi.CloudDebugger.V2.Model.ListBreakpointsResponse.t} | {:error, Tesla.Env.t}
  def clouddebugger_debugger_debuggees_breakpoints_list(connection, debuggee_id, opts \\ []) do
    optional_params =
      Map.merge(@base_optional_params, %{
        :waitToken => :query,
        :"action.value" => :query,
        :includeInactive => :query,
        :includeAllUsers => :query,
        :stripResults => :query
      })

    %{}
    |> method(:get)
    |> url("/v2/debugger/debuggees/#{debuggee_id}/breakpoints")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.CloudDebugger.V2.Model.ListBreakpointsResponse{})
  end

  @doc """
  Sets the breakpoint to the debuggee.

  ## Parameters

  - connection (GoogleApi.CloudDebugger.V2.Connection): Connection to server
  - debuggee_id (String): ID of the debuggee where the breakpoint is to be set.
  - opts (KeywordList): [optional] Optional parameters
    - :pp (Boolean): Pretty-print response.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :bearer_token (String): OAuth bearer token.
    - :upload_protocol (String): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :upload_type (String): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :callback (String): JSONP
    - :__/xgafv (String): V1 error format.
    - :alt (String): Data format for response.
    - :access_token (String): OAuth access token.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :client_version (String): The client version making the call. Schema: `domain/type/version` (e.g., `google.com/intellij/v1`).
    - :body (Breakpoint):

  ## Returns

  {:ok, %GoogleApi.CloudDebugger.V2.Model.SetBreakpointResponse{}} on success
  {:error, info} on failure
  """
  @spec clouddebugger_debugger_debuggees_breakpoints_set(Tesla.Env.client, String.t, keyword()) :: {:ok, GoogleApi.CloudDebugger.V2.Model.SetBreakpointResponse.t} | {:error, Tesla.Env.t}
  def clouddebugger_debugger_debuggees_breakpoints_set(connection, debuggee_id, opts \\ []) do
    optional_params = Map.merge(@base_optional_params, %{:body => :body})

    %{}
    |> method(:post)
    |> url("/v2/debugger/debuggees/#{debuggee_id}/breakpoints/set")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.CloudDebugger.V2.Model.SetBreakpointResponse{})
  end

  @doc """
  Lists all the debuggees that the user has access to.

  ## Parameters

  - connection (GoogleApi.CloudDebugger.V2.Connection): Connection to server
  - opts (KeywordList): [optional] Optional parameters
    - :pp (Boolean): Pretty-print response.
    - :oauth_token (String): OAuth 2.0 token for the current user.
    - :bearer_token (String): OAuth bearer token.
    - :upload_protocol (String): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :pretty_print (Boolean): Returns response with indentations and line breaks.
    - :upload_type (String): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String): Selector specifying which fields to include in a partial response.
    - :callback (String): JSONP
    - :__/xgafv (String): V1 error format.
    - :alt (String): Data format for response.
    - :access_token (String): OAuth access token.
    - :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :project (String): Project number of a Google Cloud project whose debuggees to list.
    - :client_version (String): The client version making the call. Schema: `domain/type/version` (e.g., `google.com/intellij/v1`).
    - :include_inactive (Boolean): When set to `true`, the result includes all debuggees. Otherwise, the result includes only debuggees that are active.

  ## Returns

  {:ok, %GoogleApi.CloudDebugger.V2.Model.ListDebuggeesResponse{}} on success
  {:error, info} on failure
  """
  @spec clouddebugger_debugger_debuggees_list(Tesla.Env.client, keyword()) :: {:ok, GoogleApi.CloudDebugger.V2.Model.ListDebuggeesResponse.t} | {:error, Tesla.Env.t}
  def clouddebugger_debugger_debuggees_list(connection, opts \\ []) do
    optional_params =
      Map.merge(@base_optional_params, %{
        :project => :query,
        :includeInactive => :query
      })

    %{}
    |> method(:get)
    |> url("/v2/debugger/debuggees")
    |> add_optional_params(optional_params, opts)
    |> Enum.into([])
    |> (&Connection.request(connection, &1)).()
    |> decode(%GoogleApi.CloudDebugger.V2.Model.ListDebuggeesResponse{})
  end
end
| 49.383178 | 360 | 0.677517 |
9e159b0273bcdb31db4ed618c18f132da00329f4 | 3,357 | ex | Elixir | api/lib/statistics/valuelists_analyzer.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/statistics/valuelists_analyzer.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/statistics/valuelists_analyzer.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | defmodule Api.Statistics.ValuelistsAnalyzer do
alias Api.Core.Config
def find_overlapping_valuelists(valuelists, used_values_only) do
Enum.reduce(Config.get(:projects), %{}, fn project_name, result ->
Map.put(result, project_name, find_overlapping_valuelists(valuelists, project_name, used_values_only))
end)
end
def find_overlapping_valuelists(valuelists, project_name, used_values_only) do
Enum.reduce(valuelists, %{}, &add_to_overlapping_info(&1, &2, project_name, valuelists, used_values_only))
|> Enum.into(%{})
end
def get_number_of_configured_valuelists(valuelists, used_valuelists_only, non_default_only) do
Enum.reduce(Config.get(:projects), %{}, fn project_name, result ->
Map.put(result, project_name, get_number_of_configured_valuelists(
valuelists, project_name, used_valuelists_only, non_default_only
))
end)
end
def get_number_of_configured_valuelists(valuelists, project_name, used_valuelists_only, non_default_only) do
Enum.filter(valuelists, fn { valuelist_name, valuelist } ->
Map.has_key?(valuelist.total, project_name)
&& (!used_valuelists_only || valuelist.total[project_name] > 0)
&& (!non_default_only || !String.contains?(valuelist_name, "default"))
end)
|> length
end
def get_number_of_valuelists(valuelists, non_default_only) do
Enum.filter(valuelists, fn { valuelist_name, _ } ->
!non_default_only || !String.contains?(valuelist_name, "default")
end)
|> length
end
def get_shared_valuelists_names(valuelists, non_default_only) do
Enum.filter(valuelists, fn { valuelist_name, valuelist } ->
(!non_default_only || !String.contains?(valuelist_name, "default"))
&& length(Map.keys(valuelist.total)) > 1
end)
|> Enum.map(fn { valuelist_name, _ } -> valuelist_name end)
end
defp add_to_overlapping_info({ valuelist_name, valuelist }, overlapping_info, project_name, valuelists,
used_values_only) do
overlapping = find_overlapping_valuelists(valuelist_name, valuelist, project_name, valuelists, used_values_only)
if length(overlapping) > 0 do
Map.put(overlapping_info, valuelist_name, overlapping)
else
overlapping_info
end
end
defp find_overlapping_valuelists(valuelist_name, valuelist, project_name, valuelists, used_values_only) do
values_to_check = get_values_to_check(valuelist, project_name, used_values_only)
if (length(values_to_check) > 0) do
Enum.filter(valuelists, fn { name, valuelist } ->
name != valuelist_name && contains_values(valuelist, values_to_check)
end)
|> Enum.map(fn { valuelist_name, _ } -> valuelist_name end)
else
[]
end
end
defp get_values_to_check(valuelist, project_name, used_values_only) do
if used_values_only do
get_used_values(valuelist, project_name)
else
if Enum.member?(Map.keys(valuelist.total), project_name), do: Map.keys(valuelist.values), else: []
end
end
defp get_used_values(%{ values: values }, project_name) do
Enum.filter(values, fn { _, counts } ->
Map.has_key?(counts, project_name) && counts[project_name] > 0
end)
|> Enum.map(fn { value_name, _ } -> value_name end)
end
defp contains_values(valuelist, values) do
length(values -- Map.keys(valuelist.values)) == 0
end
end
| 39.034884 | 116 | 0.722073 |
9e15a68a4e39b3ef01e22411b7cfe875da43039d | 365 | exs | Elixir | priv/repo/seeds.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 82 | 2018-01-08T16:57:13.000Z | 2021-12-25T07:34:21.000Z | priv/repo/seeds.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 8 | 2018-05-21T10:31:27.000Z | 2018-11-25T07:17:56.000Z | priv/repo/seeds.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 15 | 2018-05-21T10:18:16.000Z | 2021-03-30T17:14:40.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# DeployTemplate.Repo.insert!(%DeployTemplate.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 30.416667 | 63 | 0.717808 |
9e15aa83f1e3ce3a47c176fe0bada5794fa6e9bb | 2,890 | ex | Elixir | clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/games_configuration/lib/google_api/games_configuration/v1configuration/model/leaderboard_configuration_detail.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do
@moduledoc """
This is a JSON template for a leaderboard configuration detail.
## Attributes
* `iconUrl` (*type:* `String.t`, *default:* `nil`) - The icon url of this leaderboard. Writes to this field are ignored.
* `kind` (*type:* `String.t`, *default:* `gamesConfiguration#leaderboardConfigurationDetail`) - Uniquely identifies the type of this resource. Value is always the fixed string gamesConfiguration#leaderboardConfigurationDetail.
* `name` (*type:* `GoogleApi.GamesConfiguration.V1configuration.Model.LocalizedStringBundle.t`, *default:* `nil`) - Localized strings for the leaderboard name.
* `scoreFormat` (*type:* `GoogleApi.GamesConfiguration.V1configuration.Model.GamesNumberFormatConfiguration.t`, *default:* `nil`) - The score formatting for the leaderboard.
* `sortRank` (*type:* `integer()`, *default:* `nil`) - The sort rank of this leaderboard. Writes to this field are ignored.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:iconUrl => String.t(),
:kind => String.t(),
:name => GoogleApi.GamesConfiguration.V1configuration.Model.LocalizedStringBundle.t(),
:scoreFormat =>
GoogleApi.GamesConfiguration.V1configuration.Model.GamesNumberFormatConfiguration.t(),
:sortRank => integer()
}
field(:iconUrl)
field(:kind)
field(:name, as: GoogleApi.GamesConfiguration.V1configuration.Model.LocalizedStringBundle)
field(
:scoreFormat,
as: GoogleApi.GamesConfiguration.V1configuration.Model.GamesNumberFormatConfiguration
)
field(:sortRank)
end
defimpl Poison.Decoder,
for: GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do
def decode(value, options) do
GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.GamesConfiguration.V1configuration.Model.LeaderboardConfigurationDetail do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.285714 | 230 | 0.746713 |
9e162341a29a7635d0183336165cabc067982dcb | 1,804 | ex | Elixir | lib/mix/noap/gen_code/wsdl_wrap/namespace_util.ex | bpardee/noap | 4c21c55ef2d88ad26a9fab94805beff954315dbf | [
"MIT"
] | 1 | 2022-03-04T03:55:46.000Z | 2022-03-04T03:55:46.000Z | lib/mix/noap/gen_code/wsdl_wrap/namespace_util.ex | bpardee/noap | 4c21c55ef2d88ad26a9fab94805beff954315dbf | [
"MIT"
] | null | null | null | lib/mix/noap/gen_code/wsdl_wrap/namespace_util.ex | bpardee/noap | 4c21c55ef2d88ad26a9fab94805beff954315dbf | [
"MIT"
] | null | null | null | defmodule Mix.Noap.GenCode.WSDLWrap.NamespaceUtil do
import SweetXml, only: [add_namespace: 3]
def add_schema_namespace(xpath, prefix) do
add_namespace(xpath, prefix, "http://www.w3.org/2001/XMLSchema")
end
def add_protocol_namespace(xpath, prefix) do
add_namespace(xpath, prefix, "http://schemas.xmlsoap.org/wsdl/")
end
# @spec get_soap_namespace(String.t(), list()) :: String.t()
# defp get_soap_namespace(doc, opts) when is_list(opts) do
# version = soap_version(opts)
# url = @soap_version_namespaces[version]
# Noap.XML.find_namespace(doc, url)
# end
def add_soap_namespace(xpath, prefix) do
add_namespace(xpath, prefix, "http://schemas.xmlsoap.org/wsdl/soap/")
end
# @spec get_namespaces(String.t(), String.t(), String.t()) :: map()
# defp get_namespaces(doc, schema_namespace, protocol_ns) do
# doc
# |> xpath(~x"//#{ns("definitions", protocol_ns)}/namespace::*"l)
# |> Enum.into(%{}, &get_namespace(&1, doc, schema_namespace, protocol_ns))
# end
# @spec get_namespace(map(), String.t(), String.t(), String.t()) :: tuple()
# defp get_namespace(namespaces_node, doc, schema_namespace, protocol_ns) do
# {_, _, _, key, value} = namespaces_node
# string_key = key |> to_string
# value = Atom.to_string(value)
# cond do
# xpath(doc, ~x"//#{ns("definitions", protocol_ns)}[@targetNamespace='#{value}']") ->
# {string_key, %{value: value, type: :wsdl}}
# xpath(
# doc,
# ~x"//#{ns("types", protocol_ns)}/#{ns("schema", schema_namespace)}/#{
# ns("import", schema_namespace)
# }[@namespace='#{value}']"
# ) ->
# {string_key, %{value: value, type: :xsd}}
# true ->
# {string_key, %{value: value, type: :soap}}
# end
# end
end
| 34.037736 | 91 | 0.628049 |
9e1634260e99a7b97033c3095c060714180251b6 | 3,261 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_group.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_group.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/creative_group.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.CreativeGroup do
  @moduledoc """
  Contains properties of a creative group.

  ## Attributes

  * `accountId` (*type:* `String.t`, *default:* `nil`) - Account ID of this creative group. This is a read-only field that can be left blank.
  * `advertiserId` (*type:* `String.t`, *default:* `nil`) - Advertiser ID of this creative group. This is a required field on insertion.
  * `advertiserIdDimensionValue` (*type:* `GoogleApi.DFAReporting.V33.Model.DimensionValue.t`, *default:* `nil`) - Dimension value for the ID of the advertiser. This is a read-only, auto-generated field.
  * `groupNumber` (*type:* `integer()`, *default:* `nil`) - Subgroup of the creative group. Assign your creative groups to a subgroup in order to filter or manage them more easily. This field is required on insertion and is read-only after insertion. Acceptable values are 1 to 2, inclusive.
  * `id` (*type:* `String.t`, *default:* `nil`) - ID of this creative group. This is a read-only, auto-generated field.
  * `kind` (*type:* `String.t`, *default:* `dfareporting#creativeGroup`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#creativeGroup".
  * `name` (*type:* `String.t`, *default:* `nil`) - Name of this creative group. This is a required field and must be less than 256 characters long and unique among creative groups of the same advertiser.
  * `subaccountId` (*type:* `String.t`, *default:* `nil`) - Subaccount ID of this creative group. This is a read-only field that can be left blank.
  """

  use GoogleApi.Gax.ModelBase

  alias GoogleApi.DFAReporting.V33.Model.DimensionValue

  @type t :: %__MODULE__{
          :accountId => String.t(),
          :advertiserId => String.t(),
          :advertiserIdDimensionValue => DimensionValue.t(),
          :groupNumber => integer(),
          :id => String.t(),
          :kind => String.t(),
          :name => String.t(),
          :subaccountId => String.t()
        }

  field :accountId
  field :advertiserId
  field :advertiserIdDimensionValue, as: DimensionValue
  field :groupNumber
  field :id
  field :kind
  field :name
  field :subaccountId
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.CreativeGroup do
  # Decoding is delegated straight to the model module's decode/2.
  def decode(value, options),
    do: GoogleApi.DFAReporting.V33.Model.CreativeGroup.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.CreativeGroup do
  # All generated models share the ModelBase encoder implementation.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 47.955882 | 293 | 0.709905 |
9e1651e9e07666db0295f2db4bc597347f04072f | 353 | ex | Elixir | lib/elixir_lokalise_api/models/screenshot.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 3 | 2021-06-24T14:30:31.000Z | 2021-09-06T11:30:17.000Z | lib/elixir_lokalise_api/models/screenshot.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 8 | 2021-09-15T07:30:59.000Z | 2022-02-01T17:40:17.000Z | lib/elixir_lokalise_api/models/screenshot.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 2 | 2021-09-07T11:10:51.000Z | 2021-09-26T07:37:39.000Z | defmodule ElixirLokaliseApi.Model.Screenshot do
@moduledoc false
  # Struct mirroring one screenshot object from the Lokalise API.
  # List-valued fields default to [] so callers can enumerate them without
  # nil checks; everything else defaults to nil until populated.
  # NOTE(review): field semantics are inferred from the names only — confirm
  # against the Lokalise screenshots API response shape.
  defstruct screenshot_id: nil,
            key_ids: [],
            url: nil,
            title: nil,
            description: nil,
            screenshot_tags: [],
            width: nil,
            height: nil,
            created_at: nil,
            created_at_timestamp: nil
end
| 25.214286 | 47 | 0.532578 |
9e166b8d17b3876460d16762fcdabf6bea04a9fb | 214 | exs | Elixir | apps/feedback/config/prod.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/feedback/config/prod.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/feedback/config/prod.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | use Mix.Config
# Support-ticket e-mail addresses for the feedback app.
# NOTE(review): the "${VAR}" placeholders are presumably substituted from
# environment variables at release start (REPLACE_OS_VARS-style) — confirm
# with the release tooling; they are not resolved by Mix config itself.
config :feedback,
  support_ticket_to_email: "${SUPPORT_TICKET_TO_EMAIL}",
  support_ticket_from_email: "${SUPPORT_TICKET_FROM_EMAIL}",
  support_ticket_reply_email: "${SUPPORT_TICKET_REPLY_EMAIL}"
| 30.571429 | 61 | 0.813084 |
9e1695862623ab485985e53d7045295f0db36aab | 1,132 | ex | Elixir | lib/printing.ex | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
] | 3 | 2020-02-29T22:43:52.000Z | 2021-02-18T13:38:45.000Z | lib/printing.ex | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
] | null | null | null | lib/printing.ex | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
] | null | null | null | defmodule Printing do
def show_proof(proof = %Proof{branches: []}) do
["FORMULAS: ", Enum.map(proof.formulas, &show_signed_formula(&1)), "STATUS: #{proof.status}"]
# show_proof_branches(proof.branches)
end
def show_proof(proof = %Proof{}) do
[
:formulas,
Enum.map(proof.formulas, &show_signed_formula(&1)),
"STATUS: #{proof.status}",
:branches,
show_proof(hd(proof.branches)),
show_proof(hd(tl(proof.branches)))
]
end
def show_signed_formula({sign, formula}) do
"#{sign_as_string(sign)} #{formula_as_string(formula)}"
end
defp sign_as_string(sign) do
sign
|> Atom.to_string()
|> String.upcase()
end
defp formula_as_string(atom) when is_atom(atom), do: Atom.to_string(atom)
defp formula_as_string({:not, f}), do: "!#{formula_as_string(f)}"
defp formula_as_string({f, :and, g}), do: "(#{formula_as_string(f)}&#{formula_as_string(g)})"
defp formula_as_string({f, :or, g}), do: "(#{formula_as_string(f)}|#{formula_as_string(g)})"
defp formula_as_string({f, :implies, g}),
do: "(#{formula_as_string(f)}->#{formula_as_string(g)})"
end
| 31.444444 | 97 | 0.65636 |
9e16a16b2e529d92ed4c2decb50dd7452d36df4e | 1,306 | exs | Elixir | test/phoenix/live_dashboard/live/request_logger_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | 1 | 2020-04-23T11:36:03.000Z | 2020-04-23T11:36:03.000Z | test/phoenix/live_dashboard/live/request_logger_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | null | null | null | test/phoenix/live_dashboard/live/request_logger_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | 1 | 2021-02-04T03:06:20.000Z | 2021-02-04T03:06:20.000Z | defmodule Phoenix.LiveDashboard.RequestLoggerLiveTest do
use ExUnit.Case, async: true
require Logger
import Phoenix.ConnTest
import Phoenix.LiveViewTest
alias Phoenix.LiveDashboard.RequestLogger
alias Phoenix.LiveDashboardTest.PubSub
@endpoint Phoenix.LiveDashboardTest.Endpoint
  # Visiting the bare request_logger path should live-redirect to a freshly
  # generated stream key (the segment after the trailing slash).
  test "redirects to stream" do
    {:error, {:live_redirect, %{to: "/dashboard/nonode%40nohost/request_logger/" <> _}}} =
      live(build_conn(), "/dashboard/nonode@nohost/request_logger")
  end
  # A message logged onto the stream's pubsub topic must appear in the
  # rendered logger output for that stream.
  @tag :capture_log
  test "receives log messages on stream" do
    {:ok, live, _} = live(build_conn(), "/dashboard/nonode@nohost/request_logger/sample")
    assert render(live) =~ "request_logger_param_key="
    assert render(live) =~ "Enable cookie"
    Logger.error("hello world", logger_pubsub_backend: {PubSub, RequestLogger.topic("sample")})
    Logger.flush()
    # Guarantees the message above has been processed
    _ = render(live)
    # Guarantees the stream has arrived
    assert render(live) =~ ~s|[error] hello world\n</pre>|
  end
  # A :node_redirect message sent to the LiveView should redirect to the
  # same stream key on the selected node.
  test "redirects to new node" do
    {:ok, live, _} = live(build_conn(), "/dashboard/nonode@nohost/request_logger/helloworld")
    send(live.pid, {:node_redirect, "foo@bar"})
    assert_redirect(live, "/dashboard/foo%40bar/request_logger/helloworld")
  end
end
| 34.368421 | 95 | 0.723583 |
9e16b4ad2a10fd9e5df8e0760b7509db7cce3b88 | 261 | ex | Elixir | lib/phx_crud_users.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | lib/phx_crud_users.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | lib/phx_crud_users.ex | LeonardoSSev/phx-crud-users | 52dabaae0c81adeee39afa48eb17331de261d3c4 | [
"MIT"
] | null | null | null | defmodule PhxCrudUsers do
@moduledoc """
PhxCrudUsers keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 26.1 | 66 | 0.762452 |
9e16e759c531827e7da09ad99752a5d57ab16c8c | 3,071 | ex | Elixir | lib/ecto/query/builder/group_by.ex | Anber/ecto | 2b903c8c6acb924f87746fe4d40cb4b42a7f0491 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder/group_by.ex | Anber/ecto | 2b903c8c6acb924f87746fe4d40cb4b42a7f0491 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/builder/group_by.ex | Anber/ecto | 2b903c8c6acb924f87746fe4d40cb4b42a7f0491 | [
"Apache-2.0"
] | null | null | null | import Kernel, except: [apply: 2]
defmodule Ecto.Query.Builder.GroupBy do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Escapes a list of quoted expressions.
See `Ecto.Builder.escape/2`.
iex> escape(:group_by, quote do [x.x, 13] end, {%{}, :acc}, [x: 0], __ENV__)
{[{:{}, [], [{:{}, [], [:., [], [{:{}, [], [:&, [], [0]]}, :x]]}, [], []]},
13],
{%{}, :acc}}
"""
@spec escape(:group_by | :partition_by, {map, term}, Macro.t, Keyword.t, Macro.Env.t) ::
{Macro.t, {map, term}}
  # A pinned interpolation (`^expr`) cannot be validated at compile time,
  # so emit a call that defers validation to group_by!/2 at runtime.
  def escape(kind, {:^, _, [expr]}, params_acc, _vars, _env) do
    {quote(do: Ecto.Query.Builder.GroupBy.group_by!(unquote(kind), unquote(expr))), params_acc}
  end

  # Otherwise wrap the expression into a list and escape each element,
  # threading the params accumulator through.
  def escape(kind, expr, params_acc, vars, env) do
    expr
    |> List.wrap
    |> Enum.map_reduce(params_acc, &do_escape(&1, &2, kind, vars, env))
  end
  # A pinned element inside the list: validated at runtime by field!/2.
  defp do_escape({:^, _, [expr]}, params_acc, kind, _vars, _env) do
    {quote(do: Ecto.Query.Builder.GroupBy.field!(unquote(kind), unquote(expr))), params_acc}
  end

  # A bare atom is shorthand for a field on the query's first binding.
  defp do_escape(field, params_acc, _kind, _vars, _env) when is_atom(field) do
    {Macro.escape(to_field(field)), params_acc}
  end

  # Anything else is a full expression; defer to the generic escaper.
  defp do_escape(expr, params_acc, _kind, vars, env) do
    Builder.escape(expr, :any, params_acc, vars, env)
  end
@doc """
Called at runtime to verify a field.
"""
def field!(_kind, field) when is_atom(field),
do: to_field(field)
def field!(kind, other) do
raise ArgumentError,
"expected a field as an atom in `#{kind}`, got: `#{inspect other}`"
end
@doc """
Called at runtime to verify group_by.
"""
def group_by!(kind, group_by) do
Enum.map List.wrap(group_by), fn
field when is_atom(field) ->
to_field(field)
_ ->
raise ArgumentError,
"expected a list of fields in `#{kind}`, got: `#{inspect group_by}`"
end
end
  # AST for `&0.field` — a field access on the query's first binding.
  defp to_field(field), do: {{:., [], [{:&, [], [0]}, field]}, [], []}
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, [Macro.t], Macro.t, Macro.Env.t) :: Macro.t
  def build(query, binding, expr, env) do
    {query, binding} = Builder.escape_binding(query, binding, env)
    {expr, {params, _}} = escape(:group_by, expr, {%{}, :acc}, binding, env)
    params = Builder.escape_params(params)

    # Wrap the escaped expression in a QueryExpr that also carries the
    # params plus file/line for error reporting, then splice it into the
    # query via apply/2 below.
    group_by = quote do: %Ecto.Query.QueryExpr{
      expr: unquote(expr),
      params: unquote(params),
      file: unquote(env.file),
      line: unquote(env.line)}
    Builder.apply_query(query, __MODULE__, [group_by], env)
  end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, term) :: Ecto.Query.t
def apply(%Ecto.Query{group_bys: group_bys} = query, expr) do
%{query | group_bys: group_bys ++ [expr]}
end
def apply(query, expr) do
apply(Ecto.Queryable.to_query(query), expr)
end
end
| 31.020202 | 95 | 0.601107 |
9e16eb0f191ea8c19682e5821a7adb974115ff85 | 754 | exs | Elixir | mix.exs | Ruin0x11/elixir-tmdb | d2ab828bb4473c93c11037d1a65064dce8de245e | [
"MIT"
] | 13 | 2016-07-30T05:10:50.000Z | 2019-04-25T05:08:11.000Z | mix.exs | Ruin0x11/elixir-tmdb | d2ab828bb4473c93c11037d1a65064dce8de245e | [
"MIT"
] | 3 | 2016-08-04T00:44:01.000Z | 2017-07-18T17:27:05.000Z | mix.exs | Ruin0x11/elixir-tmdb | d2ab828bb4473c93c11037d1a65064dce8de245e | [
"MIT"
] | 12 | 2016-07-30T23:59:26.000Z | 2021-06-25T05:53:57.000Z | defmodule Tmdb.Mixfile do
use Mix.Project
def project do
[app: :tmdb,
version: "0.1.1",
elixir: "~> 1.2",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger, :httpoison]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:httpoison, "~> 0.8"},
{:poison, "~> 1.5 or ~> 2.0 or ~> 3.0"}
]
end
end
| 20.944444 | 77 | 0.578249 |
9e16eb8ead671013bb76cf78bd855386834b8756 | 3,093 | exs | Elixir | test/redis_unique_queue_test.exs | mvalitov/elixir-redis-unique-queue | 872c2736c51adeec949900ec94cd06c1c5c11d95 | [
"MIT"
] | 6 | 2017-05-21T11:43:59.000Z | 2020-12-09T09:46:11.000Z | test/redis_unique_queue_test.exs | mvalitov/elixir-redis-unique-queue | 872c2736c51adeec949900ec94cd06c1c5c11d95 | [
"MIT"
] | null | null | null | test/redis_unique_queue_test.exs | mvalitov/elixir-redis-unique-queue | 872c2736c51adeec949900ec94cd06c1c5c11d95 | [
"MIT"
] | null | null | null | defmodule RedisUniqueQueueTest do
use ExUnit.Case
# doctest RedisUniqueQueue
@redis_config Application.get_env(:redis_unique_queue, :redis)[:config]
setup do
{:ok, queue} = RedisUniqueQueue.create("test_queue", @redis_config)
RedisUniqueQueue.clear(queue)
{:ok, queue: queue}
end
test "return argument error if name not is_bitstring" do
assert {:error, "argument error"} == RedisUniqueQueue.create('qwerty', @redis_config)
end
test "return error if name empty" do
assert {:error, "name is empty"} == RedisUniqueQueue.create("", @redis_config)
end
test "test push and pop", %{queue: queue} do
RedisUniqueQueue.push(queue, "test")
RedisUniqueQueue.push(queue, "test2")
assert {:ok, ["test"]} == RedisUniqueQueue.pop(queue)
end
test "test push_multi and pop_multi", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2"])
assert {:ok, ["test", "test2"]} == RedisUniqueQueue.pop_multi(queue, 2)
end
test "pop all values", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
assert {:ok, ["test", "test2", "test3"]} == RedisUniqueQueue.pop_all(queue)
end
test "get front and back value", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
{:ok, front} = RedisUniqueQueue.front(queue)
{:ok, back} = RedisUniqueQueue.back(queue)
assert front == ["test"] && back == ["test3"]
end
test "test unique and size", %{queue: queue} do
RedisUniqueQueue.push(queue, "test")
RedisUniqueQueue.push(queue, "test")
assert {:ok, 1} == RedisUniqueQueue.size(queue)
end
test "test remove and remove by index", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
RedisUniqueQueue.remove(queue, "test2")
{:ok, remove} = RedisUniqueQueue.all(queue)
RedisUniqueQueue.remove_item_by_index(queue, 1)
{:ok, remove_by_index} = RedisUniqueQueue.all(queue)
assert remove == ["test", "test3"] && remove_by_index == ["test"]
end
test "test include?", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
{:ok, tr} = RedisUniqueQueue.include?(queue, "test2")
{:ok, fl} = RedisUniqueQueue.include?(queue, "no")
assert tr == true && fl == false
end
test "test peek", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
{:ok, res} = RedisUniqueQueue.peek(queue, 1, 2)
assert res == ["test2", "test3"]
end
test "test expire", %{queue: queue} do
RedisUniqueQueue.push_multi(queue, ["test", "test2", "test3"])
{:ok, size} = RedisUniqueQueue.size(queue)
RedisUniqueQueue.expire(queue, 1)
:timer.sleep(2000)
{:ok, exp} = RedisUniqueQueue.size(queue)
assert size == 3 && exp == 0
end
test "test queue clear", %{queue: queue} do
RedisUniqueQueue.push(queue, "test")
{:ok, size} = RedisUniqueQueue.size(queue)
RedisUniqueQueue.clear(queue)
{:ok, new_size} = RedisUniqueQueue.size(queue)
assert size == 1 && new_size == 0
end
end
| 34.366667 | 89 | 0.661494 |
9e16f0cd50bb9e944af818473593aea87e30dab7 | 1,134 | exs | Elixir | test/core_words_message_test.exs | alexiob/forthvm | 4dc75790ef98e248ac48a6f4116ad7673cee6287 | [
"MIT"
] | 8 | 2021-11-19T14:02:01.000Z | 2022-03-09T06:29:33.000Z | test/core_words_message_test.exs | alexiob/forthvm | 4dc75790ef98e248ac48a6f4116ad7673cee6287 | [
"MIT"
] | null | null | null | test/core_words_message_test.exs | alexiob/forthvm | 4dc75790ef98e248ac48a6f4116ad7673cee6287 | [
"MIT"
] | 1 | 2021-11-26T18:51:31.000Z | 2021-11-26T18:51:31.000Z | defmodule ForthVM.ProcessWordMessageTest do
@moduledoc false
use ExUnit.Case
import ExUnit.CaptureIO
@test_command "two one 42 :puts 1 send"
@test_message {"puts", [42, "one", "two"]}
test "message should be received by target process" do
assert capture_io(fn ->
start_supervised({ForthVM.Supervisor, num_cores: 2})
core_pid = ForthVM.core_pid(1)
:erlang.trace(core_pid, true, [:receive])
ForthVM.execute(1, 1, @test_command)
# wait for the send message to be received
assert_received(
{:trace, ^core_pid, :receive, {:"$gen_cast", {:send_message, 1, @test_message}}}
)
%{processes: [process | _]} = :sys.get_state(core_pid)
{_tokens, _data_stack, _return_stack, _dictionary, %{messages: messages} = _meta} =
process.context
assert [@test_message] == messages
# we wait for some output to be generated by the IO handler
assert_receive({:trace, ^core_pid, :receive, {:io_reply, _, :ok}})
end) == "42\n"
end
end
| 31.5 | 96 | 0.592593 |
9e172351775ccbcd75a5deb18e31152e273197b6 | 1,789 | ex | Elixir | platform/target/info_workers/wifi_level.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | platform/target/info_workers/wifi_level.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | platform/target/info_workers/wifi_level.ex | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | defmodule FarmbotOS.Platform.Target.InfoWorker.WifiLevel do
@moduledoc """
Worker process responsible for reporting current wifi
power levels to the bot_state server
"""
@report_interval 7_000
use GenServer
alias FarmbotOS.BotState
@doc false
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
@impl GenServer
def init(_args) do
send(self(), :load_network_config)
{:ok, %{ssid: nil}}
end
@impl GenServer
def handle_info(:timeout, state) do
maybe_report_wifi(VintageNet.ioctl("wlan0", :signal_poll))
{:noreply, state, @report_interval}
end
def handle_info(:load_network_config, state) do
if FarmbotOS.Config.get_network_config("eth0") do
VintageNet.subscribe(["interface", "eth0"])
{:noreply, state}
else
case FarmbotOS.Config.get_network_config("wlan0") do
%{ssid: ssid} ->
VintageNet.subscribe(["interface", "wlan0"])
{:noreply, %{state | ssid: ssid}, @report_interval}
nil ->
Process.send_after(self(), :load_network_config, 10_000)
{:noreply, %{state | ssid: nil}}
end
end
end
def handle_info(
{VintageNet, ["interface", _, "addresses"], _old,
[%{address: address} | _], _meta},
state
) do
FarmbotOS.BotState.set_private_ip(to_string(:inet.ntoa(address)))
{:noreply, state, @report_interval}
end
def handle_info({VintageNet, _property, _old, _new, _meta}, state) do
{:noreply, state, @report_interval}
end
def maybe_report_wifi({:ok, signal_info} = result) do
:ok = BotState.report_wifi_level(signal_info.signal_dbm)
:ok = BotState.report_wifi_level_percent(signal_info.signal_percent)
result
end
def maybe_report_wifi(other), do: other
end
| 26.308824 | 72 | 0.672443 |
9e17309146eeeeb9659c53f8a53f87abfe5d9dd4 | 1,467 | exs | Elixir | mix.exs | vic/comeonin_ecto_password | 1bc1be0bdb72cf20d8752676391dc41c5ac7da05 | [
"BSD-3-Clause"
] | 33 | 2016-02-01T03:25:37.000Z | 2021-12-21T15:22:36.000Z | mix.exs | vic/comeonin_ecto_password | 1bc1be0bdb72cf20d8752676391dc41c5ac7da05 | [
"BSD-3-Clause"
] | 7 | 2016-05-21T16:07:24.000Z | 2020-03-30T04:12:29.000Z | mix.exs | vic/comeonin_ecto_password | 1bc1be0bdb72cf20d8752676391dc41c5ac7da05 | [
"BSD-3-Clause"
] | 10 | 2016-05-21T09:38:26.000Z | 2020-03-17T12:45:26.000Z | defmodule ComeoninEctoPassword.Mixfile do
use Mix.Project
def project do
[
app: :comeonin_ecto_password,
version: "3.0.0",
elixir: "~> 1.7",
description: description(),
package: package(),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger]]
end
def description do
"""
Ecto custom type for storing encrypted password using Comeonin
"""
end
def package do
[
files: ~w(lib mix.exs README* LICENSE),
maintainers: [
"Victor Hugo Borja <[email protected]>",
"Herman verschooten <[email protected]>"
],
licenses: ["BSD"],
links: %{
"GitHub" => "https://github.com/vic/comeonin_ecto_password"
}
]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:ecto, "~> 3.0 or ~> 2.0"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:pbkdf2_elixir, "~> 1.0", optional: true},
{:bcrypt_elixir, "~> 2.0", optional: true},
{:argon2_elixir, "~> 2.0", optional: true}
]
end
end
| 23.285714 | 77 | 0.575324 |
9e17abdd3d8ce7917c2ea4f3793690a88debd6a2 | 475 | ex | Elixir | lib/ex_dadata/address/geocode_address/metro.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | 1 | 2021-08-23T08:24:50.000Z | 2021-08-23T08:24:50.000Z | lib/ex_dadata/address/geocode_address/metro.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | lib/ex_dadata/address/geocode_address/metro.ex | Elonsoft/ex_dadata | 71edca65f9d35fd2c9aa6553e87992b377500a1c | [
"MIT"
] | null | null | null | defmodule ExDadata.Address.GeocodeAddress.Metro do
@moduledoc false
use Ecto.Schema
alias Ecto.Changeset
@primary_key false
embedded_schema do
field :name, :string
field :line, :string
field :distance, :decimal
end
@fields ~w(name line distance)a
def cast_list!(list) when is_list(list) do
Enum.map(list, fn attrs ->
%__MODULE__{}
|> Changeset.cast(attrs, @fields)
|> Changeset.apply_action!(:insert)
end)
end
end
| 19 | 50 | 0.675789 |
9e17b7c9cfe0832213ede5344b4b20f2be36c2b8 | 480 | exs | Elixir | test/alertlytics/adapters/http_adapter_test.exs | cultivatedcode/alertlytics | 92bbc071bfc667e8c15dca6aa3a5cc28627f2fed | [
"Apache-2.0"
] | null | null | null | test/alertlytics/adapters/http_adapter_test.exs | cultivatedcode/alertlytics | 92bbc071bfc667e8c15dca6aa3a5cc28627f2fed | [
"Apache-2.0"
] | 3 | 2019-02-24T18:04:02.000Z | 2020-06-12T04:36:44.000Z | test/alertlytics/adapters/http_adapter_test.exs | cultivatedcode/alertlytics | 92bbc071bfc667e8c15dca6aa3a5cc28627f2fed | [
"Apache-2.0"
] | null | null | null | defmodule HttpAdapterTest do
use ExUnit.Case
alias Alertlytics.Adapters.HttpAdapter, as: Subject
doctest Alertlytics.Adapters.HttpAdapter
test "good url" do
assert true ==
Subject.check(%{
"health_check_url" => "https://www.cultivatedcode.com"
})
end
test "bad url" do
assert false ==
Subject.check(%{
"health_check_url" => "https://incorrect.cultivatedcode.com"
})
end
end
| 24 | 75 | 0.597917 |
9e17e3da5bf085b5365253868876aaa6ce8fa96c | 673 | exs | Elixir | airesources/Elixir/mix.exs | Cerbes/Halite-II | b402fce1bc2d325d36fe32e3b120031217b4396d | [
"MIT"
] | 232 | 2017-09-11T14:28:41.000Z | 2022-01-19T10:26:07.000Z | airesources/Elixir/mix.exs | Cerbes/Halite-II | b402fce1bc2d325d36fe32e3b120031217b4396d | [
"MIT"
] | 302 | 2017-09-13T04:46:25.000Z | 2018-09-06T22:14:06.000Z | airesources/Elixir/mix.exs | Cerbes/Halite-II | b402fce1bc2d325d36fe32e3b120031217b4396d | [
"MIT"
] | 151 | 2017-09-11T21:03:07.000Z | 2020-11-28T04:58:55.000Z | defmodule Elixirbot.Mixfile do
use Mix.Project
def project do
[
app: :elixirbot,
version: "0.2.0",
elixir: "~> 1.5",
start_permanent: Mix.env == :prod,
escript: [main_module: Elixirbot.CLI, path: 'MyBot'],
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:logger_file_backend, "~> 0.0.10"}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
end
| 21.709677 | 88 | 0.582467 |
9e181a483679dddda8789d9f59996ff4abdbdb82 | 548 | exs | Elixir | apps/data_warehouse/priv/repo/migrations/20210222224889_create_trade_events.exs | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 65 | 2020-07-07T01:51:27.000Z | 2021-09-27T00:13:59.000Z | apps/data_warehouse/priv/repo/migrations/20210222224889_create_trade_events.exs | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 5 | 2021-02-12T08:21:15.000Z | 2021-09-01T21:17:27.000Z | apps/data_warehouse/priv/repo/migrations/20210222224889_create_trade_events.exs | itsemilano/orbex | 301dfaad1369acfd68055f1868d9a1dcd7e51e16 | [
"Apache-2.0"
] | 10 | 2020-08-13T13:39:31.000Z | 2021-09-14T12:46:51.000Z | defmodule DataWarehouse.Repo.Migrations.CreateTradeEvents do
use Ecto.Migration
def change do
create table(:trade_events, primary_key: false) do
add(:id, :uuid, primary_key: true)
add(:event_type, :text)
add(:event_time, :bigint)
add(:symbol, :text)
add(:trade_id, :integer)
add(:price, :text)
add(:quantity, :text)
add(:buyer_order_id, :bigint)
add(:seller_order_id, :bigint)
add(:trade_time, :bigint)
add(:buyer_market_maker, :bool)
timestamps()
end
end
end
| 24.909091 | 60 | 0.640511 |
9e1822e95b5521fdf5c67227669c674c4372985f | 191 | ex | Elixir | lib/slackin_ex/events/fuse_handler.ex | deadtrickster/slackin_ex | a3584eb51a9bc2a7ff193e313fa6c1ebdf4bccab | [
"MIT"
] | null | null | null | lib/slackin_ex/events/fuse_handler.ex | deadtrickster/slackin_ex | a3584eb51a9bc2a7ff193e313fa6c1ebdf4bccab | [
"MIT"
] | null | null | null | lib/slackin_ex/events/fuse_handler.ex | deadtrickster/slackin_ex | a3584eb51a9bc2a7ff193e313fa6c1ebdf4bccab | [
"MIT"
] | null | null | null | defmodule SlackinEx.Events.FuseHandler do
use SlackinEx.Events.Handler
  # On any :slack_sync_api event, drop the cache so the next read
  # repopulates with fresh Slack data.
  # NOTE(review): assumes SlackinEx.Cache.invalidate/0 clears everything
  # this handler cares about — confirm in SlackinEx.Cache.
  def handle_event({:slack_sync_api, _}, state) do
    SlackinEx.Cache.invalidate()
    {:ok, state}
  end
end
| 17.363636 | 50 | 0.712042 |
9e182568407f2591cfe4bc3f34c73fd743bf8199 | 1,827 | exs | Elixir | config/prod.exs | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | config/prod.exs | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | config/prod.exs | gotoeveryone/phoenix_sample | 8e53f1d1a0c9bf37e474755c60d06f3cb578ae7f | [
"MIT"
] | null | null | null | import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :phoenix_sample, PhoenixSampleWeb.Endpoint,
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :phoenix_sample, PhoenixSampleWeb.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :phoenix_sample, PhoenixSampleWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 35.823529 | 66 | 0.713191 |
9e18482e404f9858ec4057a45b5f86a7280a711c | 4,835 | exs | Elixir | test/tds_issues_test.exs | pressrelations/tds | a05120c6064b9561860695242ebff37037f01165 | [
"Apache-2.0"
] | null | null | null | test/tds_issues_test.exs | pressrelations/tds | a05120c6064b9561860695242ebff37037f01165 | [
"Apache-2.0"
] | null | null | null | test/tds_issues_test.exs | pressrelations/tds | a05120c6064b9561860695242ebff37037f01165 | [
"Apache-2.0"
] | null | null | null | defmodule TdsIssuesTest do
import Tds.TestHelper
require Logger
use ExUnit.Case, async: true
@tag timeout: 50000
  setup do
    # Each test gets its own Tds connection; connection options come from
    # the :tds application environment (raises if :opts is missing).
    opts = Application.fetch_env!(:tds, :opts)
    {:ok, pid} = Tds.start_link(opts)
    {:ok, [pid: pid]}
  end
@tag :float
test "float read and write", context do
query("DROP TABLE [float_tests]", [])
:ok = query("""
CREATE TABLE [float_tests] (
[id] int NOT NULL identity(1,1) primary key,
[float_value] float
)
""", [])
test_vals = [
{-1234.1234, << 78, 209, 145, 92, 126, 72, 147, 192>>},
{-1234.0, << 0, 0, 0, 0, 0, 72, 147, 192>>},
{-1.0, << 0, 0, 0, 0, 0, 0, 240, 191>>},
{-0.5, << 0, 0, 0, 0, 0, 0, 224, 191>>},
{-0.3333333333333333, << 85, 85, 85, 85, 85, 85, 213, 191>>},
{-0.25, << 0, 0, 0, 0, 0, 0, 208, 191>>},
{-0.2, <<154, 153, 153, 153, 153, 153, 201, 191>>},
{0.0, << 0, 0, 0, 0, 0, 0, 0, 0>>},
{0.0, << 0, 0, 0, 0, 0, 0, 0, 0>>},
{0.2, <<154, 153, 153, 153, 153, 153, 201, 63>>},
{0.25, << 0, 0, 0, 0, 0, 0, 208, 63>>},
{0.3333333333333333, << 85, 85, 85, 85, 85, 85, 213, 63>>},
{0.5, << 0, 0, 0, 0, 0, 0, 224, 63>>},
{1.0, << 0, 0, 0, 0, 0, 0, 240, 63>>},
{1234.0, << 0, 0, 0, 0, 0, 72, 147, 64>>},
{1234.1234, << 78, 209, 145, 92, 126, 72, 147, 64>>}
]
Enum.each(test_vals, fn {val, _} ->
:ok = query("INSERT INTO [float_tests] values (#{val})", [])
end)
values = Enum.map(test_vals, fn {val, _} -> [val] end)
assert values == query("SELECT float_value FROM [float_tests]", [])
Enum.each(values, fn [val] ->
assert [[val]] == query("SELECT cast(#{val} as float)", [])
end)
query("DROP TABLE [float_tests]", [])
end
@tag :float
test "issue 33: Sending Float with more than 9 characters should not fail",
context do
query("DROP TABLE hades_sealed_cfdis", [])
query(
"""
CREATE TABLE hades_sealed_cfdis(
[id] int identity(1,1) not null primary key,
[total] float(53),
[inserted_at] datetime,
[updated_at] datetime
)
""",
[]
)
f = fn val ->
res =
query(
"""
INSERT INTO hades_sealed_cfdis ([total] ,[inserted_at], [updated_at])
VALUES (@1,@2,@3)
""",
[
%Tds.Parameter{name: "@1", value: val, type: :float},
%Tds.Parameter{
name: "@2",
value: {{2016, 12, 20}, {23, 59, 23, 0}}
},
%Tds.Parameter{name: "@3", value: {{2016, 12, 20}, {23, 59, 23, 0}}}
]
)
assert :ok == res
assert [[val]] ==
query(
"""
SELECT [total] FROM hades_sealed_cfdis
WHERE id in (select max(id) from hades_sealed_cfdis)
""",
[]
)
end
1..17
|> Enum.flat_map(&[1 / &1, -1 / &1])
|> Enum.each(f)
query("DROP TABLE hades_sealed_cfdis", [])
end
# Creates a small table plus a stored procedure over it, executes the
# procedure with a typed parameter, and cleans both objects up afterwards.
test "testing stored procedure execution", context do
  # Drops any stale copies of the procedure/table, then seeds three rows.
  create_table = """
  IF EXISTS(SELECT * FROM sys.objects where name ='RetrieveDummyValues' and type ='P') DROP PROCEDURE [dbo].[RetrieveDummyValues];
  IF OBJECT_ID('[dbo].[dummy_tbl]', 'U') IS NOT NULL DROP TABLE [dbo].[dummy_tbl];
  CREATE TABLE [dbo].[dummy_tbl](
  [id] [int] NOT NULL PRIMARY KEY,
  [name] [nvarchar] (52) NOT NULL
  );
  INSERT INTO [dbo].[dummy_tbl]
  VALUES
  (1, 'Elixir'), (2, 'Elm'), (3, 'Sql');
  """

  create_procedure = """
  CREATE PROCEDURE RetrieveDummyValues
  -- Add the parameters for the stored procedure here
  @filterId INT
  AS
  BEGIN
  -- SET NOCOUNT ON added to prevent extra result sets from
  -- interfering with SELECT statements.
  SET NOCOUNT ON;
  -- Insert statements for procedure here
  select id, name from dummy_tbl where id = @filterId
  END
  """

  query(create_table, [])
  query(create_procedure, [])

  # Executing the procedure with @filterId = 1 must return only the first row.
  assert [[1, "Elixir"]] ==
           query("exec RetrieveDummyValues @filterId", [
             %Tds.Parameter{name: "@filterId", value: 1}
           ])

  # Cleanup: remove the procedure and table created above.
  query(
    """
    IF EXISTS(SELECT * FROM sys.objects where name ='RetrieveDummyValues' and type ='P') DROP PROCEDURE [dbo].[RetrieveDummyValues];
    IF OBJECT_ID('[dbo].[dummy_tbl]', 'U') IS NOT NULL DROP TABLE [dbo].[dummy_tbl];
    """,
    []
  )
end
end
| 31.396104 | 134 | 0.476319 |
9e186fb35b6af7ff44db216d4f7f71a632b4f9c9 | 3,083 | ex | Elixir | lib/apoc/hazmat/mac/hmac256.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 6 | 2018-10-04T14:18:35.000Z | 2020-05-15T08:43:31.000Z | lib/apoc/hazmat/mac/hmac256.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 3 | 2018-10-23T12:20:45.000Z | 2021-01-27T10:41:14.000Z | lib/apoc/hazmat/mac/hmac256.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 2 | 2020-02-19T00:43:37.000Z | 2021-08-19T04:04:25.000Z | defmodule Apoc.Hazmat.MAC.HMAC256 do
@moduledoc """
Implementation of the HMAC construction
as described in [FIPS PUB 198-1](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.198-1.pdf)
"""
# This is needed for our API check
require :crypto
use Apoc.Adapter.MAC
# HMAC-SHA256 keys must be binaries of at least 32 bytes (256 bits).
defguard is_valid_key(key) when is_binary(key) and byte_size(key) >= 32
@doc """
Computes the HMAC-SHA256 signature of `message` under `key`.

Only the raw tag is returned (wrapped in an `:ok` tuple); the plaintext is
not embedded in the result, unlike `Plug.Crypto.MessageVerifier`, which
encodes the message together with its signature.

Keys must be at least 32 bytes (256 bits). Keys longer than 64 bytes are
valid but unnecessary, because HMAC hashes them down to a shorter key.
See https://crypto.stackexchange.com/questions/34864/key-size-for-hmac-sha256

## Examples

    iex> Apoc.Hazmat.MAC.HMAC256.sign("hello", Apoc.decode!("Of-znK3DYHWeV1u6XHXQ6QNotKMLdYleLUhc4-TMpxU"))
    {:ok,
    <<241, 135, 240, 239, 31, 202, 134, 189, 43, 55, 208, 89, 37, 208, 2, 87, 228,
    236, 191, 9, 76, 82, 110, 190, 174, 78, 97, 103, 188, 14, 211, 146>>}

    iex> Apoc.Hazmat.MAC.HMAC256.sign("hello", <<1, 2, 3>>)
    {:error, "Invalid key size"}

"""
@impl Apoc.Adapter.MAC
def sign(message, key, opts \\ [])

def sign(message, key, _opts) when is_valid_key(key) do
  {:ok, hmac_sha256(key, message)}
end

def sign(_message, _key, _opts), do: {:error, "Invalid key size"}

# `:crypto.mac/4` superseded `:crypto.hmac/3` in newer OTP releases;
# dispatch at runtime to whichever this VM provides.
defp hmac_sha256(key, message) do
  if function_exported?(:crypto, :mac, 4) do
    :crypto.mac(:hmac, :sha256, key, message)
  else
    :crypto.hmac(:sha256, key, message)
  end
end
@doc """
Like `c:sign/3`, but returns the raw tag directly and raises `Apoc.Error`
when signing fails (for example, on an invalid key).

## Example

    iex> "hello"
    ...> |> Apoc.Hazmat.MAC.HMAC256.sign!(Apoc.decode!("Of-znK3DYHWeV1u6XHXQ6QNotKMLdYleLUhc4-TMpxU"))
    ...> |> Apoc.encode
    "8Yfw7x_Khr0rN9BZJdACV-TsvwlMUm6-rk5hZ7wO05I"

"""
@impl Apoc.Adapter.MAC
def sign!(message, key, opts \\ []) do
  case sign(message, key, opts) do
    {:ok, tag} ->
      tag

    {:error, reason} ->
      raise Apoc.Error, message: reason
  end
end
@doc """
Verifies a tag generated by `Apoc.Hazmat.MAC.HMAC256.sign/3`.

Recomputes the tag for `message` under `key` and compares it against `tag`
using a constant-time comparison, returning `true` on a match and `false`
otherwise (including when signing itself fails).

## Examples

    iex> key = Apoc.decode!("Of-znK3DYHWeV1u6XHXQ6QNotKMLdYleLUhc4-TMpxU")
    iex> "8Yfw7x_Khr0rN9BZJdACV-TsvwlMUm6-rk5dZ7wO05I" |> (fn _ -> "8Yfw7x_Khr0rN9BZJdACV-TsvwlMUm6-rk5hZ7wO05I" end).()
    ...> |> Apoc.decode!
    ...> |> Apoc.Hazmat.MAC.HMAC256.verify("hello", key)
    true

"""
@impl Apoc.Adapter.MAC
def verify(tag, message, key, opts \\ []) when is_valid_key(key) do
  case sign(message, key, opts) do
    {:ok, expected} -> Apoc.secure_compare(tag, expected)
    _error -> false
  end
end
@deprecated "Use `Apoc.sign/3` or `Apoc.sign!/3` instead"
# Kept for backwards compatibility: signs and hex-encodes the raw tag.
def sign_hex(message, key, opts \\ []) do
  Apoc.hex(sign!(message, key, opts))
end
end
| 29.084906 | 109 | 0.6458 |
9e18723d52c37779b147c49735b83f233433d93f | 174 | exs | Elixir | priv/repo/migrations/20200831195118_remove_stdout_frames.exs | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | 893 | 2017-09-14T14:18:29.000Z | 2022-03-31T21:45:08.000Z | priv/repo/migrations/20200831195118_remove_stdout_frames.exs | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | 103 | 2017-09-29T22:15:33.000Z | 2022-03-27T21:47:43.000Z | priv/repo/migrations/20200831195118_remove_stdout_frames.exs | remerle/asciinema-server | 895bf5a7ffb7db1b418c97cfd2ac9136c46dfb57 | [
"Apache-2.0"
] | 152 | 2017-09-07T12:43:15.000Z | 2022-03-10T18:47:47.000Z | defmodule Asciinema.Repo.Migrations.RemoveStdoutFrames do
use Ecto.Migration
def change do
  # Permanently drops the legacy stdout_frames column.
  # NOTE(review): `remove/1` without a type is not reversible — rolling this
  # migration back will raise. Presumably intentional, since the column data
  # cannot be restored anyway; confirm.
  alter table(:asciicasts) do
    remove :stdout_frames
  end
end
end
| 17.4 | 57 | 0.741379 |
9e188c0b08dc08284cc3ed9e64fb2eff9f7d1770 | 3,645 | exs | Elixir | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/registration_controller_test.exs | fadeojo/mithril | d84ff2d42f895c27c46c0feb09b70ccbac5827ac | [
"MIT"
] | 54 | 2018-01-24T00:22:57.000Z | 2019-01-15T20:03:52.000Z | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/registration_controller_test.exs | infinitered/mithril | 0bbad29f86c63d9a827dcaaf6fed78a176ab90d7 | [
"MIT"
] | 3 | 2018-02-05T18:00:39.000Z | 2018-05-04T00:05:20.000Z | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/registration_controller_test.exs | fadeojo/mithril | d84ff2d42f895c27c46c0feb09b70ccbac5827ac | [
"MIT"
] | 3 | 2018-02-15T19:08:23.000Z | 2018-09-28T13:49:27.000Z | defmodule <%= @project_name_camel_case %>Web.Accounts.RegistrationControllerTest do
use <%= @project_name_camel_case %>Web.ConnCase
import <%= @project_name_camel_case %>.AccountsFactory
import Plug.Test
alias <%= @project_name_camel_case %>.Accounts
describe ".new/2" do
  # The registration page must render a form that posts to :create and
  # exposes the three registration inputs.
  test "renders registration form", %{conn: conn} do
    response =
      conn
      |> get(Routes.registration_path(conn, :new))
      |> html_response(200)

    assert response =~ "form"
    assert response =~ "action=\"#{Routes.registration_path(conn, :create)}\""
    assert response =~ "input id=\"user_email\""
    assert response =~ "input id=\"user_password\""
    assert response =~ "input id=\"user_password_confirmation\""
  end
end
describe ".create/2" do
  test "creates a user and logs in if valid", %{conn: conn} do
    params = %{
      "user" => %{
        "email" => "[email protected]",
        "password" => "p@$$w0rd",
        "password_confirmation" => "p@$$w0rd"
      }
    }

    conn = post(conn, Routes.registration_path(conn, :create), params)

    # Successful signup logs the user in and redirects home with a flash.
    assert conn.assigns.current_user
    assert get_flash(conn, :success)
    assert redirected_to(conn) =~ Routes.page_path(conn, :index)
  end

  # Invalid submissions re-render the form (HTTP 400) with changeset errors.
  test "validates form", %{conn: conn} do
    params = %{
      "user" => %{
        "email" => "",
        "password" => "p@$$w0rd",
        "password_confirmation" => "mismatch"
      }
    }

    response =
      conn
      |> post(Routes.registration_path(conn, :create), params)
      |> html_response(400)

    assert response =~ "can't be blank"
    assert response =~ "does not match confirmation"
  end
end
describe ".edit/2" do
  setup [:create_user, :create_token]

  test "requires user to be logged in", %{conn: conn} do
    assert_login_required fn ->
      get(conn, Routes.registration_path(conn, :edit))
    end
  end

  # An authenticated user sees the edit form pre-filled with their email.
  test "displays a form to edit the current user", %{conn: conn, user: user, token: token} do
    response =
      conn
      |> assign(:current_user, user)
      |> init_test_session(token: token.token)
      |> get(Routes.registration_path(conn, :edit))
      |> html_response(200)

    assert response =~ "form"
    assert response =~ "action=\"#{Routes.registration_path(conn, :update)}\""
    assert response =~ "input id=\"user_email\""
    assert response =~ "input id=\"user_password\""
    assert response =~ "input id=\"user_password_confirmation\""
    assert response =~ "value=\"#{user.email}\""
  end
end
describe ".update/2" do
  setup [:create_user, :create_token]

  test "requires user to be logged in", %{conn: conn} do
    assert_login_required fn ->
      put(conn, Routes.registration_path(conn, :update), %{})
    end
  end

  test "updates a user's fields", %{conn: conn, user: user, token: token} do
    params = %{
      "user" => %{
        "email" => "[email protected]",
        "password" => "new_password",
        "password_confirmation" => "new_password"
      }
    }

    conn =
      conn
      |> assign(:current_user, user)
      |> init_test_session(token: token.token)
      |> put(Routes.registration_path(conn, :update), params)

    assert get_flash(conn, :success)
    assert html_response(conn, 200) =~ "form"
    # The new credentials must be usable for authentication afterwards.
    assert {:ok, _token} = Accounts.tokenize({"[email protected]", "new_password"})
  end
end
# Runs `fun` (a request) and asserts that the response is the standard
# "must be logged in" flash plus a redirect to the login page.
defp assert_login_required(fun) do
  conn = fun.()
  assert get_flash(conn, :error) =~ "logged in"
  assert redirected_to(conn) == Routes.session_path(conn, :new)
end
end
| 29.877049 | 95 | 0.59808 |
9e18a3594e84223127d0af62036cbb9635553c44 | 644 | exs | Elixir | examples/hello_world/test/hello_world_test.exs | Sailias/aws-lambda-elixir-runtime | 70808bdebda270b8402a0b6ac875f0a948a920e6 | [
"MIT-0"
] | 3 | 2019-09-10T22:12:27.000Z | 2022-01-06T01:21:04.000Z | examples/hello_world/test/hello_world_test.exs | StratoSpire/aws-lambda-elixir-runtime | 68259e321edf6e975a2e7e257a98603b243de87a | [
"MIT-0"
] | null | null | null | examples/hello_world/test/hello_world_test.exs | StratoSpire/aws-lambda-elixir-runtime | 68259e321edf6e975a2e7e257a98603b243de87a | [
"MIT-0"
] | 2 | 2020-08-13T05:37:17.000Z | 2022-01-06T01:24:02.000Z | defmodule HelloWorldTest do
use ExUnit.Case
doctest HelloWorld
test "greets the world" do
  # Minimal stand-in for a Lambda invocation event payload.
  event = %{"key1" => "value1", "key2" => "value2", "key3" => "value3"}

  # Runtime metadata headers as the Lambda runtime API would supply them.
  runtime_context = %{
    "lambda-runtime-aws-request-id" => "22f56578-dfd6-47f7-8df5-81591dc205e8",
    "lambda-runtime-deadline-ms" => "1567642833715",
    "lambda-runtime-invoked-function-arn" => "arn:aws:lambda:us-west-2:123456789012:function:hello_world",
    "lambda-runtime-trace-id" => "Root=1-5d7054cc-49cf0fdc2501a97e69a4be32;Parent=5de8912c22d2913a;Sampled=0"
  }

  assert :ok == HelloWorld.handler(event, runtime_context)
end
end
| 29.272727 | 111 | 0.659938 |
9e18b8692ab2fd2f9f5f319427aaf8b291d48cc7 | 2,805 | ex | Elixir | lib/sieve_of_eratosthenes.ex | Dante7/prime_opt | 45ce8a673884c3a1a6d9a235fa2f500a53f5e7ea | [
"MIT"
] | 1 | 2021-12-05T09:55:22.000Z | 2021-12-05T09:55:22.000Z | lib/sieve_of_eratosthenes.ex | Dante7/sieve_of_eratosthenes | 45ce8a673884c3a1a6d9a235fa2f500a53f5e7ea | [
"MIT"
] | null | null | null | lib/sieve_of_eratosthenes.ex | Dante7/sieve_of_eratosthenes | 45ce8a673884c3a1a6d9a235fa2f500a53f5e7ea | [
"MIT"
] | null | null | null | defmodule SieveOfEratosthenes do
@moduledoc """
Documentation for `SieveOfEratosthenes`.
Implementation of sieve of eratosthenes algorithm to calculate all the prime numbers
until number given used as limit, using tail recursive optimization and async functions
"""
@doc """
Calculates all prime numbers up to and including `input`.

Splits the candidates `2..input` into chunks of size `ceil(sqrt(input))`,
sieves the first chunk sequentially (it contains every prime needed to
eliminate composites in the rest of the range), then filters the remaining
chunks concurrently.

Returns `[]` for any `input` below 2, since 2 is the smallest prime.
"""
# Guard fixes a bug: for input < 2 the old code built the decreasing range
# `2..input` (e.g. `2..1` == [2, 1]) and returned bogus results like [2, 1].
def calculate_primes(input) when input < 2, do: []

def calculate_primes(input) do
  chunk_size = get_chunk_size(input)
  chunked_list = get_chunked_list(input, chunk_size)
  # Primes in the first chunk cover every value up to ~sqrt(input) + 1, so
  # any composite <= input has at least one prime factor in this list.
  primes = recursive_primes(hd(chunked_list), [])
  another_primes = get_non_multiples(tl(chunked_list), primes)
  primes ++ another_primes
end
@doc """
Builds the candidate list from two up to `input` and partitions it into
chunks of `chunk_size` elements (the final chunk may be shorter).

## Examples

    iex> SieveOfEratosthenes.get_chunked_list(10, 2)
    [[2, 3], [4, 5], [6, 7], [8, 9], [10]]

"""
def get_chunked_list(input, chunk_size) do
  candidates = Enum.to_list(2..input)
  Enum.chunk_every(candidates, chunk_size)
end
@doc """
Returns the chunk size: the square root of `input`, rounded up to an
integer. Used to bound the sequential part of the sieve, since every
composite up to `input` has a prime factor no larger than its square root.

## Examples

    iex> SieveOfEratosthenes.get_chunk_size(1_000)
    32

"""
def get_chunk_size(input) do
  input |> :math.sqrt() |> ceil()
end
@doc """
Concurrently filters every chunk in `numbers`, keeping only the values that
are not multiples of any prime in `primes`, and concatenates the results.

Each chunk is processed in its own task; tasks are given up to 100 seconds
before being brutally shut down.

## Examples

    iex> SieveOfEratosthenes.get_non_multiples([2..100], [2,3,5,7,11])
    [13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]

"""
def get_non_multiples(numbers, primes) do
  tasks =
    Enum.map(numbers, fn chunk ->
      Task.async(fn -> remove_multiples(primes, chunk) end)
    end)

  tasks
  |> Task.yield_many(100_000)
  |> Enum.flat_map(fn {task, result} ->
    # {:ok, list} on success; otherwise kill the straggler task.
    elem(result, 1) || Task.shutdown(task, :brutal_kill)
  end)
end
@doc """
Sieves the given candidate list with the classic sieve of Eratosthenes:
the head is always prime; its multiples are discarded from the tail before
recursing. Accumulated primes are returned in ascending order.

## Examples

    iex> SieveOfEratosthenes.recursive_primes([2,3,4,5,6,7,8,9,10], [])
    [2, 3, 5, 7]

"""
def recursive_primes([prime | rest], acc) do
  rest
  |> Enum.reject(&(rem(&1, prime) == 0))
  |> recursive_primes(acc ++ [prime])
end

def recursive_primes([], acc), do: acc
@doc """
Drops from `number_list` every value that is a multiple of any of the given
primes (including the primes themselves, since each divides itself).

## Examples

    iex> l = 10..100 |> Enum.to_list
    iex> SieveOfEratosthenes.remove_multiples([2,3,5,7,11], l)
    [13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]

"""
def remove_multiples([prime | rest], numbers) do
  remaining = Enum.reject(numbers, &(rem(&1, prime) == 0))
  remove_multiples(rest, remaining)
end

def remove_multiples([], numbers), do: numbers
end
| 28.05 | 97 | 0.653476 |
9e1905deb6e71b80522a03578153b6fbadb92bf9 | 389 | ex | Elixir | lib/trento/support/mix/tasks/helper.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-22T16:59:34.000Z | 2022-03-22T16:59:34.000Z | lib/trento/support/mix/tasks/helper.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 24 | 2022-03-22T16:45:25.000Z | 2022-03-31T13:00:02.000Z | lib/trento/support/mix/tasks/helper.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-30T14:16:16.000Z | 2022-03-30T14:16:16.000Z | defmodule Trento.Tasks.Helper do
@moduledoc """
Helper functions for tasks.
"""
# Ensures the database-facing applications are started, then boots the repo
# process so a task can run queries without the whole app running.
def start_repo do
  Enum.each([:postgrex, :ecto], &Application.ensure_all_started/1)
  Trento.Repo.start_link()
end
# Reports an error message: raises via `Mix.raise/1` when Mix is compiled in
# (dev/test task context, presumably), otherwise prints the message in red.
def print_error(msg) do
  if match?({:module, _}, Code.ensure_compiled(Mix)) do
    Mix.raise(msg)
  else
    IO.puts(IO.ANSI.red() <> msg)
  end
end
end
| 19.45 | 51 | 0.632391 |
9e1952405b2d0ce0c1df2f3ff59aea0e83d102f2 | 908 | ex | Elixir | lib/sse.ex | mustafaturan/sse | 9edc7bc2397fb618861f15edb0a9ffd9ff8bb59e | [
"Unlicense"
] | 73 | 2018-02-20T09:09:33.000Z | 2022-02-12T19:56:46.000Z | lib/sse.ex | mustafaturan/sse | 9edc7bc2397fb618861f15edb0a9ffd9ff8bb59e | [
"Unlicense"
] | 39 | 2018-11-21T12:04:33.000Z | 2021-08-02T13:14:36.000Z | lib/sse.ex | mustafaturan/sse | 9edc7bc2397fb618861f15edb0a9ffd9ff8bb59e | [
"Unlicense"
] | 6 | 2018-11-20T13:18:59.000Z | 2022-03-22T12:35:21.000Z | defmodule SSE do
@moduledoc """
Server Sent Events handler
"""
alias SSE.Server
@type config :: map()
@type chunk :: SSE.Chunk.t()
@type conn :: Plug.Conn.t()
@type event_id :: integer() | String.t()
@type event_shadow_with_config :: {config(), topic(), event_id()}
@type matcher :: tuple()
@type topic :: atom()
@type topic_or_topics :: topic() | topics()
@type topics :: list(topic())
@type topics_with_chunk :: {topic_or_topics(), chunk()}
@doc """
Delivers an EventBus SSE event notification to the subscriber process.

Expects the event shadow extended with a config map containing the `:pid`
of the listening process; that process receives a `{:sse, topic, id}`
message.
"""
# Spec fix: the old spec declared `no_return()`, but this function returns
# normally — `send/2` returns the message it sent.
@spec process(event_shadow_with_config()) :: {:sse, topic(), event_id()}
def process({%{pid: pid}, topic, id} = _event_shadow_with_config) do
  send(pid, {:sse, topic, id})
end
@doc """
Serves the SSE stream.

Delegates to `SSE.Server.stream/3`. The default `matcher` is `{SSE, {}}`,
presumably so events are dispatched back through `process/1` in this
module — confirm against `SSE.Server`.
"""
@spec stream(conn(), topics_with_chunk(), matcher()) :: conn()
defdelegate stream(conn, data, matcher \\ {SSE, {}}),
  to: Server,
  as: :stream
end
| 25.942857 | 70 | 0.631057 |
9e196653887e96878726a7ae7100ec471eaffdfc | 17,174 | exs | Elixir | test/taglet/tag_as_test.exs | ringofhealth/ex_tag | a0aa0a3c8f57311867e33d2290a944a46351d0e2 | [
"Apache-2.0"
] | null | null | null | test/taglet/tag_as_test.exs | ringofhealth/ex_tag | a0aa0a3c8f57311867e33d2290a944a46351d0e2 | [
"Apache-2.0"
] | null | null | null | test/taglet/tag_as_test.exs | ringofhealth/ex_tag | a0aa0a3c8f57311867e33d2290a944a46351d0e2 | [
"Apache-2.0"
] | null | null | null | defmodule Taglet.TagAsTest do
alias Ecto.Adapters.SQL
alias TagletPost, as: Post
alias Taglet.{Tagging, Tag}

import Ecto.Query
# import Mix.Ecto, only: [build_repo_priv: 1]

use ExUnit.Case

# Local replacement for `Mix.Ecto.build_repo_priv/1` (see the commented-out
# import above): absolute path of the repo's priv directory in the app dir.
def build_repo_priv(repo) do
  Application.app_dir(
    Keyword.fetch!(repo.config(), :otp_app),
    source_repo_priv(repo)
  )
end

# The repo's configured :priv path, falling back to the conventional
# "priv/<repo_name>" location derived from the repo module name.
def source_repo_priv(repo) do
  repo.config()[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
end

@repo Taglet.RepoClient.repo()
@tenant_id "example_tenant"

doctest Taglet

# Every test starts (and finishes) with empty Post/Tagging/Tag tables in
# both the default schema and the multi-tenant schema.
setup do
  # Regular test
  @repo.delete_all(Post)
  @repo.delete_all(Tagging)
  @repo.delete_all(Tag)
  # Multi tenant test
  setup_tenant()

  on_exit(fn ->
    # Regular test
    @repo.delete_all(Post)
    @repo.delete_all(Tagging)
    @repo.delete_all(Tag)
    # Multi tenant test
    setup_tenant()
  end)

  :ok
end
# ---------------------------------------------------------------------------
# Regular (default-schema) tests
# ---------------------------------------------------------------------------

test "using the module allows to add tags and list it" do
  post = @repo.insert!(%Post{title: "hello world"})

  result = Post.add_categories(post, ["mycategory", "yourcategory"])

  assert result.categories == ["mycategory", "yourcategory"]
end

test "using the module allows to add tags and list it as a queryable" do
  post = @repo.insert!(%Post{title: "hello world"})

  Post.add_categories(post, ["mycategory", "yourcategory"])

  queryable = Post.categories_queryable()

  assert queryable.__struct__ == Ecto.Query
  assert queryable |> @repo.all == ["mycategory", "yourcategory"]
end

test "using the module allows to add a tag and list it" do
  post = @repo.insert!(%Post{title: "hello world"})

  Post.add_category(post, "mycategory")

  result = Post.category_list(post)

  assert result == ["mycategory"]
end

# Tags and categories are independent contexts over the same record.
test "using the module allows to add a tag and list it for different contexts" do
  post = @repo.insert!(%Post{title: "hello world"})

  Post.add_category(post, "mycategory")
  Post.add_tag(post, "mytag")

  tag_result = Post.tag_list(post)
  category_result = Post.category_list(post)

  assert tag_result == ["mytag"]
  assert category_result == ["mycategory"]
end

test "using the module allows to add a tag and list it as queryable for different contexts" do
  post = @repo.insert!(%Post{title: "hello world"})

  Post.add_category(post, "mycategory")
  Post.add_tag(post, "mytag")

  tag_queryable = Post.tags_queryable()
  category_queryable = Post.categories_queryable()

  assert tag_queryable.__struct__ == Ecto.Query
  assert category_queryable.__struct__ == Ecto.Query
  assert tag_queryable |> @repo.all == ["mytag"]
  assert category_queryable |> @repo.all == ["mycategory"]
end

test "Remove only a Tag relation" do
  post1 = @repo.insert!(%Post{title: "Post1"})
  post2 = @repo.insert!(%Post{title: "Post2"})
  # We add a category without relations
  Post.add_category("public")
  # Now we add 2 new entries in Tagging, both pointing at the same tag
  Post.add_category(post1, "public")
  Post.add_category(post2, "public")
  # Remove only the relation with post1
  result = Post.remove_category(post1, "public")
  # Tag still exists but there are 2 relations in Tagging that represent
  # a general relation with Post - categories and one for post2
  assert result.categories == []
  assert Post.categories() == ["public"]
  assert Tag |> @repo.all |> length == 1
  assert Tagging |> @repo.all |> length == 2
end

test "It is possible to remove a Tag and all its relations" do
  post = @repo.insert!(%Post{title: "Post1"})
  # We add a category without relations
  Post.add_category("public")
  # Now we add a new entry in Tagging relating Tag and taggable_id
  Post.add_category(post, "public")
  # Remove everything about public - Post - categories
  result = Post.remove_category("public")

  assert result.categories == []
  assert Tag |> @repo.all == []
  assert Tagging |> @repo.all == []
end

test "Remove a generic Tag keep other contexts" do
  post = @repo.insert!(%Post{title: "Post1"})
  # We add two categories in a general way (without relations)
  Post.add_category("public")
  Post.add_tag("private")
  # Now we add 2 new entries in Tagging relating Tag and taggable_id
  Post.add_category(post, "public")
  Post.add_tag(post, "private")
  # At this point we have 2 tags in Tag, and 4 entries in Tagging
  result = Post.remove_category("public")
  # Remove everything about public - Post - categories
  assert result.categories == []
  assert Post.categories() == []
  assert Post.tags() == ["private"]
  assert Tagging |> @repo.all |> length == 2
end

test "using the module allows to search for all created tags for a context" do
  post1 = @repo.insert!(%Post{title: "hello world"})
  post2 = @repo.insert!(%Post{title: "hello world2"})

  Taglet.add(post1, ["tag1", "tag2"])
  Taglet.add(post2, ["tag2", "tag3"])

  result = Post.tags()

  # Duplicates across posts are collapsed.
  assert result == ["tag1", "tag2", "tag3"]
end

test "using the module allows to search for tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"})
  post2 = @repo.insert!(%Post{title: "hello world2"})
  post3 = @repo.insert!(%Post{title: "hello world3"})

  Post.add_category(post1, "tagged1")
  Post.add_category(post2, "tagged1")
  Post.add_category(post3, "tagged2")

  result = Post.tagged_with_category("tagged1")

  assert result == [post1, post2]
end

test "using the module allows to search for any tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"})
  post2 = @repo.insert!(%Post{title: "hello world2"})
  post3 = @repo.insert!(%Post{title: "hello world3"})
  post4 = @repo.insert!(%Post{title: "hello world"})
  post5 = @repo.insert!(%Post{title: "hello world3"})
  post6 = @repo.insert!(%Post{title: "hello world3"})

  Post.add_category(post1, "tagged1")
  Post.add_category(post2, "tagged1")
  Post.add_category(post3, "tagged2")
  Post.add_category(post4, "tagged2")
  Post.add_category(post5, "tagged2")
  Post.add_categories(post6, ["tagged2", "hello"])

  # Matches posts carrying at least one of the given categories.
  result = Post.tagged_with_any_categories(["tagged1", "hello"])

  assert result == [post1, post2, post6]
end

test "using the module allows to build a query to search for tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"})
  post2 = @repo.insert!(%Post{title: "hello world2"})
  post3 = @repo.insert!(%Post{title: "hello world3"})

  Post.add_category(post1, "tagged1")
  Post.add_category(post2, "tagged1")
  Post.add_category(post3, "tagged2")

  # The tag filter composes with an arbitrary base query.
  query = Post |> where(title: "hello world1")

  result = Post.tagged_with_query_category(query, "tagged1") |> @repo.all

  assert result == [post1]
end

test "using the module allows to build a query to search for any tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"})
  post2 = @repo.insert!(%Post{title: "hello world2"})
  post3 = @repo.insert!(%Post{title: "hello world3"})

  Post.add_category(post1, "tagged1")
  Post.add_category(post2, "tagged1")
  Post.add_category(post3, "tagged2")

  query = Post |> where(title: "hello world1")

  result = Post.tagged_with_any_query_categories(query, ["tagged1"]) |> @repo.all

  assert result == [post1]
end

test "Update a tag name without relations" do
  Post.add_category(["public"])

  assert Post.categories() == ["public"]

  # Renaming updates the Tag row in place; counts stay the same.
  Post.rename_category("public", "public_post")

  assert Post.categories() == ["public_post"]
  assert Tag |> @repo.all |> length == 1
  assert Tagging |> @repo.all |> length == 1
end

test "Update a tag name with relations and different contexts" do
  Post.add_category(["public", "private"])
  Post.add_tags(["private"])

  assert Post.categories() == ["private", "public"]
  assert Post.tags() == ["private"]

  # Renaming in the categories context must not touch the tags context.
  Post.rename_category("private", "private_category")

  assert Post.categories() == ["private_category", "public"]
  assert Post.tags() == ["private"]
  assert Tag |> @repo.all |> length == 3
  assert Tagging |> @repo.all |> length == 3
end
# ---------------------------------------------------------------------------
# Multi-tenant tests: same scenarios as above, scoped to the @tenant_id
# schema via the `prefix:` option. Queries without a prefix hit the default
# schema and must come back empty.
# ---------------------------------------------------------------------------

test "[multi tenant] using the module allows to add tags and list it" do
  post = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)

  result = Post.add_categories(post, ["mycategory", "yourcategory"], prefix: @tenant_id)

  assert result.categories == ["mycategory", "yourcategory"]
end

test "[multi tenant] using the module allows to add tags and list it as a queryable" do
  post = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)

  Post.add_categories(post, ["mycategory", "yourcategory"], prefix: @tenant_id)

  queryable = Post.categories_queryable()

  assert queryable.__struct__ == Ecto.Query
  assert queryable |> @repo.all(prefix: @tenant_id) == ["mycategory", "yourcategory"]
  # The default schema must remain untouched.
  assert queryable |> @repo.all == []
end

test "[multi tenant] using the module allows to add a tag and list it" do
  post = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)

  Post.add_category(post, "mycategory", prefix: @tenant_id)

  result = Post.category_list(post, prefix: @tenant_id)

  assert result == ["mycategory"]
end

test "[multi tenant] using the module allows to add a tag and list it for different contexts" do
  post = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)

  Post.add_category(post, "mycategory", prefix: @tenant_id)
  Post.add_tag(post, "mytag", prefix: @tenant_id)

  tag_result = Post.tag_list(post, prefix: @tenant_id)
  category_result = Post.category_list(post, prefix: @tenant_id)

  assert tag_result == ["mytag"]
  assert category_result == ["mycategory"]
end

test "[multi tenant] using the module allows to add a tag and list it as queryable for different contexts" do
  post = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)

  Post.add_category(post, "mycategory", prefix: @tenant_id)
  Post.add_tag(post, "mytag", prefix: @tenant_id)

  tag_queryable = Post.tags_queryable()
  category_queryable = Post.categories_queryable()

  assert tag_queryable.__struct__ == Ecto.Query
  assert category_queryable.__struct__ == Ecto.Query
  # Nothing in the default schema...
  assert tag_queryable |> @repo.all == []
  assert category_queryable |> @repo.all == []
  # ...everything in the tenant schema.
  assert tag_queryable |> @repo.all(prefix: @tenant_id) == ["mytag"]
  assert category_queryable |> @repo.all(prefix: @tenant_id) == ["mycategory"]
end

test "[multi tenant] Remove only a Tag relation" do
  post1 = @repo.insert!(%Post{title: "Post1"}, prefix: @tenant_id)
  post2 = @repo.insert!(%Post{title: "Post2"}, prefix: @tenant_id)
  # We add a category without relations
  Post.add_category("public", prefix: @tenant_id)
  # Now we add 2 new entries in Tagging, both pointing at the same tag
  Post.add_category(post1, "public", prefix: @tenant_id)
  Post.add_category(post2, "public", prefix: @tenant_id)
  # Remove only the relation with post1
  result = Post.remove_category(post1, "public", prefix: @tenant_id)
  # Tag still exists but there are 2 relations in Tagging that represent
  # a general relation with Post - categories and one for post2
  assert result.categories == []
  assert Post.categories(prefix: @tenant_id) == ["public"]
  assert Tag |> @repo.all |> length == 0
  assert Tagging |> @repo.all |> length == 0
  assert Tag |> @repo.all(prefix: @tenant_id) |> length == 1
  assert Tagging |> @repo.all(prefix: @tenant_id) |> length == 2
end

test "[multi tenant] It is possible to remove a Tag and all its relations" do
  post = @repo.insert!(%Post{title: "Post1"}, prefix: @tenant_id)
  # We add a category without relations
  Post.add_category("public", prefix: @tenant_id)
  # Now we add a new entry in Tagging relating Tag and taggable_id
  Post.add_category(post, "public", prefix: @tenant_id)
  # Remove everything about public - Post - categories
  result = Post.remove_category("public", prefix: @tenant_id)

  assert result.categories == []
  assert Tag |> @repo.all(prefix: @tenant_id) == []
  assert Tagging |> @repo.all(prefix: @tenant_id) == []
end

test "[multi tenant] Remove a generic Tag keep other contexts" do
  post = @repo.insert!(%Post{title: "Post1"}, prefix: @tenant_id)
  # We add two categories in a general way (without relations)
  Post.add_category("public", prefix: @tenant_id)
  Post.add_tag("private", prefix: @tenant_id)
  # Now we add 2 new entries in Tagging relating Tag and taggable_id
  Post.add_category(post, "public", prefix: @tenant_id)
  Post.add_tag(post, "private", prefix: @tenant_id)
  # At this point we have 2 tags in Tag, and 4 entries in Tagging
  result = Post.remove_category("public", prefix: @tenant_id)
  # Remove everything about public - Post - categories
  assert result.categories == []
  assert Post.categories(prefix: @tenant_id) == []
  assert Post.tags(prefix: @tenant_id) == ["private"]
  assert Tagging |> @repo.all(prefix: @tenant_id) |> length == 2
end

test "[multi tenant] using the module allows to search for all created tags for a context" do
  post1 = @repo.insert!(%Post{title: "hello world"}, prefix: @tenant_id)
  post2 = @repo.insert!(%Post{title: "hello world2"}, prefix: @tenant_id)

  Taglet.add(post1, ["tag1", "tag2"], prefix: @tenant_id)
  Taglet.add(post2, ["tag2", "tag3"], prefix: @tenant_id)

  result = Post.tags(prefix: @tenant_id)

  assert result == ["tag1", "tag2", "tag3"]
end

test "[multi tenant] using the module allows to search for tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"}, prefix: @tenant_id)
  post2 = @repo.insert!(%Post{title: "hello world2"}, prefix: @tenant_id)
  post3 = @repo.insert!(%Post{title: "hello world3"}, prefix: @tenant_id)

  Post.add_category(post1, "tagged1", prefix: @tenant_id)
  Post.add_category(post2, "tagged1", prefix: @tenant_id)
  Post.add_category(post3, "tagged2", prefix: @tenant_id)

  # Re-fetch so struct metadata (prefix) matches what the search returns.
  post1 = @repo.get(Post, 1, prefix: @tenant_id)
  post2 = @repo.get(Post, 2, prefix: @tenant_id)

  result = Post.tagged_with_category("tagged1", prefix: @tenant_id)

  assert result == [post1, post2]
end

test "[multi tenant] using the module allows to build a query to search for tagged resources" do
  post1 = @repo.insert!(%Post{title: "hello world1"}, prefix: @tenant_id)
  post2 = @repo.insert!(%Post{title: "hello world2"}, prefix: @tenant_id)
  post3 = @repo.insert!(%Post{title: "hello world3"}, prefix: @tenant_id)

  Post.add_category(post1, "tagged1", prefix: @tenant_id)
  Post.add_category(post2, "tagged1", prefix: @tenant_id)
  Post.add_category(post3, "tagged2", prefix: @tenant_id)

  query = Post |> where(title: "hello world1")

  # Re-fetch so struct metadata (prefix) matches what the query returns.
  post1 = @repo.get(Post, 1, prefix: @tenant_id)

  result = Post.tagged_with_query_category(query, "tagged1") |> @repo.all(prefix: @tenant_id)

  assert result == [post1]
end

test "[multi tenant] Update a tag name without relations" do
  Post.add_category(["public"], prefix: @tenant_id)

  assert Post.categories(prefix: @tenant_id) == ["public"]

  # Renaming updates the Tag row in place inside the tenant schema only.
  Post.rename_category("public", "public_post", prefix: @tenant_id)

  assert Post.categories() == []
  assert Tag |> @repo.all |> length == 0
  assert Tagging |> @repo.all |> length == 0
  assert Post.categories(prefix: @tenant_id) == ["public_post"]
  assert Tag |> @repo.all(prefix: @tenant_id) |> length == 1
  assert Tagging |> @repo.all(prefix: @tenant_id) |> length == 1
end

test "[multi tenant] Update a tag name with relations and different contexts" do
  Post.add_categories(["public", "private"], prefix: @tenant_id)
  Post.add_tags(["private"], prefix: @tenant_id)

  assert Post.categories(prefix: @tenant_id) == ["private", "public"]
  assert Post.tags(prefix: @tenant_id) == ["private"]

  # Renaming in the categories context must not touch the tags context.
  Post.rename_category("private", "private_category", prefix: @tenant_id)

  assert Post.categories() == []
  assert Post.tags() == []
  assert Tag |> @repo.all |> length == 0
  assert Tagging |> @repo.all |> length == 0
  assert Post.categories(prefix: @tenant_id) == ["private_category", "public"]
  assert Post.tags(prefix: @tenant_id) == ["private"]
  assert Tag |> @repo.all(prefix: @tenant_id) |> length == 3
  assert Tagging |> @repo.all(prefix: @tenant_id) |> length == 3
end

# Aux functions

# Rebuilds the tenant schema from scratch and runs all migrations into it.
defp setup_tenant do
  migrations_path = Path.join(build_repo_priv(@repo), "migrations")
  # Drop the previous tenant to reset the data
  SQL.query(@repo, "DROP SCHEMA \"#{@tenant_id}\" CASCADE", [])
  # Create new tenant
  SQL.query(@repo, "CREATE SCHEMA \"#{@tenant_id}\"", [])
  Ecto.Migrator.run(@repo, migrations_path, :up, prefix: @tenant_id, all: true)
end
end
| 37.828194 | 111 | 0.672703 |
9e198b1ca9642dd6f257e5eee57605d770090d94 | 740 | exs | Elixir | mix.exs | elixir-twister/corsica | 698a916c0ab71d67a2fc356d72b3609623f9d3a5 | [
"MIT"
] | null | null | null | mix.exs | elixir-twister/corsica | 698a916c0ab71d67a2fc356d72b3609623f9d3a5 | [
"MIT"
] | null | null | null | mix.exs | elixir-twister/corsica | 698a916c0ab71d67a2fc356d72b3609623f9d3a5 | [
"MIT"
defmodule Corsica.Mixfile do
  use Mix.Project

  # Single source of truth for the package version and Hex description.
  @version "0.5.0"
  @description "Plug-based swiss-army knife for CORS requests."

  # Mix project configuration (package name, version, docs/Hex metadata).
  def project() do
    [app: :corsica,
     version: @version,
     elixir: "~> 1.0",
     deps: deps(),
     # Read the attribute directly: `@description()` (with parentheses) is
     # deprecated attribute-access syntax and triggers a compiler warning
     # on modern Elixir.
     description: @description,
     name: "Corsica",
     source_url: "https://github.com/whatyouhide/corsica",
     package: package()]
  end

  # OTP application configuration: runtime applications to start.
  def application() do
    [applications: [:logger, :cowboy, :plug]]
  end

  # Library dependencies; :ex_doc is only needed to build documentation.
  defp deps() do
    [{:cowboy, ">= 1.0.0"},
     {:plug, ">= 0.9.0"},
     {:ex_doc, "~> 0.15", only: :dev}]
  end

  # Hex package metadata.
  defp package() do
    [maintainers: ["Andrea Leopardi"],
     licenses: ["MIT"],
     links: %{"GitHub" => "https://github.com/whatyouhide/corsica"}]
  end
end
| 21.142857 | 68 | 0.582432 |
9e19abf3632168ccdeb3c06e7eede174be5bd2cc | 2,562 | ex | Elixir | test/support/test_adapter.ex | felipe-kosouski/triplex | daa68037217a17eedf9eef4c9e8ca88da4ad8870 | [
"MIT"
] | 344 | 2017-07-03T11:30:26.000Z | 2022-03-30T19:11:41.000Z | test/support/test_adapter.ex | felipe-kosouski/triplex | daa68037217a17eedf9eef4c9e8ca88da4ad8870 | [
"MIT"
] | 56 | 2017-06-29T01:55:35.000Z | 2022-03-15T22:09:47.000Z | test/support/test_adapter.ex | felipe-kosouski/triplex | daa68037217a17eedf9eef4c9e8ca88da4ad8870 | [
"MIT"
defmodule Triplex.TestAdapter do
  @moduledoc """
  Minimal in-memory Ecto adapter used by the test suite.

  Only the callbacks exercised by the migration tests are implemented; every
  other callback raises "not implemented" so unexpected usage fails loudly.
  Migration state is emulated in the *calling process* dictionary
  (`:migrated_versions`, `:in_transaction?`, `:last_command`), which gives
  each test process isolated state for free.
  """

  @behaviour Ecto.Adapter
  @behaviour Ecto.Adapter.Queryable
  @behaviour Ecto.Adapter.Schema
  @behaviour Ecto.Adapter.Transaction
  @behaviour Ecto.Adapter.Migration

  defmacro __before_compile__(_opts), do: :ok

  def ensure_all_started(_, _), do: {:ok, []}

  # Returns a dummy child spec (a task that sleeps forever) so the repo
  # supervisor has something to supervise, plus opaque adapter meta.
  def init(_opts) do
    child_spec = Supervisor.child_spec({Task, fn -> :timer.sleep(:infinity) end}, [])
    {:ok, child_spec, %{meta: :meta}}
  end

  # Callbacks the tests never exercise — fail loudly instead of silently.
  def checkout(_, _, _), do: raise("not implemented")
  def delete(_, _, _, _), do: raise("not implemented")
  def insert_all(_, _, _, _, _, _, _), do: raise("not implemented")
  def rollback(_, _), do: raise("not implemented")
  def stream(_, _, _, _, _), do: raise("not implemented")
  def update(_, _, _, _, _, _), do: raise("not implemented")

  ## Types
  # Identity loaders/dumpers: values pass through untouched.
  def loaders(_primitive, type), do: [type]
  def dumpers(_primitive, type), do: [type]
  def autogenerate(_), do: nil

  ## Queryable
  # Never cache query plans; keep the raw operation/query around.
  def prepare(operation, query), do: {:nocache, {operation, query}}

  # Migration emulation
  # SELECT over "schema_migrations": answer from the process dictionary.
  def execute(_, _, {:nocache, {:all, %{from: %{source: {"schema_migrations", _}}}}}, _, _) do
    {length(migrated_versions()), Enum.map(migrated_versions(), &List.wrap/1)}
  end

  # DELETE of a single migration version (a rollback) from "schema_migrations".
  def execute(
        _,
        _meta,
        {:nocache, {:delete_all, %{from: %{source: {"schema_migrations", _}}}}},
        [version],
        _
      ) do
    Process.put(:migrated_versions, List.delete(migrated_versions(), version))
    {1, nil}
  end

  # INSERT into "schema_migrations": record the migrated version.
  def insert(_, %{source: "schema_migrations"}, val, _, _, _) do
    version = Keyword.fetch!(val, :version)
    Process.put(:migrated_versions, [version | migrated_versions()])
    {:ok, []}
  end

  def in_transaction?(_), do: Process.get(:in_transaction?) || false

  # Fake transaction: flags the process as "in transaction" while `fun` runs
  # and notifies the test process so it can assert a transaction was used.
  def transaction(_mod, _opts, fun) do
    Process.put(:in_transaction?, true)
    send(test_process(), {:transaction, fun})
    {:ok, fun.()}
  after
    Process.put(:in_transaction?, false)
  end

  ## Migrations
  # No real locking — just notify the test process and run the function.
  def lock_for_migrations(_, query, _opts, fun) do
    send(test_process(), {:lock_for_migrations, fun})
    fun.(query)
  end

  # Records the last DDL command so tests can inspect what a migration did.
  def execute_ddl(_, command, _) do
    Process.put(:last_command, command)
    {:ok, []}
  end

  defp migrated_versions do
    Process.get(:migrated_versions, [])
  end

  # Configurable via the :triplex app env (see get_config/2); defaults to
  # "no DDL transactions", matching adapters like MySQL.
  def supports_ddl_transaction? do
    get_config(:supports_ddl_transaction?, false)
  end

  # The process to notify about transactions/locks; defaults to the caller.
  defp test_process do
    get_config(:test_process, self())
  end

  # Reads adapter settings from `config :triplex, Triplex.TestAdapter, ...`.
  defp get_config(name, default) do
    :triplex
    |> Application.get_env(__MODULE__, [])
    |> Keyword.get(name, default)
  end
end
| 26.412371 | 94 | 0.650273 |
9e19be44d296eed92309c9fc4e33e6df02363763 | 1,089 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dlp/lib/google_api/dlp/v2/connection.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DLP.V2.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.DLP.V2.
  """

  @type t :: Tesla.Env.client()

  # Auto-generated wiring: GoogleApi.Gax.Connection builds the Tesla client
  # with the OAuth scope, OTP app (for config lookup) and API base URL below.
  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage your data across Google Cloud Platform services
      "https://www.googleapis.com/auth/cloud-platform"
    ],
    otp_app: :google_api_dlp,
    base_url: "https://dlp.googleapis.com/"
end
9e19dac1fa8e402c8dd26d73de816f020c90acb2 | 3,525 | ex | Elixir | lib/hex/update_checker.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
] | null | null | null | lib/hex/update_checker.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
] | null | null | null | lib/hex/update_checker.ex | hrzndhrn/hex | f74e2ed979e74130bdc4a6974660aa986333f33f | [
"Apache-2.0"
defmodule Hex.UpdateChecker do
  @moduledoc false
  # GenServer that checks (at most once per @update_interval) whether a newer
  # Hex version is available, fetching the install CSV in a background Task.
  # `start_check/0` kicks off the fetch; `check/0` blocks (up to
  # :check_timeout) for the result and prints a warning if an update exists.

  use GenServer

  @name __MODULE__
  # How long check/0 waits for the background fetch before giving up (ms).
  @timeout 60_000
  # Minimum seconds between update checks (one day).
  @update_interval 24 * 60 * 60

  # :init_state lets tests seed the server state (e.g. a shorter timeout).
  def start_link(opts \\ []) do
    {init_state, opts} = Keyword.pop(opts, :init_state, %{})
    opts = Keyword.put_new(opts, :name, @name)
    GenServer.start_link(__MODULE__, init_state, opts)
  end

  def init(init_state) do
    {:ok, state(init_state)}
  end

  # Fire-and-forget: start the background version check.
  def start_check() do
    GenServer.cast(@name, :start_check)
  end

  # Block for the check result and print any user-facing message.
  def check() do
    GenServer.call(@name, :check)
    |> print_update_message()
  end

  # Starts the fetch Task only once, and only when online and due for a check;
  # otherwise the check is marked done immediately (treated as :latest).
  def handle_cast(:start_check, state) do
    if not state.started and not Hex.State.fetch!(:offline) and check_update?() do
      Task.async(fn ->
        {:installs, Hex.Repo.get_installs()}
      end)
      {:noreply, %{state | started: true}}
    else
      {:noreply, %{state | started: true, done: true}}
    end
  end

  # Clause order matters below: done -> waiting-for-result -> result-ready ->
  # never-started.
  def handle_call(:check, _from, %{started: true, done: true} = state) do
    {:reply, :already_checked, state}
  end

  # No result yet: defer the reply (stored :from) and arm a timeout so the
  # caller is not blocked forever; reply/2 answers later from handle_info.
  def handle_call(:check, from, %{started: true, reply: nil, check_timeout: timeout} = state) do
    {:noreply, %{state | from: from}, timeout}
  end

  # Result already arrived before anyone asked: hand it over now.
  def handle_call(:check, from, %{started: true} = state) do
    {:reply, state.reply, %{state | from: from, done: true}}
  end

  # check/0 called without start_check/0: nothing to report.
  def handle_call(:check, _from, %{started: false} = state) do
    {:reply, :latest, state}
  end

  # GenServer timeout armed above: the fetch took too long.
  def handle_info(:timeout, state) do
    state = reply(:timeout, state)
    {:noreply, state}
  end

  # Task result with the install CSV (or an error tuple from the HTTP layer).
  def handle_info({_ref, {:installs, result}}, state) do
    result =
      case result do
        {:ok, {code, body, _headers}} when code in 200..299 ->
          Hex.Repo.find_new_version_from_csv(body)
        other ->
          Hex.Shell.error("Failed to check for new Hex version")
          Hex.Utils.print_error_result(other)
          # Treat failure as latest
          :latest
      end
    # Record the check time so check_update?/0 throttles future checks.
    Hex.Registry.Server.last_update(:calendar.universal_time())
    state = reply(result, state)
    {:noreply, state}
  end

  # Normal exit of the fetch Task; nothing to do.
  def handle_info({:DOWN, _ref, :process, _pid, :normal}, state) do
    {:noreply, state}
  end

  defp print_update_message(:already_checked), do: :ok
  defp print_update_message(:latest), do: :ok

  defp print_update_message({:http_error, reason}) do
    Hex.Shell.error("Hex update check failed, HTTP ERROR: #{inspect(reason)}")
    :ok
  end

  defp print_update_message(:timeout) do
    Hex.Shell.error("Hex update check failed due to a timeout")
    :ok
  end

  defp print_update_message({:status, status}) do
    Hex.Shell.error("Hex update check failed, status code: #{status}")
    :ok
  end

  defp print_update_message({:version, version}) do
    Hex.Shell.warn(
      "A new Hex version is available (#{Hex.version()} < #{version}), " <>
        "please update with `mix local.hex`"
    )
    :ok
  end

  # Nobody is waiting: stash the result for a later :check call.
  defp reply(reply, %{from: nil} = state) do
    %{state | reply: reply}
  end

  # A caller is parked in handle_call: answer it and mark the check done.
  defp reply(reply, %{from: from} = state) do
    GenServer.reply(from, reply)
    %{state | from: nil, done: true}
  end

  # True when no prior check is recorded or the last one is older than
  # @update_interval.
  defp check_update?() do
    if last = Hex.Registry.Server.last_update() do
      now = :calendar.universal_time() |> :calendar.datetime_to_gregorian_seconds()
      last = :calendar.datetime_to_gregorian_seconds(last)
      now - last > @update_interval
    else
      true
    end
  end

  # Default server state, overridable via :init_state (used by tests).
  defp state(init_state) do
    state = %{
      from: nil,
      reply: nil,
      done: false,
      started: false,
      check_timeout: @timeout
    }
    Map.merge(state, init_state)
  end
end
| 24.479167 | 96 | 0.637447 |
9e19e5f1aaf377078d17c98fda84e1461a826262 | 2,114 | ex | Elixir | lib/glimesh_web/plugs/old_api_context_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh_web/plugs/old_api_context_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh_web/plugs/old_api_context_plug.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
defmodule GlimeshWeb.Plugs.OldApiContextPlug do
  @moduledoc """
  Authenticates requests to the legacy API.

  Accepts, in order of the `authorization` header content: a Bearer OAuth
  access token, a Client-ID, or — failing both — a logged-in browser session
  (`conn.assigns[:current_user]`). On success the resulting
  `Glimesh.Api.Access` is stored in the Absinthe context; otherwise the
  request is halted with a 401 JSON error.
  """

  @behaviour Plug

  import Plug.Conn
  import Phoenix.Controller, only: [json: 2]

  alias Glimesh.Oauth

  def init(opts), do: opts

  def call(conn, opts) do
    case parse_token_from_header(conn, opts) |> authorize(conn) do
      {:ok, %Glimesh.Api.Access{} = access} ->
        # Hand the access off to Absinthe resolvers via the context.
        conn
        |> Absinthe.Plug.put_options(
          context: %{
            access: access
          }
        )
      # OAuth-layer failures carry a structured Boruta error we can surface.
      {:error, %Boruta.Oauth.Error{} = reason} ->
        conn
        |> put_status(:unauthorized)
        |> json(%{
          errors: [
            %{
              message: reason.error_description,
              header_error: reason.error
            }
          ]
        })
        |> halt()
      # Anything else (bad header, unknown client, no session) is a plain 401.
      _ ->
        conn
        |> put_status(:unauthorized)
        |> json(%{errors: [%{message: "You must be logged in to access the api"}]})
        |> halt()
    end
  end

  # Bearer token: validate it with Boruta, then resolve the API access.
  defp authorize({:bearer, token}, _) do
    case Boruta.Oauth.Authorization.AccessToken.authorize(value: token) do
      {:ok, %Boruta.Oauth.Token{} = token} ->
        Oauth.get_api_access_from_token(token)
      {:error, msg} ->
        {:error, msg}
    end
  end

  # Client-ID header: look the OAuth client up and grant unprivileged access.
  # NOTE(review): a lookup miss presumably returns something other than the
  # two matched shapes; such a value would raise a CaseClauseError here —
  # verify Boruta.Config.clients().get_by/1's miss value.
  defp authorize({:client, client_id}, _) do
    client_id = Glimesh.OauthMigration.convert_client_id(client_id)
    case Boruta.Config.clients().get_by(id: client_id) do
      %Boruta.Oauth.Client{} = client ->
        Oauth.get_unprivileged_api_access_from_client(client)
      {:error, msg} ->
        {:error, msg}
    end
  end

  # No recognizable header (parse_token_from_header returned false).
  defp authorize(_, conn) do
    # Since this is the old API, try a session based auth
    if user = conn.assigns[:current_user] do
      Oauth.access_for_user(user, "public email chat streamkey follow")
    else
      {:error, :unauthorized}
    end
  end

  # Classifies the authorization header. Only a single header value is
  # accepted; both capitalizations of each scheme are tolerated.
  defp parse_token_from_header(conn, _opts) do
    case get_req_header(conn, "authorization") do
      ["Bearer " <> token] -> {:bearer, token}
      ["bearer " <> token] -> {:bearer, token}
      ["Client-ID " <> token] -> {:client, token}
      ["client-id " <> token] -> {:client, token}
      _ -> false
    end
  end
end
9e1a017c30414e0d377e205102c7348c46629889 | 230 | exs | Elixir | priv/repo/migrations/20210105160838_update_views.exs | Multiverse-io/familiar | 0bda7fa0fb4d6f8fdc701293b8824965a5d8b930 | [
"MIT"
] | 5 | 2021-05-05T14:54:06.000Z | 2022-02-19T13:00:48.000Z | priv/repo/migrations/20210105160838_update_views.exs | Multiverse-io/familiar | 0bda7fa0fb4d6f8fdc701293b8824965a5d8b930 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210105160838_update_views.exs | Multiverse-io/familiar | 0bda7fa0fb4d6f8fdc701293b8824965a5d8b930 | [
"MIT"
defmodule Familiar.Repo.Migrations.UpdateViews do
  use Ecto.Migration
  use Familiar

  def change do
    # Swap the :chickens materialized view to its version 2 definition;
    # on rollback, restore version 1.
    update_view :chickens, version: 2, revert: 1, materialized: true
    # Same for the :mix database function.
    update_function :mix, version: 2, revert: 1
  end
end
9e1a0f92aac2aa34e2c32c212ef1b45a40797651 | 197 | exs | Elixir | Elixir/fizz_buzz.exs | GauravPoosarla/FizzBuzz-Hacktoberfest-2021 | eefeeb7eb2045ecd03b736cf4c00da4e902a7278 | [
"Unlicense"
] | 13 | 2021-10-01T09:29:27.000Z | 2022-01-13T08:33:03.000Z | Elixir/fizz_buzz.exs | GauravPoosarla/FizzBuzz-Hacktoberfest-2021 | eefeeb7eb2045ecd03b736cf4c00da4e902a7278 | [
"Unlicense"
] | 76 | 2021-10-01T07:26:48.000Z | 2021-11-03T06:51:10.000Z | Elixir/fizz_buzz.exs | GauravPoosarla/FizzBuzz-Hacktoberfest-2021 | eefeeb7eb2045ecd03b736cf4c00da4e902a7278 | [
"Unlicense"
# Classic FizzBuzz over 1..100: multiples of 3 print "Fizz", multiples of 5
# print "Buzz", multiples of both print "FizzBuzz", and every other number
# prints itself.
label = fn
  n when rem(n, 15) == 0 -> "FizzBuzz"
  n when rem(n, 3) == 0 -> "Fizz"
  n when rem(n, 5) == 0 -> "Buzz"
  n -> n
end

Enum.each(1..100, fn n -> IO.puts(label.(n)) end)
9e1a34e2521b8c2d9d17b90f38a684b05b1ac43d | 1,039 | exs | Elixir | mix.exs | zillou/ex_twilio | 4812546f34caaa432d8ac3c486f67b9a5232af32 | [
"MIT"
] | null | null | null | mix.exs | zillou/ex_twilio | 4812546f34caaa432d8ac3c486f67b9a5232af32 | [
"MIT"
] | 1 | 2021-02-19T04:34:52.000Z | 2021-03-29T19:14:14.000Z | mix.exs | workpathco/ex_twilio | fe6948ee2c78084dade683c0b81c33f47461589f | [
"MIT"
defmodule ExTwilio.Mixfile do
  use Mix.Project

  # Repeated literals hoisted into attributes so they are defined once.
  @version "0.9.0"
  @source_url "https://github.com/danielberkompas/ex_twilio"

  # Mix project configuration: package identity, docs and dependencies.
  def project do
    [
      app: :ex_twilio,
      version: @version,
      elixir: "~> 1.2",
      name: "ExTwilio",
      description: "Twilio API library for Elixir",
      source_url: @source_url,
      package: package(),
      docs: docs(),
      deps: deps()
    ]
  end

  # OTP application configuration.
  def application do
    [extra_applications: [:logger]]
  end

  # ExDoc configuration used by `mix docs`.
  def docs do
    [readme: "README.md", main: ExTwilio]
  end

  # Runtime and dev/test-only dependencies.
  defp deps do
    [
      {:httpoison, ">= 0.9.0"},
      {:jason, "~> 1.2"},
      {:inflex, "~> 2.0"},
      {:joken, "~> 2.0"},
      {:dialyze, "~> 0.2.0", only: [:dev, :test]},
      {:mock, "~> 0.3", only: :test},
      {:ex_doc, ">= 0.0.0", only: [:dev, :test]},
      {:inch_ex, ">= 0.0.0", only: [:dev, :test]}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Daniel Berkompas"],
      licenses: ["MIT"],
      links: %{"Github" => @source_url}
    ]
  end
end
9e1a383a0547a2f599c947d08acc6ab4809b8055 | 1,043 | exs | Elixir | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/chapter-15/chain.exs | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 37 | 2015-02-01T23:16:39.000Z | 2021-12-22T16:50:48.000Z | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/chapter-15/chain.exs | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
] | 1 | 2017-03-02T04:55:48.000Z | 2018-01-14T10:51:11.000Z | talks-articles/languages-n-runtimes/elixir/book--programming-elixir-ge-1.6/chapter-15/chain.exs | abhishekkr/tutorials_as_code | f355dc62a5025b710ac6d4a6ac2f9610265fad54 | [
"MIT"
defmodule Chain do
  @moduledoc """
  Builds a chain of `n` processes, each of which increments an integer by one
  and forwards it to the previous process in the chain. Used to demonstrate
  how cheap BEAM process creation and messaging are.
  """

  @doc """
  Waits for one integer message, forwards it incremented by one to
  `prev_pid`, and then exits normally.
  """
  def counter(prev_pid) do
    receive do
      value -> send(prev_pid, value + 1)
    end
  end

  @doc """
  Spawns `n` chained counter processes, injects `0` at the far end of the
  chain, and returns `"Result is <n>"` once the incremented value has
  travelled back through every process.
  """
  def create_procs(n) do
    # Each step spawns a counter that will forward to the previous pid;
    # the accumulator starts at self() so the final value comes back here.
    last =
      Enum.reduce(1..n, self(), fn _i, send_to ->
        spawn(__MODULE__, :counter, [send_to])
      end)

    send(last, 0)

    receive do
      final_answer when is_integer(final_answer) ->
        "Result is #{inspect(final_answer)}"
    end
  end

  @doc """
  Times `create_procs/1` and prints `{microseconds, result}`.
  """
  def run(n) do
    :timer.tc(__MODULE__, :create_procs, [n]) |> IO.inspect()
  end
end
## Run with: elixir -r chain.exs -e "Chain.run(5)"
Chain.run(50_000)
# Larger chains exceed the BEAM's *default* process limit:
## elixir -r chain.exs -e "Chain.run(500_000)"
### 13:24:34.926 [error] Too many processes
### ** (SystemLimitError) a system limit has been reached
#
# That is only the default limit, not a hard VM limit; raise it with +P:
## elixir --erl "+P 1000000" -r chain.exs -e "Chain.run(500_000)"
#
#± % elixir --erl "+P 1000000" -r chapter-15/chain.exs -e "Chain.run(500_000)"
#{1596325, "Result is 500000"}
## If a run still fails after raising +P, you have hit your machine's real limit.
9e1a57c8d20aaada2e4e6f8342a5d8e79c25280b | 995 | ex | Elixir | lib/entangled_server.ex | inaka/beam_olympics-solver | a32fa18a4a7e03900b7274c4a0c4e8b82f1a9768 | [
"Apache-2.0"
] | 1 | 2018-10-11T06:58:02.000Z | 2018-10-11T06:58:02.000Z | lib/entangled_server.ex | inaka/beam_olympics-solver | a32fa18a4a7e03900b7274c4a0c4e8b82f1a9768 | [
"Apache-2.0"
] | null | null | null | lib/entangled_server.ex | inaka/beam_olympics-solver | a32fa18a4a7e03900b7274c4a0c4e8b82f1a9768 | [
"Apache-2.0"
defmodule EntangledServer do
  @moduledoc """
  Entangled process for bo_entangled
  """

  use GenServer

  @doc """
  Starts two entangled servers — one spinning `:up`, one `:down` — and
  registers each as the other's pair. Returns `{up_pid, down_pid}`.
  """
  def start() do
    {:ok, up} = GenServer.start(__MODULE__, :up)
    {:ok, down} = GenServer.start(__MODULE__, :down)
    :ok = GenServer.cast(up, down)
    :ok = GenServer.cast(down, up)
    {up, down}
  end

  # Callbacks

  @impl true
  def init(initial_spin), do: {:ok, %{spin: initial_spin, pair: nil}}

  @impl true
  def handle_call(:get_spin, _from, %{spin: spin} = state), do: {:reply, spin, state}

  def handle_call(:invert_spin, from, state) do
    # Flip the entangled partner first, then flip ourselves by reusing the
    # :just_invert_spin clause (which does not touch the partner).
    GenServer.call(state.pair, :just_invert_spin)
    handle_call(:just_invert_spin, from, state)
  end

  def handle_call(:just_invert_spin, _from, %{spin: spin} = state) do
    {:reply, :ok, %{state | spin: flip(spin)}}
  end

  # The only cast this server receives is the pair's pid right after start.
  @impl true
  def handle_cast(pair_pid, state), do: {:noreply, %{state | pair: pair_pid}}

  # Inverts a spin atom.
  defp flip(:up), do: :down
  defp flip(:down), do: :up
end
9e1a5a87af7adbef3ec61bc7163d269c51187779 | 13,915 | ex | Elixir | deps/absinthe_plug/lib/absinthe/plug.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/absinthe_plug/lib/absinthe/plug.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
] | null | null | null | deps/absinthe_plug/lib/absinthe/plug.ex | JoakimEskils/elixir-absinthe | d81e24ec7c7b1164e6d152101dd50422f192d7e9 | [
"MIT"
defmodule Absinthe.Plug do
  @moduledoc """
  A plug for using [Absinthe](https://hex.pm/packages/absinthe) (GraphQL).

  ## Usage

  In your router:

      plug Plug.Parsers,
        parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
        pass: ["*/*"],
        json_decoder: Poison

      plug Absinthe.Plug,
        schema: MyAppWeb.Schema

  If you want only `Absinthe.Plug` to serve a particular route, configure your
  router like:

      plug Plug.Parsers,
        parsers: [:urlencoded, :multipart, :json, Absinthe.Plug.Parser],
        pass: ["*/*"],
        json_decoder: Poison

      forward "/api",
        to: Absinthe.Plug,
        init_opts: [schema: MyAppWeb.Schema]

  See the documentation on `Absinthe.Plug.init/1` and the `Absinthe.Plug.opts`
  type for information on the available options.

  To add support for a GraphiQL interface, add a configuration for
  `Absinthe.Plug.GraphiQL`:

      forward "/graphiql",
        to: Absinthe.Plug.GraphiQL,
        init_opts: [schema: MyAppWeb.Schema]

  For more information, see the API documentation for `Absinthe.Plug`.

  ### Phoenix.Router

  If you are using [Phoenix.Router](https://hexdocs.pm/phoenix/Phoenix.Router.html), `forward` expects different arguments:

  #### Plug.Router

      forward "/graphiql",
        to: Absinthe.Plug.GraphiQL,
        init_opts: [
          schema: MyAppWeb.Schema,
          interface: :simple
        ]

  #### Phoenix.Router

      forward "/graphiql",
        Absinthe.Plug.GraphiQL,
        schema: MyAppWeb.Schema,
        interface: :simple

  For more information see [Phoenix.Router.forward/4](https://hexdocs.pm/phoenix/Phoenix.Router.html#forward/4).

  ## Included GraphQL Types

  This package includes additional types for use in Absinthe GraphQL schema and
  type modules.

  See the documentation on `Absinthe.Plug.Types` for more information.

  ## More Information

  For more on configuring `Absinthe.Plug` and how GraphQL requests are made,
  see [the guide](http://absinthe-graphql.org/guides/plug-phoenix/) at
  <http://absinthe-graphql.org>.
  """

  @behaviour Plug
  import Plug.Conn
  require Logger
  alias __MODULE__.Request

  # Options passed through verbatim to the pipeline (see init/1).
  @raw_options [:analyze_complexity, :max_complexity]

  @type function_name :: atom

  @typedoc """
  - `:adapter` -- (Optional) Absinthe adapter to use (default: `Absinthe.Adapter.LanguageConventions`).
  - `:context` -- (Optional) Initial value for the Absinthe context, available to resolvers. (default: `%{}`).
  - `:no_query_message` -- (Optional) Message to return to the client if no query is provided (default: "No query document supplied").
  - `:json_codec` -- (Optional) A `module` or `{module, Keyword.t}` dictating which JSON codec should be used (default: `Poison`). The codec module should implement `encode!/2` (e.g., `module.encode!(body, opts)`).
  - `:pipeline` -- (Optional) `{module, atom}` reference to a 2-arity function that will be called to generate the processing pipeline. (default: `{Absinthe.Plug, :default_pipeline}`).
  - `:document_providers` -- (Optional) A `{module, atom}` reference to a 1-arity function that will be called to determine the document providers that will be used to process the request. (default: `{Absinthe.Plug, :default_document_providers}`, which configures `Absinthe.Plug.DocumentProvider.Default` as the lone document provider). A simple list of document providers can also be given. See `Absinthe.Plug.DocumentProvider` for more information about document providers, their role in procesing requests, and how you can define and configure your own.
  - `:schema` -- (Required, if not handled by Mix.Config) The Absinthe schema to use. If a module name is not provided, `Application.get_env(:absinthe, :schema)` will be attempt to find one.
  - `:serializer` -- (Optional) Similar to `:json_codec` but allows the use of serialization formats other than JSON, like MessagePack or Erlang Term Format. Defaults to whatever is set in `:json_codec`.
  - `content_type` -- (Optional) The content type of the response. Should probably be set if `:serializer` option is used. Defaults to `"application/json"`.
  """
  @type opts :: [
    schema: module,
    adapter: module,
    context: map,
    json_codec: module | {module, Keyword.t},
    pipeline: {module, atom},
    no_query_message: String.t,
    document_providers: [Absinthe.Plug.DocumentProvider.t, ...] | Absinthe.Plug.DocumentProvider.t | {module, atom},
    analyze_complexity: boolean,
    max_complexity: non_neg_integer | :infinity,
    serializer: module | {module, Keyword.t},
    content_type: String.t,
  ]

  @doc """
  Serve an Absinthe GraphQL schema with the specified options.

  ## Options

  See the documentation for the `Absinthe.Plug.opts` type for details on the available options.
  """
  @spec init(opts :: opts) :: map
  def init(opts) do
    adapter = Keyword.get(opts, :adapter, Absinthe.Adapter.LanguageConventions)
    context = Keyword.get(opts, :context, %{})
    no_query_message = Keyword.get(opts, :no_query_message, "No query document supplied")
    pipeline = Keyword.get(opts, :pipeline, {__MODULE__, :default_pipeline})
    document_providers = Keyword.get(opts, :document_providers, {__MODULE__, :default_document_providers})
    # Normalize codec config to a %{module: _, opts: _} map; a bare module
    # gets empty opts.
    json_codec = case Keyword.get(opts, :json_codec, Poison) do
      module when is_atom(module) -> %{module: module, opts: []}
      other -> other
    end
    # The serializer defaults to the (already-normalized) JSON codec.
    serializer = case Keyword.get(opts, :serializer, json_codec) do
      module when is_atom(module) -> %{module: module, opts: []}
      {mod, opts} -> %{module: mod, opts: opts}
      other -> other
    end
    content_type = Keyword.get(opts, :content_type, "application/json")
    schema_mod = opts |> get_schema
    raw_options = Keyword.take(opts, @raw_options)
    log_level = Keyword.get(opts, :log_level, :debug)
    pubsub = Keyword.get(opts, :pubsub, nil)
    %{
      adapter: adapter,
      context: context,
      document_providers: document_providers,
      json_codec: json_codec,
      no_query_message: no_query_message,
      pipeline: pipeline,
      raw_options: raw_options,
      schema_mod: schema_mod,
      serializer: serializer,
      content_type: content_type,
      log_level: log_level,
      pubsub: pubsub,
    }
  end

  # Resolves the schema module (option, falling back to app config) and
  # validates it by calling into it; an invalid module raises ArgumentError.
  defp get_schema(opts) do
    default = Application.get_env(:absinthe, :schema)
    schema = Keyword.get(opts, :schema, default)
    try do
      Absinthe.Schema.types(schema)
    rescue
      UndefinedFunctionError ->
        raise ArgumentError, "The supplied schema: #{inspect schema} is not a valid Absinthe Schema"
    end
    schema
  end

  @doc """
  Parses, validates, resolves, and executes the given Graphql Document
  """
  @spec call(Plug.Conn.t, map) :: Plug.Conn.t | no_return
  def call(conn, config) do
    config = update_config(conn, config)
    {conn, result} = conn |> execute(config)
    # Map each execution outcome onto an HTTP response.
    case result do
      {:input_error, msg} ->
        conn
        |> send_resp(400, msg)
      # A subscription operation: switch to a chunked SSE-style response.
      {:ok, %{"subscribed" => topic}} ->
        conn
        |> subscribe(topic, config)
      {:ok, %{data: _} = result} ->
        conn
        |> encode(200, result, config)
      {:ok, %{errors: _} = result} ->
        conn
        |> encode(400, result, config)
      # A batch request returns a list of results.
      {:ok, result} when is_list(result) ->
        conn
        |> encode(200, result, config)
      {:error, {:http_method, text}, _} ->
        conn
        |> send_resp(405, text)
      {:error, error, _} when is_binary(error) ->
        conn
        |> send_resp(500, error)
    end
  end

  # Puts the pubsub module into the context, resolved in priority order:
  # plug option, existing context entry, then the Phoenix endpoint (which
  # implements the pubsub contract).
  defp update_config(conn, config) do
    pubsub = config[:pubsub] || config.context[:pubsub] || conn.private[:phoenix_endpoint]
    if pubsub do
      put_in(config, [:context, :pubsub], pubsub)
    else
      config
    end
  end

  # Subscribes to the topic and streams results to the client as chunks.
  def subscribe(conn, topic, %{context: %{pubsub: pubsub}} = config) do
    pubsub.subscribe(topic)
    conn
    |> put_resp_header("content-type", "text/event-stream")
    |> send_chunked(200)
    |> subscribe_loop(topic, config)
  end

  # Blocks on the process mailbox, pushing each subscription result to the
  # client; unsubscribes when the client disconnects or :close is received.
  # Every 30s of silence a ":ping" keepalive chunk is sent (which also
  # detects closed connections).
  def subscribe_loop(conn, topic, config) do
    receive do
      %{event: "subscription:data", payload: %{result: result}} ->
        case chunk(conn, "#{encode_json!(result, config)}\n\n") do
          {:ok, conn} ->
            subscribe_loop(conn, topic, config)
          {:error, :closed} ->
            Absinthe.Subscription.unsubscribe(config.context.pubsub, topic)
            conn
        end
      :close ->
        Absinthe.Subscription.unsubscribe(config.context.pubsub, topic)
        conn
    after
      30_000 ->
        case chunk(conn, ":ping\n\n") do
          {:ok, conn} ->
            subscribe_loop(conn, topic, config)
          {:error, :closed} ->
            Absinthe.Subscription.unsubscribe(config.context.pubsub, topic)
            conn
        end
    end
  end

  @doc """
  Sets the options for a given GraphQL document execution.

  ## Examples

      iex> Absinthe.Plug.put_options(conn, context: %{current_user: user})
      %Plug.Conn{}
  """
  @spec put_options(Plug.Conn.t, Keyword.t) :: Plug.Conn.t
  def put_options(%Plug.Conn{private: %{absinthe: absinthe}} = conn, opts) do
    # Merge into any options set earlier in the pipeline.
    opts = Map.merge(absinthe, Enum.into(opts, %{}))
    Plug.Conn.put_private(conn, :absinthe, opts)
  end
  def put_options(conn, opts) do
    Plug.Conn.put_private(conn, :absinthe, Enum.into(opts, %{}))
  end

  @doc false
  @spec execute(Plug.Conn.t, map) :: {Plug.Conn.t, any}
  def execute(conn, config) do
    # Per-request info threaded into the pipeline (HTTP method is validated
    # against the operation type there).
    conn_info = %{
      conn_private: (conn.private[:absinthe] || %{}) |> Map.put(:http_method, conn.method),
    }
    with {:ok, conn, request} <- Request.parse(conn, config),
         {:ok, request} <- ensure_processable(request, config) do
      {conn, run_request(request, conn_info, config)}
    else
      # Parse/validation errors are returned unchanged for call/2 to render.
      result ->
        {conn, result}
    end
  end

  @doc false
  @spec ensure_processable(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
  def ensure_processable(request, config) do
    with {:ok, request} <- ensure_documents(request, config) do
      ensure_document_provider(request)
    end
  end

  # Rejects empty requests and requests containing any query without a
  # document; stops at the first offending query.
  @spec ensure_documents(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
  defp ensure_documents(%{queries: []}, config) do
    {:input_error, config.no_query_message}
  end
  defp ensure_documents(%{queries: queries} = request, config) do
    Enum.reduce_while(queries, {:ok, request}, fn query, _acc ->
      query_status = case query do
        {:input_error, error_msg} -> {:input_error, error_msg}
        query -> ensure_document(query, config)
      end
      case query_status do
        {:ok, _query} -> {:cont, {:ok, request}}
        {:input_error, error_msg} -> {:halt, {:input_error, error_msg}}
      end
    end)
  end

  @spec ensure_document(Request.t, map) :: {:ok, Request.t} | {:input_error, String.t}
  defp ensure_document(%{document: nil}, config) do
    {:input_error, config.no_query_message}
  end
  defp ensure_document(%{document: _} = query, _) do
    {:ok, query}
  end

  # Every query must have been claimed by some document provider.
  @spec ensure_document_provider(Request.t) :: {:ok, Request.t} | {:input_error, String.t}
  defp ensure_document_provider(%{queries: queries} = request) do
    if Enum.all?(queries, &Map.has_key?(&1, :document_provider)) do
      {:ok, request}
    else
      {:input_error, "No document provider found to handle this request"}
    end
  end

  # Batched request: run all queries through the batch runner and re-attach
  # each query's extra keys around its payload.
  def run_request(%{batch: true, queries: queries} = request, conn, config) do
    Request.log(request, config.log_level)
    results =
      queries
      |> Absinthe.Plug.Batch.Runner.run(conn, config)
      |> Enum.zip(request.extra_keys)
      |> Enum.map(fn {result, extra_keys} ->
        Map.merge(extra_keys, %{
          payload: result
        })
      end)
    {:ok, results}
  end
  # Single-query request.
  def run_request(%{batch: false, queries: [query]} = request, conn_info, config) do
    Request.log(request, config.log_level)
    run_query(query, conn_info, config)
  end

  # Builds the query's pipeline and runs it; a pipeline error tuple is
  # returned unchanged by the `with`.
  def run_query(query, conn_info, config) do
    %{document: document, pipeline: pipeline} = Request.Query.add_pipeline(query, conn_info, config)
    with {:ok, %{result: result}, _} <- Absinthe.Pipeline.run(document, pipeline) do
      {:ok, result}
    end
  end

  #
  # PIPELINE
  #

  @doc """
  The default pipeline used to process GraphQL documents.

  This consists of Absinthe's default pipeline (as returned by `Absinthe.Pipeline.for_document/1`),
  with the `Absinthe.Plug.Validation.HTTPMethod` phase inserted to ensure that the correct
  HTTP verb is being used for the GraphQL operation type.
  """
  @spec default_pipeline(map, Keyword.t) :: Absinthe.Pipeline.t
  def default_pipeline(config, pipeline_opts) do
    config.schema_mod
    |> Absinthe.Pipeline.for_document(pipeline_opts)
    |> Absinthe.Pipeline.insert_after(Absinthe.Phase.Document.CurrentOperation,
      [
        {Absinthe.Plug.Validation.HTTPMethod, method: config.conn_private.http_method},
      ]
    )
  end

  #
  # DOCUMENT PROVIDERS
  #

  @doc """
  The default list of document providers that are enabled.

  This consists of a single document provider, `Absinthe.Plug.DocumentProvider.Default`, which
  supports ad hoc GraphQL documents provided directly within the request.

  For more information about document providers, see `Absinthe.Plug.DocumentProvider`.
  """
  @spec default_document_providers(map) :: [Absinthe.Plug.DocumentProvider.t]
  def default_document_providers(_) do
    [Absinthe.Plug.DocumentProvider.Default]
  end

  #
  # SERIALIZATION
  #

  # Serializes the body with the configured serializer and sends the response.
  @doc false
  @spec encode(Plug.Conn.t, 200 | 400 | 405 | 500, String.t, map) :: Plug.Conn.t | no_return
  def encode(conn, status, body, %{serializer: %{module: mod, opts: opts}, content_type: content_type}) do
    conn
    |> put_resp_content_type(content_type)
    |> send_resp(status, mod.encode!(body, opts))
  end

  # Encodes a value with the JSON codec (used for subscription chunks).
  def encode_json!(value, %{json_codec: json_codec}) do
    json_codec.module.encode!(value, json_codec.opts)
  end
end
| 33.130952 | 556 | 0.664966 |
9e1a5d2a7278d2e73881bbbcd998125f0b53fab5 | 340 | ex | Elixir | lib/agata/router.ex | alfonsojimenez/agata | 826ac3e992423ce6640166ea0e567f939e9bed7c | [
"MIT"
] | 5 | 2018-08-30T14:05:38.000Z | 2020-10-24T07:00:27.000Z | lib/agata/router.ex | alfonsojimenez/agata | 826ac3e992423ce6640166ea0e567f939e9bed7c | [
"MIT"
] | null | null | null | lib/agata/router.ex | alfonsojimenez/agata | 826ac3e992423ce6640166ea0e567f939e9bed7c | [
"MIT"
defmodule Agata.Router do
  @moduledoc """
  HTTP router: serves static assets, renders the email index page at `/`,
  and answers 404 for every other path.
  """

  use Plug.Router

  # Static mounts come first so asset requests short-circuit the pipeline.
  plug Plug.Static, at: "/public", from: :agata
  plug Plug.Static, at: "/tmp", from: "priv/tmp"

  plug :match
  plug :dispatch

  get "/" do
    # Render the index template with the stored emails as JSON.
    page = EEx.eval_file("views/index.eex", emails: Agata.Storage.to_json)
    send_resp(conn, 200, page)
  end

  match _ do
    send_resp(conn, 404, "Not Found")
  end
end
| 21.25 | 80 | 0.655882 |
9e1a7a400f8bf2b54400554953f1dae57a230386 | 5,108 | ex | Elixir | lib/mix/lib/mix/tasks/compile.erlang.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | 1 | 2019-06-11T20:22:20.000Z | 2019-06-11T20:22:20.000Z | lib/mix/lib/mix/tasks/compile.erlang.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/compile.erlang.ex | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compile.Erlang do
use Mix.Task
import Mix.Compilers.Erlang
@recursive true
@manifest ".compile.erlang"
@moduledoc """
Compiles Erlang source files.
When this task runs, it will first check the modification times of
all files to be compiled and if they haven't been
changed since the last compilation, it will not compile
them. If any of them have changed, it compiles
everything.
For this reason, the task touches your `:compile_path`
directory and sets the modification time to the current
time and date at the end of each compilation. You can
force compilation regardless of modification times by passing
the `--force` option.
## Command line options
* `--force` - forces compilation regardless of modification times
## Configuration
* `ERL_COMPILER_OPTIONS` - can be used to give default compile options.
The value must be a valid Erlang term. If the value is a list, it will
be used as is. If it is not a list, it will be put into a list.
* `:erlc_paths` - directories to find source files.
Defaults to `["src"]`.
* `:erlc_include_path` - directory for adding include files.
Defaults to `"include"`.
* `:erlc_options` - compilation options that apply to Erlang's compiler.
`:debug_info` is enabled by default.
For a list of the many more available options,
see [`:compile.file/2`](http://www.erlang.org/doc/man/compile.html#file-2).
"""
@doc """
Runs this task.
"""
@spec run(OptionParser.argv) :: :ok | :noop
def run(args) do
{opts, _, _} = OptionParser.parse(args, switches: [force: :boolean])
project = Mix.Project.config
source_paths = project[:erlc_paths]
include_path = to_erl_file project[:erlc_include_path]
compile_path = to_erl_file Mix.Project.compile_path(project)
files = Mix.Utils.extract_files(source_paths, [:erl])
erlc_options = project[:erlc_options] || []
erlc_options = erlc_options ++ [{:outdir, compile_path}, {:i, include_path}, :report]
erlc_options = Enum.map erlc_options, fn
{kind, dir} when kind in [:i, :outdir] ->
{kind, to_erl_file(dir)}
opt ->
opt
end
compile_path = Path.relative_to(compile_path, File.cwd!)
tuples = files
|> scan_sources(include_path, source_paths)
|> sort_dependencies
|> Enum.map(&annotate_target(&1, compile_path, opts[:force]))
Mix.Compilers.Erlang.compile(manifest(), tuples, fn
input, _output ->
file = to_erl_file(Path.rootname(input, ".erl"))
:compile.file(file, erlc_options)
end)
end
@doc """
Returns Erlang manifests.
"""
def manifests, do: [manifest]
defp manifest, do: Path.join(Mix.Project.manifest_path, @manifest)
@doc """
Cleans up compilation artifacts.
"""
def clean do
Mix.Compilers.Erlang.clean(manifest())
end
## Internal helpers
# Parses every candidate .erl file (silently dropping files :epp cannot
# parse) and returns the results in the original file order.
defp scan_sources(files, include_path, source_paths) do
  include_paths = [include_path | source_paths]

  files
  |> Enum.reduce([], fn file, acc -> scan_source(acc, file, include_paths) end)
  |> Enum.reverse()
end
# Prepends the parsed description of `file` onto `acc`; files that fail
# Erlang preprocessing are skipped.
defp scan_source(acc, file, include_paths) do
  erl_file = %{
    file: file,
    module: module_from_artifact(file),
    behaviours: [],
    compile: [],
    includes: [],
    invalid: false
  }

  case :epp.parse_file(to_erl_file(file), include_paths, []) do
    {:ok, forms} ->
      # The head form is the file attribute for the source itself; fold the rest.
      scanned = List.foldl(tl(forms), erl_file, &do_form(file, &1, &2))
      [scanned | acc]

    {:error, _reason} ->
      acc
  end
end
# Folds a single Erlang abstract form into the accumulated file description,
# recording real include files, behaviours and compile attributes.
defp do_form(file, form, erl) do
  case form do
    {:attribute, _line, :file, {include_file, _}} when file != include_file ->
      # Only track includes that exist on disk.
      case File.regular?(include_file) do
        true -> Map.update!(erl, :includes, &[include_file | &1])
        false -> erl
      end

    {:attribute, _line, :behaviour, behaviour} ->
      Map.update!(erl, :behaviours, &[behaviour | &1])

    {:attribute, _line, :compile, value} ->
      Map.update!(erl, :compile, &[value | &1])

    _other ->
      erl
  end
end
# Topologically sorts the parsed files so behaviours and parse transforms are
# compiled before the modules that use them. Falls back to the input order
# when the dependency graph is cyclic (topsort returns false).
defp sort_dependencies(erls) do
  graph = :digraph.new

  Enum.each(erls, fn erl ->
    :digraph.add_vertex(graph, erl.module, erl)
  end)

  Enum.each(erls, fn erl ->
    # Edges point dependency -> dependent; edges to modules outside this
    # project simply fail to insert and are ignored.
    Enum.each(erl.behaviours, &:digraph.add_edge(graph, &1, erl.module))

    Enum.each(erl.compile, fn
      {:parse_transform, transform} -> :digraph.add_edge(graph, transform, erl.module)
      _other -> :ok
    end)
  end)

  sorted =
    case :digraph_utils.topsort(graph) do
      false ->
        erls

      modules ->
        for module <- modules, do: elem(:digraph.vertex(graph, module), 1)
    end

  :digraph.delete(graph)
  sorted
end
# Tags a source as {:stale, src, beam} when compilation is forced or the
# source (or any of its includes) is newer than its .beam; {:ok, ...} otherwise.
defp annotate_target(erl, compile_path, force) do
  beam = Path.join(compile_path, "#{erl.module}#{:code.objfile_extension}")

  cond do
    force || Mix.Utils.stale?([erl.file | erl.includes], [beam]) ->
      {:stale, erl.file, beam}

    true ->
      {:ok, erl.file, beam}
  end
end
# Derives the module atom from a source path, e.g. "src/foo.erl" -> :foo.
defp module_from_artifact(artifact) do
  artifact
  |> Path.basename()
  |> Path.rootname()
  |> String.to_atom()
end
end
| 29.526012 | 90 | 0.637431 |
9e1aa94ca0e9756122e847acf5c605caf567b086 | 8,366 | ex | Elixir | lib/ex_aws/cloudformation/parsers.ex | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | lib/ex_aws/cloudformation/parsers.ex | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | lib/ex_aws/cloudformation/parsers.ex | Frameio/ex_aws | 3b335b6ed7932b5cf991323d26cf5497e1e6c122 | [
"Unlicense",
"MIT"
] | null | null | null | if Code.ensure_loaded?(SweetXml) do
defmodule ExAws.Cloudformation.Parsers do
import SweetXml, only: [sigil_x: 2, transform_by: 2]
# Lifecycle states CloudFormation reports for an individual stack resource.
@type resource_status ::
:create_in_progress | :create_failed | :create_complete |
:delete_in_progress | :delete_failed | :delete_complete | :delete_skipped |
:update_in_progress | :update_failed | :update_complete
# Stack-level states: every resource status plus rollback/review states that
# only apply to whole stacks.
@type stack_status ::
resource_status |
:rollback_in_progress | :rollback_failed | :rollback_complete |
:update_rollback_failed | :update_rollback_in_progress |
:update_rollback_complete | :review_in_progress |
:update_complete_cleanup_in_progress |
:update_rollback_complete_cleanup_in_progress
# CancelUpdateStack returns no payload beyond the request id.
def parse({:ok, %{body: xml}=resp}, :cancel_update_stack, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//CancelUpdateStackResponse", request_id: request_id_xpath() )
{:ok, Map.put(resp, :body, parsed_body)}
end
# ContinueUpdateRollback returns no payload beyond the request id.
def parse({:ok, %{body: xml}=resp}, :continue_update_rollback, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//ContinueUpdateRollbackResponse", request_id: request_id_xpath() )
{:ok, Map.put(resp, :body, parsed_body)}
end
# CreateStack returns the id of the newly created stack.
def parse({:ok, %{body: xml}=resp}, :create_stack, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//CreateStackResponse",
request_id: request_id_xpath(),
stack_id: ~x"./CreateStackResult/StackId/text()"s
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# DeleteStack returns no payload beyond the request id.
def parse({:ok, %{body: xml}=resp}, :delete_stack, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//DeleteStackResponse", request_id: request_id_xpath())
{:ok, Map.put(resp, :body, parsed_body)}
end
# DescribeStackResource: a single resource detail; the Metadata element is a
# JSON string decoded with the codec from `config`.
def parse({:ok, %{body: xml}=resp}, :describe_stack_resource, config) do
parsed_body = xml
|> SweetXml.xpath(~x"//DescribeStackResourceResponse",
request_id: request_id_xpath(),
resource: [
~x"./DescribeStackResourceResult/StackResourceDetail",
last_updated_timestamp: ~x"./LastUpdatedTimestamp/text()"s,
metadata: ~x"./Metadata/text()"so |> transform_by(&(parse_metadata_json(&1, config)))
] ++ resource_description_fields() ++ stack_fields()
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# DescribeStackResources: a list of resources ("member" elements).
def parse({:ok, %{body: xml}=resp}, :describe_stack_resources, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//DescribeStackResourcesResponse",
request_id: request_id_xpath(),
resources: [
~x"./DescribeStackResourcesResult/StackResources/member"l,
timestamp: ~x"./Timestamp/text()"s,
] ++ resource_description_fields() ++ stack_fields()
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# GetTemplate: the raw template body as a string.
def parse({:ok, %{body: xml} = resp}, :get_template, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//GetTemplateResponse",
template_body: ~x"./GetTemplateResult/TemplateBody/text()"s,
request_id: request_id_xpath()
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# GetTemplateSummary: template description, declared parameters and version.
def parse({:ok, %{body: xml} = resp}, :get_template_summary, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//GetTemplateSummaryResponse",
description: ~x"./GetTemplateSummaryResult/Description/text()"s,
parameters: [
~x"./GetTemplateSummaryResult/Parameters/member"l,
no_echo: ~x"./NoEcho/text()"s,
parameter_key: ~x"./ParameterKey/text()"s,
description: ~x"./Description/text()"s,
parameter_type: ~x"./ParameterType/text()"s,
],
metadata: ~x"./GetTemplateSummaryResult/Metadata/text()"s,
version: ~x"./GetTemplateSummaryResult/Version/text()"s,
request_id: request_id_xpath())
{:ok, Map.put(resp, :body, parsed_body)}
end
# ListStacks: paginated stack summaries; `next_token` continues pagination and
# the status string is converted to an atom such as :create_complete.
def parse({:ok, %{body: xml}=resp}, :list_stacks, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//ListStacksResponse",
next_token: ~x"./ListStacksResult/NextToken/text()"s,
request_id: request_id_xpath(),
stacks: [
~x"./ListStacksResult/StackSummaries/member"l,
id: ~x"./StackId/text()"s,
name: ~x"./StackName/text()"s,
status: ~x"./StackStatus/text()"s |> transform_by(&const_to_atom/1),
creation_time: ~x"./CreationTime/text()"s,
template_description: ~x"./TemplateDescription/text()"so,
resource_types: ~x"./ResourceTypes/member/text()"slo
]
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# ListStackResources: paginated resource summaries for one stack.
def parse({:ok, %{body: xml}=resp}, :list_stack_resources, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//ListStackResourcesResponse",
next_token: ~x"./ListStackResourcesResult/NextToken/text()"s,
request_id: request_id_xpath(),
resources: [
~x"./ListStackResourcesResult/StackResourceSummaries/member"l,
last_updated_timestamp: ~x"./LastUpdatedTimestamp/text()"s
] ++ resource_description_fields()
)
{:ok, Map.put(resp, :body, parsed_body)}
end
# DescribeStacks: extracts each stack's metadata and flattens its outputs from
# a list of %{key: k, value: v} entries into a %{k => v} map.
def parse({:ok, %{body: xml} = resp}, :describe_stacks, _) do
  parsed_body =
    xml
    |> SweetXml.xpath(~x"//DescribeStacksResponse",
      stacks: [
        ~x"./DescribeStacksResult/Stacks/member"lo,
        name: ~x"./StackName/text()"s,
        id: ~x"./StackId/text()"s,
        creation_time: ~x"./CreationTime/text()"s,
        status: ~x"./StackStatus/text()"s |> transform_by(&const_to_atom/1),
        disable_rollback: ~x"./DisableRollback/text()"s,
        outputs: [
          ~x"./Outputs/member"lo,
          key: ~x"./OutputKey/text()"s,
          value: ~x"./OutputValue/text()"s
        ]
      ],
      request_id: request_id_xpath())

  # Convert each stack's list of outputs to a key => value map.
  # (Renamed from camelCase `processStack` to the conventional snake_case and
  # dropped the redundant `fn stack -> process_stack.(stack) end` wrapper.)
  process_stack = fn stack ->
    Map.update!(stack, :outputs, &Map.new(&1, fn kv -> {kv[:key], kv[:value]} end))
  end

  processed_body = Map.update!(parsed_body, :stacks, &Enum.map(&1, process_stack))
  {:ok, Map.put(resp, :body, processed_body)}
end
# Error responses: parse the AWS ErrorResponse document but keep the original
# error triple shape, replacing the raw body with the parsed fields.
def parse({:error, {type, http_status_code, %{body: xml}}}, _, _) do
parsed_body = xml
|> SweetXml.xpath(~x"//ErrorResponse",
request_id: ~x"./RequestId/text()"s,
type: ~x"./Error/Type/text()"s,
code: ~x"./Error/Code/text()"s,
message: ~x"./Error/Message/text()"s,
detail: ~x"./Error/Detail/text()"s)
{:error, {type, http_status_code, parsed_body}}
end
# NOTE(review): this fallback is parse/2 while every clause above is parse/3,
# so it cannot catch unmatched parse/3 calls — confirm whether it should be
# `def parse(val, _, _)` or is intentionally a separate arity.
def parse(val, _), do: val
# Xpath sub-specs shared by every per-resource response parser.
defp resource_description_fields do
[
resource_status: ~x"./ResourceStatus/text()"s |> transform_by(&const_to_atom/1),
logical_resource_id: ~x"./LogicalResourceId/text()"s,
physical_resource_id: ~x"./PhysicalResourceId/text()"s,
resource_type: ~x"./ResourceType/text()"s
]
end
# Xpath sub-specs identifying the owning stack.
defp stack_fields do
[
stack_id: ~x"./StackId/text()"s,
stack_name: ~x"./StackName/text()"s
]
end
# Every AWS response carries the request id under ResponseMetadata.
defp request_id_xpath do
~x"./ResponseMetadata/RequestId/text()"s
end
# Converts an AWS status constant like "CREATE_COMPLETE" to :create_complete.
defp const_to_atom(string) do
  # Touch the atom table first so to_existing_atom cannot raise for any
  # known status constant.
  _load_status_atoms()

  string
  |> String.downcase()
  |> String.to_existing_atom()
end

# An absent Metadata element decodes to an empty map without touching the codec.
defp parse_metadata_json("", _), do: %{}

defp parse_metadata_json(json, config) do
  trimmed = String.trim(json)
  config.json_codec.decode!(trimmed)
end

# Forces every status atom to exist; the word-list sigil creates them at
# compile time and referencing it keeps them loaded.
defp _load_status_atoms do
  ~w(
    create_in_progress create_failed create_complete delete_in_progress delete_failed
    delete_complete delete_skipped update_in_progress update_failed update_complete
    rollback_in_progress rollback_failed rollback_complete update_rollback_failed
    update_rollback_in_progress update_rollback_complete review_in_progress
    update_complete_cleanup_in_progress update_rollback_complete_cleanup_in_progress
  )a
end
end
end
| 38.027273 | 115 | 0.595625 |
9e1aca8bea2323a06f44e4ba9e8877dfd2f010e2 | 569 | ex | Elixir | lib/movement/operation.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/movement/operation.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/movement/operation.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule Movement.Operation do
# Core payload: what to do, to which key, with what text.
defstruct action: nil,
key: nil,
text: nil,
# Source-file metadata carried along with the operation.
file_comment: nil,
file_index: 0,
value_type: nil,
# Flags describing the translation entry and how the operation runs.
plural: false,
locked: false,
batch: false,
# Foreign keys tying the operation to its translation, batch and project scope.
translation_id: nil,
rollbacked_operation_id: nil,
batch_operation_id: nil,
revision_id: nil,
version_id: nil,
document_id: nil,
project_id: nil,
previous_translation: nil
@type t :: %__MODULE__{}
end
| 25.863636 | 41 | 0.507909 |
9e1acd008c2b4e16e32e46b441a259b9fc295e18 | 1,781 | exs | Elixir | mix.exs | chrislaskey/assoc | 7bee95e7f5bc6222cd7c2a63e3f955d0db89b2b7 | [
"MIT"
] | 6 | 2019-01-31T23:31:42.000Z | 2020-10-06T20:05:34.000Z | mix.exs | chrislaskey/assoc | 7bee95e7f5bc6222cd7c2a63e3f955d0db89b2b7 | [
"MIT"
] | 2 | 2021-11-09T14:35:51.000Z | 2021-11-09T18:04:15.000Z | mix.exs | chrislaskey/assoc | 7bee95e7f5bc6222cd7c2a63e3f955d0db89b2b7 | [
"MIT"
] | 2 | 2019-01-31T23:53:33.000Z | 2021-11-05T21:47:26.000Z | defmodule Assoc.MixProject do
use Mix.Project
@version "0.2.3"
# Mix project definition: package identity, build paths and docs config.
def project do
[
app: :assoc,
version: @version,
# Explicit build/config/deps paths (umbrella-style layout kept local).
build_path: "./_build",
config_path: "./config/config.exs",
deps_path: "./deps",
lockfile: "./mix.lock",
elixir: "~> 1.12",
# Extra compile paths (test support code) depend on the Mix environment.
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
description: description(),
package: package(),
# Docs
name: "Assoc",
source_url: "https://github.com/chrislaskey/assoc",
docs: [
main: "readme",
extras: [
"README.md",
"CHANGELOG.md"
]
]
]
end
# Compile test-only support and setup helpers in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/setup", "test/support"]
defp elixirc_paths(_env), do: ["lib"]
# OTP application spec; the test environment additionally boots the test
# support application as the :mod callback.
def application do
  base = [extra_applications: [:logger]]

  if Mix.env() == :test do
    Keyword.put(base, :mod, {Assoc.Test.Application, []})
  else
    base
  end
end
# Dependencies: a real database stack only for tests, ex_doc only for docs.
defp deps do
[
{:ecto_sql, "~> 3.0", only: :test},
{:postgrex, ">= 0.0.0", only: :test},
{:ex_doc, "~> 0.25", only: :dev, runtime: false}
]
end
# Convenience mix aliases; `mix test` provisions and migrates the test DB first.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
# One-line package summary shown on hex.pm.
defp description,
  do: "An easy way to manage many_to_many, has_many and belongs_to Ecto associations"
# Hex package metadata: published files, license and repository link.
defp package() do
[
files: ~w(lib priv .formatter.exs mix.exs README* LICENSE* CHANGELOG*),
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/chrislaskey/assoc"}
]
end
end
| 23.12987 | 83 | 0.558675 |
9e1ae45a0e7c287eb91bee85c8b60a7e55e9162d | 3,736 | ex | Elixir | lib/siwapp/invoices/item.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | lib/siwapp/invoices/item.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | lib/siwapp/invoices/item.ex | jakon89/siwapp | b5f8fd43458deae72c76e434ed0c63b620cb97a4 | [
"MIT"
] | null | null | null | defmodule Siwapp.Invoices.Item do
@moduledoc """
Item
"""
use Ecto.Schema
import Ecto.Changeset
import Siwapp.Invoices.AmountHelper
alias Siwapp.Commons
alias Siwapp.Commons.Tax
alias Siwapp.Invoices.Invoice
# Fields accepted by changeset/3's cast.
@fields [
:quantity,
:discount,
:description,
:unitary_cost,
:invoice_id,
:virtual_unitary_cost
]
@type t :: %__MODULE__{
id: pos_integer() | nil,
quantity: pos_integer(),
discount: non_neg_integer(),
description: binary() | nil,
unitary_cost: integer(),
invoice_id: pos_integer() | nil
}
schema "items" do
field :quantity, :integer, default: 1
# Discount is a whole-number percentage (0..100).
field :discount, :integer, default: 0
field :description, :string
# Monetary amounts are stored as integers (smallest currency unit).
field :unitary_cost, :integer, default: 0
# Derived totals, computed by calculate/1 — never persisted.
field :net_amount, :integer, virtual: true, default: 0
field :taxes_amount, :map, virtual: true, default: %{}
# Human-entered cost as a decimal; converted to :unitary_cost on cast.
field :virtual_unitary_cost, :decimal, virtual: true
belongs_to :invoice, Invoice
many_to_many :taxes, Tax,
join_through: "items_taxes",
on_replace: :delete
end
@spec changeset(t(), map, binary | atom) :: Ecto.Changeset.t()
def changeset(item, attrs \\ %{}, currency) do
item
|> cast(attrs, @fields)
# Converts the human-entered decimal cost into the stored integer amount
# using the currency's exponent.
|> set_amount(:unitary_cost, :virtual_unitary_cost, currency)
# Validates and associates the tax names given in attrs.
|> assoc_taxes(attrs)
|> foreign_key_constraint(:invoice_id)
|> validate_length(:description, max: 20_000)
|> validate_number(:quantity, greater_than_or_equal_to: 0)
|> validate_number(:discount, greater_than_or_equal_to: 0, less_than_or_equal_to: 100)
# Finally recompute the virtual totals.
|> calculate()
end
@doc """
Performs the totals calculations for net_amount and taxes_amount fields.
"""
@spec calculate(Ecto.Changeset.t()) :: Ecto.Changeset.t()
def calculate(changeset) do
changeset
|> set_net_amount()
|> set_taxes_amount()
end
# Net amount = quantity * unitary_cost with the percentage discount applied,
# rounded to the nearest integer currency unit.
@spec set_net_amount(Ecto.Changeset.t()) :: Ecto.Changeset.t()
defp set_net_amount(changeset) do
  discount = get_field(changeset, :discount)
  unitary_cost = get_field(changeset, :unitary_cost)
  quantity = get_field(changeset, :quantity)

  discounted_total = quantity * unitary_cost * (1 - discount / 100)
  put_change(changeset, :net_amount, round(discounted_total))
end
@spec set_taxes_amount(Ecto.Changeset.t()) :: Ecto.Changeset.t()
defp set_taxes_amount(changeset) do
case get_field(changeset, :taxes) do
[] ->
# No taxes associated: leave the default %{} untouched.
changeset
taxes ->
net_amount = get_field(changeset, :net_amount)
# Build %{tax_name => Decimal amount}; tax.value is a whole-number
# percentage, converted through a string to get an exact Decimal.
taxes_amounts =
for tax <- taxes, into: %{} do
tax_val = Decimal.new("#{tax.value / 100}")
{tax.name, Decimal.mult(net_amount, tax_val)}
end
put_change(changeset, :taxes_amount, taxes_amounts)
end
end
# Associates the tax names supplied in attrs (case-insensitively, by
# upcasing) with the item, adding a changeset error for each unknown name.
@spec assoc_taxes(Ecto.Changeset.t(), map()) :: Ecto.Changeset.t()
defp assoc_taxes(changeset, attrs) do
attr_taxes_names = MapSet.new(get(attrs, :taxes) || [], &String.upcase/1)
# Known taxes come from the cached list in Commons.
all_taxes = Commons.list_taxes(:cache)
all_taxes_names = MapSet.new(all_taxes, & &1.name)
changeset =
Enum.reduce(attr_taxes_names, changeset, &check_wrong_taxes(&1, &2, all_taxes_names))
put_assoc(changeset, :taxes, Enum.filter(all_taxes, &(&1.name in attr_taxes_names)))
end
# Adds an error to the changeset when `tax_name` is not a known tax.
@spec check_wrong_taxes(String.t(), Ecto.Changeset.t(), MapSet.t()) :: Ecto.Changeset.t()
defp check_wrong_taxes(tax_name, changeset, all_taxes_names) do
  case MapSet.member?(all_taxes_names, tax_name) do
    true -> changeset
    false -> add_error(changeset, :taxes, "The tax #{tax_name} is not defined")
  end
end
# Fetches `key` from a map whose keys may be atoms or strings, preferring the
# atom form. Note `||` is intentional: a nil/false atom-key value falls
# through to the string key, exactly as the original behaved.
@spec get(map(), atom()) :: any()
defp get(map, key) when is_atom(key) do
  atom_value = Map.get(map, key)
  atom_value || Map.get(map, Atom.to_string(key))
end
end
| 29.417323 | 91 | 0.663276 |
9e1b08be9c01a44f9967d3a4b1c52af410a7d8d7 | 72 | ex | Elixir | apps/api_web/lib/api_web/views/client_portal/layout_view.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/lib/api_web/views/client_portal/layout_view.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/lib/api_web/views/client_portal/layout_view.ex | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z | defmodule ApiWeb.ClientPortal.LayoutView do
# Pulls in the shared view helpers defined by ApiWeb.Web for layout templates.
use ApiWeb.Web, :view
end
| 18 | 43 | 0.805556 |
9e1b4a40064fcf64511fe60a11cba46e54ee9950 | 2,361 | ex | Elixir | apps/ewallet_api/lib/ewallet_api/v1/controllers/transfer_controller.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_api/lib/ewallet_api/v1/controllers/transfer_controller.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/lib/ewallet_api/v1/controllers/transfer_controller.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletAPI.V1.TransferController do
use EWalletAPI, :controller
import EWalletAPI.V1.ErrorHandler
alias EWallet.{ComputedBalanceFetcher, TransactionGate}
plug :put_view, EWalletAPI.V1.ComputedBalanceView
# Transfers value between two addresses.
#
# BUG FIX: the original clause had two separate `when` keywords, which Elixir
# combines with OR semantics — any request with a non-nil "from_address"
# skipped the to_address/token_id/amount checks and reached TransactionGate
# with invalid data. The guards are now a single conjunction, so malformed
# payloads fall through to the :invalid_parameter clause below.
def transfer(conn, %{
      "from_address" => from_address,
      "to_address" => to_address,
      "token_id" => token_id,
      "amount" => amount
    } = attrs)
    when from_address != nil and to_address != nil and token_id != nil and
           is_integer(amount) do
  attrs
  |> Map.put("idempotency_token", conn.assigns[:idempotency_token])
  |> TransactionGate.process_with_addresses()
  |> respond_with_balances(conn)
end

def transfer(conn, _attrs), do: handle_error(conn, :invalid_parameter)
# Credit adds funds to a user's balance, debit removes them; both share the
# same validation and processing path.
def credit(conn, attrs), do: credit_or_debit(conn, TransactionGate.credit_type, attrs)
def debit(conn, attrs), do: credit_or_debit(conn, TransactionGate.debit_type, attrs)
# Validates the payload and forwards it to the transaction gate with the
# request's idempotency token attached.
defp credit_or_debit(conn, type, %{
"provider_user_id" => provider_user_id,
"token_id" => token_id,
"amount" => amount} = attrs
)
when provider_user_id != nil
and token_id != nil
and is_integer(amount)
do
attrs
|> Map.put("type", type)
|> Map.put("idempotency_token", conn.assigns[:idempotency_token])
|> TransactionGate.process_credit_or_debit()
|> respond_with_balances(conn)
end
# Any payload missing required fields (or with a non-integer amount) is rejected.
defp credit_or_debit(conn, _type, _attrs), do: handle_error(conn, :invalid_parameter)
# On a successful transfer, re-fetch the computed balance for every involved
# address and render them all; if any fetch failed, surface that error instead.
defp respond_with_balances({:ok, _transfer, balances, minted_token}, conn) do
addresses = Enum.map(balances, fn balance ->
case ComputedBalanceFetcher.get(minted_token.friendly_id, balance.address) do
{:ok, address} -> address
error -> error
end
end)
# NOTE(review): when a fetch error is found it is returned as a raw
# {:error, code, description} tuple rather than a conn — confirm whether
# this should go through respond/2 / handle_error instead.
case Enum.find(addresses, fn(e) -> match?({:error, _code, _description}, e) end) do
nil -> respond({:ok, addresses}, conn)
error -> error
end
end
defp respond_with_balances({:error, code}, conn), do: handle_error(conn, code)
defp respond_with_balances({:error, _transfer, code, description}, conn) do
handle_error(conn, code, description)
end
# Render the balances view on success, or the matching error response.
defp respond({:ok, addresses}, conn) do
render(conn, :balances, %{addresses: addresses})
end
defp respond({:error, code, description}, conn) do
handle_error(conn, code, description)
end
defp respond({:error, code}, conn), do: handle_error(conn, code)
| 32.791667 | 88 | 0.700127 |
9e1b8ad18e8214e00d29d91570b8d5556e4d0958 | 101 | exs | Elixir | test/iso_8583/encode/encode_test.exs | zemuldo/iso_8583_elixir | bc8782a20cd9bc5fa78c9c57338a68f459f9929c | [
"MIT"
] | 7 | 2020-03-02T07:12:49.000Z | 2022-03-15T02:03:30.000Z | test/iso_8583/encode/encode_test.exs | zemuldo/iso_8583_elixir | bc8782a20cd9bc5fa78c9c57338a68f459f9929c | [
"MIT"
] | 1 | 2020-10-21T05:19:44.000Z | 2020-10-24T20:11:47.000Z | test/iso_8583/encode/encode_test.exs | zemuldo/iso_8583_elixir | bc8782a20cd9bc5fa78c9c57338a68f459f9929c | [
"MIT"
] | 2 | 2020-03-02T07:14:50.000Z | 2022-03-15T17:18:57.000Z | defmodule ISO8583.Test.EncodeTest do
use ExUnit.Case
alias ISO8583.Encode
# Runs the iex> examples embedded in ISO8583.Encode's docs as tests.
doctest Encode
end
| 12.625 | 36 | 0.782178 |
9e1ba54d09b9823249963c2fed7f86e226f3d434 | 2,156 | exs | Elixir | test/surgex/refactor/refactor_test.exs | surgeventures/surgex | b3acdd6a9a010c26f0081b9cb23aeb072459be30 | [
"MIT"
] | 10 | 2017-09-15T08:55:48.000Z | 2021-07-08T09:26:24.000Z | test/surgex/refactor/refactor_test.exs | surgeventures/surgex | b3acdd6a9a010c26f0081b9cb23aeb072459be30 | [
"MIT"
] | 17 | 2017-07-24T11:27:22.000Z | 2022-01-24T22:28:18.000Z | test/surgex/refactor/refactor_test.exs | surgeventures/surgex | b3acdd6a9a010c26f0081b9cb23aeb072459be30 | [
"MIT"
] | 2 | 2018-04-12T15:01:00.000Z | 2018-05-27T12:14:34.000Z | defmodule Surgex.RefactorTest do
use ExUnit.Case
import ExUnit.CaptureIO
alias Surgex.Refactor
# Recreates the fixture directory before every test. The second file is the
# interesting case: its filename (wrong_xyz_mod.ex) deliberately does NOT
# match its module name (InvalidXYZMod), so map_filenames should flag it.
setup do
File.rm_rf!("test/samples")
File.mkdir_p!("test/samples")
File.write(
"test/samples/valid_xyz_mod.ex",
"defmodule Surgex.Refactor.MapFilenamesTest.ValidXYZMod, do: nil"
)
File.write(
"test/samples/wrong_xyz_mod.ex",
"defmodule Surgex.Refactor.MapFilenamesTest.InvalidXYZMod, do: nil"
)
end
test "expands recursively when no path is given" do
result =
capture_io(fn ->
Refactor.call([
"map_filenames"
])
end)
assert result =~ ~r(wrong_xyz_mod.ex)
File.rm_rf!("test/samples")
end
test "handles wrong path" do
result =
capture_io(fn ->
Refactor.call([
"map_filenames",
"wrong_path"
])
end)
assert result =~ ~r/No files found/
File.rm_rf!("test/samples")
end
test "map filenames without fixing them" do
result =
capture_io(fn ->
Refactor.call([
"map_filenames",
"test/samples"
])
end)
assert result =~ ~r/You're in a simulation mode, pass the --fix option to apply the action./
assert result =~ ~r(/wrong_xyz_mod.ex => test/.*/invalid_xyz_mod.ex)
refute result =~ ~r(/valid_xyz_mod.ex)
refute result =~ ~r/Renamed \d+ file\(s\)/
assert File.exists?("test/samples/wrong_xyz_mod.ex")
refute File.exists?("test/samples/invalid_xyz_mod.ex")
File.rm_rf!("test/samples")
end
test "map filenames with fixing them" do
result =
capture_io(fn ->
Refactor.call([
"map_filenames",
"test/samples",
"--fix"
])
end)
refute result =~ ~r/You're in a simulation mode, pass the --fix option to apply the action./
assert result =~ ~r(/wrong_xyz_mod.ex => test/.*/invalid_xyz_mod.ex)
refute result =~ ~r(/valid_xyz_mod.ex)
assert result =~ ~r/Renamed 1 file\(s\)/
refute File.exists?("test/samples/wrong_xyz_mod.ex")
assert File.exists?("test/samples/invalid_xyz_mod.ex")
File.rm_rf!("test/samples")
end
end
| 24.781609 | 96 | 0.614564 |
9e1bbdff05cc59c673c087a1a02e3a060aa76b73 | 1,184 | ex | Elixir | clients/logging/lib/google_api/logging/v2/model/log_entry_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/model/log_entry_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/model/log_entry_operation.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Generated model (swagger-codegen) — keep edits to regeneration, not by hand.
defmodule GoogleApi.Logging.V2.Model.LogEntryOperation do
@moduledoc """
Additional information about a potentially long-running operation with which a log entry is associated.
"""
@derive [Poison.Encoder]
# first/last flag the first and last entry of the operation; id identifies the
# operation and producer names the service that created it.
defstruct [
:"first",
:"id",
:"last",
:"producer"
]
end
# Decoding is the identity transform: no nested fields need post-processing.
defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.LogEntryOperation do
def decode(value, _options) do
value
end
end
| 29.6 | 105 | 0.741554 |
9e1bc1a5d9cc135292a286c323cd88c90bca6eaf | 1,495 | ex | Elixir | lib/api/graphql/resolvers.ex | nunopolonia/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | 1 | 2017-09-10T23:51:40.000Z | 2017-09-10T23:51:40.000Z | lib/api/graphql/resolvers.ex | nunopolonia/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | 24 | 2018-03-14T18:17:00.000Z | 2021-03-01T07:47:53.000Z | lib/api/graphql/resolvers.ex | portosummerofcode/psc-api | 2e358503851cc04cdaa89201a3f56586f8746736 | [
"MIT"
] | null | null | null | defmodule Api.GraphQL.Resolvers do
alias Absinthe.Relay.Connection
alias Api.Repo
alias Api.GraphQL.Errors
alias Api.GraphQL.EctoExtensions
# Builds a resolver that loads a record of the given Ecto schema by the
# `id` argument of the GraphQL field.
def by_id(type) when is_atom(type) do
  fn %{id: id}, _info ->
    record = Repo.get(type, id)
    {:ok, record}
  end
end
# Builds a resolver that loads a record of `type` matching the field's args.
# NOTE(review): the second parameter `_attr` is ignored — the lookup uses ALL
# resolver args via Repo.get_by. Confirm whether filtering by the single
# named attribute was the original intent.
def by_attr(type, _attr) when is_atom(type) do
fn args, _info ->
{:ok, type |> Repo.get_by(args)}
end
end
# Resolves each key path in `arg_keys` against the resolver context
# (%{source: ..., args: ..., info: ...}), preserving order.
#
# Simplified: the original reversed the list and rebuilt it with a
# prepend-reduce, which is exactly Enum.map in disguise.
defp collect_args(arg_keys, source, args, info) do
  ctx = %{source: source, args: args, info: info}
  Enum.map(arg_keys, &get_in(ctx, &1))
end
# Builds a paginated resolver over the parent record's association(s):
# the query comes from Ecto.assoc on the field's source struct.
def assoc(assocs) do
all(
fn source ->
Ecto.assoc(source, assocs)
end,
[[:source]]
)
end
# Builds a paginated resolver over every record of the given schema.
def all(type) when is_atom(type), do: all(fn -> type end, [])
# Builds a Relay connection resolver: `fun` (called with the context values
# named by `arg_keys`) must return an Ecto queryable, which is then ordered
# from the field args and paginated via Absinthe.Relay.
def all(fun, arg_keys) when is_function(fun) do
fn source, args, info ->
fun
|> apply(collect_args(arg_keys, source, args, info))
|> EctoExtensions.orderable(args)
|> Connection.from_query(&Repo.all/1, args)
end
end
# Wraps `fun` (called with the context values named by `arg_keys`) in a
# resolver whose errors are normalized for GraphQL by Errors.graphqlize.
def run_with_args(fun, arg_keys) do
fn source, args, info ->
fun
|> apply(collect_args(arg_keys, source, args, info))
end
|> Errors.graphqlize
end
# Wraps a one-argument function of the field args in a graphqlized resolver.
def run(fun) do
fn args, _info ->
fun.(args)
end
|> Errors.graphqlize
end
#
# Non-generic resolvers

# Resolves to the authenticated user carried in the Absinthe context.
def me(_args, %{context: %{current_user: user}}), do: {:ok, user}
end
| 19.933333 | 61 | 0.6 |
9e1bc9d50edaff4ce7a0edefa6de05b69a05b4e0 | 690 | ex | Elixir | debian/preinst.ex | flu0r1ne/nebula | aaf1c92d38d38defd88fa3755a2dabd2d0d9094a | [
"MIT"
] | null | null | null | debian/preinst.ex | flu0r1ne/nebula | aaf1c92d38d38defd88fa3755a2dabd2d0d9094a | [
"MIT"
] | null | null | null | debian/preinst.ex | flu0r1ne/nebula | aaf1c92d38d38defd88fa3755a2dabd2d0d9094a | [
"MIT"
] | null | null | null | #!/bin/sh
# preinst script for nebula
#
# see: dh_installdeb(1)
# Abort immediately if any command below fails.
set -e
# summary of how this script can be called:
# * <new-preinst> `install'
# * <new-preinst> `install' <old-version>
# * <new-preinst> `upgrade' <old-version>
# * <old-preinst> `abort-upgrade' <new-version>
# for details, see https://www.debian.org/doc/debian-policy/ or
# the debian-policy package
case "$1" in
install|upgrade)
# Nothing to do before install/upgrade for this package.
;;
abort-upgrade)
# Nothing to roll back either.
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
#DEBHELPER#
exit 0
| 19.166667 | 63 | 0.627536 |
9e1c6fd915abfee5cb4513dfa973a3731bc20b52 | 2,283 | ex | Elixir | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_impression.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_impression.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/playable_locations/lib/google_api/playable_locations/v3/model/google_maps_playablelocations_v3_impression.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Generated model (elixir code generator) — keep edits to regeneration.
defmodule GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3Impression do
@moduledoc """
Encapsulates impression event details.
## Attributes
* `gameObjectType` (*type:* `integer()`, *default:* `nil`) - An arbitrary, developer-defined type identifier for each type of game object used in your game. Since players interact with differ types of game objects in different ways, this field allows you to segregate impression data by type for analysis. You should assign a unique `game_object_type` ID to represent a distinct type of game object in your game. For example, 1=monster location, 2=powerup location.
* `impressionType` (*type:* `String.t`, *default:* `nil`) - Required. The type of impression event.
* `locationName` (*type:* `String.t`, *default:* `nil`) - Required. The name of the playable location.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:gameObjectType => integer(),
:impressionType => String.t(),
:locationName => String.t()
}
# field/1 comes from GoogleApi.Gax.ModelBase and declares each JSON field.
field(:gameObjectType)
field(:impressionType)
field(:locationName)
end
defimpl Poison.Decoder,
for: GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3Impression do
alias GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3Impression, as: Model

# Decoding is delegated to the generated model module (from ModelBase).
def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder,
for: GoogleApi.PlayableLocations.V3.Model.GoogleMapsPlayablelocationsV3Impression do
# Encoding is handled generically by the shared ModelBase encoder.
def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.362069 | 469 | 0.742882 |
9e1c76b7bb4af06c95ccbb92d75f69a6bf7baa6e | 74,595 | ex | Elixir | data/web/deps/postgrex/lib/postgrex/protocol.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | data/web/deps/postgrex/lib/postgrex/protocol.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | data/web/deps/postgrex/lib/postgrex/protocol.ex | lydiadwyer/trains_elixir | 16da18d4582307f4967b6cce7320e9aa08a849c3 | [
"Apache-2.0"
] | null | null | null | defmodule Postgrex.Protocol do
@moduledoc false
alias Postgrex.Types
alias Postgrex.Query
alias Postgrex.Stream
import Postgrex.Messages
import Postgrex.BinaryUtils
require Logger
@behaviour DBConnection
# Default connect/recv timeout in milliseconds.
@timeout 5000
# Base socket options; the socket is passive (active: false) by default and
# switched to active-once only while checked in (see checkin/checkout).
@sock_opts [packet: :raw, mode: :binary, active: false]
@max_packet 64 * 1024 * 1024 # max raw receive length
@nonposix_errors [:closed, :timeout]
# `postgres` tracks the server-reported transaction status (:idle,
# :transaction, :failed) or a {status, ref} tuple while locked for COPY.
# `buffer` holds unparsed socket bytes, or :active_once while checked in.
defstruct [sock: nil, connection_id: nil, connection_key: nil, peer: nil,
types: nil, null: nil, timeout: nil, parameters: %{}, queries: nil,
postgres: :idle, transactions: :naive, buffer: nil]
@type state :: %__MODULE__{sock: {module, any},
connection_id: nil | pos_integer,
connection_key: nil | pos_integer,
peer: nil | {:inet.ip_address, :inet.port_number},
types: (nil | reference | Postgrex.TypeServer.table),
null: atom,
timeout: timeout,
parameters: %{binary => binary} | reference,
queries: nil | :ets.tid,
postgres: :idle | :transaction | :failed,
transactions: :strict | :naive,
buffer: nil | binary | :active_once}
@type notify :: ((binary, binary) -> any)
# Statements for transaction control are pre-prepared under this name prefix
# during the handshake; user queries may not use the prefix (reserved_error).
@reserved_prefix "POSTGREX_"
@reserved_queries ["BEGIN",
"COMMIT",
"ROLLBACK",
"SAVEPOINT postgrex_savepoint",
"RELEASE SAVEPOINT postgrex_savepoint",
"ROLLBACK TO SAVEPOINT postgrex_savepoint",
"SAVEPOINT postgrex_query",
"RELEASE SAVEPOINT postgrex_query",
"ROLLBACK TO SAVEPOINT postgrex_query"]
@spec connect(Keyword.t) ::
{:ok, state} |
{:error, Postgrex.Error.t | %DBConnection.ConnectionError{}}
# DBConnection callback: open the TCP socket, then run the handshake
# (optional SSL, startup, auth, type bootstrap, reserved-query preparation).
def connect(opts) do
# NOTE(review): to_char_list is the pre-1.3 spelling of to_charlist.
host = Keyword.fetch!(opts, :hostname) |> to_char_list
port = opts[:port] || 5432
timeout = opts[:timeout] || @timeout
sock_opts = [send_timeout: timeout] ++ (opts[:socket_options] || [])
custom = opts[:extensions] || []
decode_bin = opts[:decode_binary] || :copy
ext_opts = [decode_binary: decode_bin]
extensions = custom ++ Postgrex.Utils.default_extensions(ext_opts)
ssl? = opts[:ssl] || false
types? = Keyword.fetch!(opts, :types)
null = opts[:null]
# The case expressions double as validation: an unknown value raises.
transactions =
case opts[:transactions] || :naive do
:naive -> :naive
:strict -> :strict
end
prepare =
case opts[:prepare] || :named do
:named -> :named
:unnamed -> :unnamed
end
s = %__MODULE__{timeout: timeout, postgres: :idle,
transactions: transactions, null: null}
# types_key identifies a shared type table; nil (when types? is false)
# skips type bootstrapping entirely.
types_key = if types?, do: {host, port, Keyword.fetch!(opts, :database), decode_bin, custom}
status = %{opts: opts, types_key: types_key, types_ref: nil,
types_table: nil, build_types: nil, extensions: extensions,
prepare: prepare, ssl: ssl?}
case connect(host, port, sock_opts ++ @sock_opts, s) do
{:ok, s} -> handshake(s, status)
{:error, _} = error -> error
end
end
@spec disconnect(Exception.t, state) :: :ok
# DBConnection callback: best-effort cleanup. Closes the socket, drains any
# active-once message, drops cached parameters/queries and sends a cancel
# request for any in-flight query.
def disconnect(_, s) do
sock_close(s)
_ = recv_buffer(s)
delete_parameters(s)
queries_delete(s)
cancel_request(s)
:ok
end
@spec ping(state) ::
{:ok, state} |
{:disconnect, Postgrex.Error.t | %DBConnection.ConnectionError{}, state}
# In strict mode being inside a transaction while idle in the pool is a
# protocol violation.
def ping(%{postgres: :transaction, transactions: :strict} = s) do
sync_error(s, :transaction)
end
# Pings by sending a sync and reading the ready message; `buffer` tells us
# whether the socket is currently in active-once mode or fully buffered.
def ping(%{buffer: buffer} = s) do
status = %{notify: notify([]), mode: :transaction, sync: :sync}
s = %{s | buffer: nil}
case buffer do
:active_once ->
sync(s, status, :active_once, buffer)
_ when is_binary(buffer) ->
sync(s, status, nil, buffer)
end
end
@spec checkout(state) ::
{:ok, state} |
{:disconnect, Postgrex.Error.t | %DBConnection.ConnectionError{}, state}
def checkout(%{postgres: :transaction, transactions: :strict} = s) do
sync_error(s, :transaction)
end
# Checkout flips the socket back to passive mode and collects any bytes that
# arrived while it was active-once.
def checkout(%{buffer: :active_once} = s) do
case setopts(s, [active: :false], :active_once) do
:ok -> recv_buffer(s)
{:disconnect, _, _} = dis -> dis
end
end
@spec checkin(state) ::
{:ok, state} |
{:disconnect, Postgrex.Error.t | %DBConnection.ConnectionError{}, state}
def checkin(%{postgres: :transaction, transactions: :strict} = s) do
sync_error(s, :transaction)
end
# Checkin hands the socket to the VM in active-once mode so asynchronous
# notices/notifications are delivered via handle_info while pooled.
def checkin(%{buffer: buffer} = s) when is_binary(buffer) do
activate(s, buffer)
end
@spec handle_prepare(Postgrex.Query.t, Keyword.t, state) ::
{:ok, Postgrex.Query.t, state} |
{:error, %ArgumentError{} | Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
# Connection is locked for COPY ({_, ref} postgres state): refuse to prepare.
def handle_prepare(query, _, %{postgres: {_, _}} = s) do
lock_error(s, :prepare, query)
end
# User queries may not use the POSTGREX_ reserved name prefix.
def handle_prepare(%Query{name: @reserved_prefix <> _} = query, _, s) do
reserved_error(query, s)
end
# No query cache (queries: nil, i.e. prepare: :unnamed): parse/describe the
# query under the unnamed statement.
def handle_prepare(%Query{types: nil} = query, opts, %{queries: nil, buffer: buffer} = s) do
{sync, next} = prepare(opts)
status = %{notify: notify(opts), mode: mode(opts), sync: sync}
parse_describe(%{s | buffer: nil}, status, unnamed(query), buffer, next)
end
# Named prepare: close any previous statement of the same name first.
def handle_prepare(%Query{types: nil} = query, opts, %{buffer: buffer} = s) do
{sync, next} = prepare(opts)
status = %{notify: notify(opts), mode: mode(opts), sync: sync}
close_parse_describe(%{s | buffer: nil}, status, query, buffer, next)
end
# types already set and matching this connection: double prepare is a
# caller error; mismatching types means the query came from another pool.
def handle_prepare(%Query{types: types} = query, _, %{types: types} = s) do
query_error(s, "query #{inspect query} has already been prepared")
end
def handle_prepare(%Query{} = query, _, s) do
query_error(s, "query #{inspect query} has invalid types for the connection")
end
@spec handle_execute(Postgrex.Parameters.t, nil, Keyword.t, state) ::
{:ok, %{binary => binary}, state} |
{:error, Postgrex.Error.t, state}
# Resolves the connection's cached server parameters. The state stores a
# reference into Postgrex.Parameters rather than the map itself.
def handle_execute(%Postgrex.Parameters{}, nil, _, %{parameters: ref} = s) do
with {:ok, params} <- Postgrex.Parameters.fetch(ref) do
{:ok, params, s}
else
:error ->
{:error, %Postgrex.Error{message: "parameters not available"}, s}
end
end
@spec handle_execute(Postgrex.Stream.t | Postgrex.Query.t, list, Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, %ArgumentError{} | Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
# Dispatch through execute/2, which returns either a 4-arity continuation
# (run with the current buffer) or a ready-made error tuple.
def handle_execute(req, params, opts, s) do
status = %{notify: notify(opts), mode: mode(opts), sync: :sync}
case execute(s, req) do
execute when is_function(execute, 4) ->
%{buffer: buffer} = s
s = %{s | buffer: nil}
execute.(s, status, params, buffer)
{kind, _, _} = error when kind in [:error, :disconnect] ->
error
end
end
@spec handle_close(Postgrex.Query.t | Postgrex.Stream.t, Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, %ArgumentError{} | Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
# Closing the stream that currently holds the COPY lock cannot be done
# mid-copy: the protocol has no way to abort a copy-out, so disconnect.
def handle_close(%Stream{ref: ref} = stream, _, %{postgres: {_, ref}} = s) do
msg = "postgresql protocol can not halt copying from database for " <>
inspect(stream)
err = RuntimeError.exception(message: msg)
{:disconnect, err, s}
end
# Closing the query that holds the lock releases the lock (restores the
# plain postgres status) and closes with a flushed sync.
def handle_close(%Query{ref: ref} = query, opts, %{postgres: {postgres, ref}} = s) do
%{connection_id: connection_id, buffer: buffer} = s
status = %{notify: notify(opts), mode: mode(opts), sync: :flushed_sync}
res = %Postgrex.Result{command: :close, connection_id: connection_id}
close(%{s | postgres: postgres, buffer: nil}, status, query, res, buffer)
end
# Locked by a different query/stream: refuse.
def handle_close(query, _, %{postgres: {_, _}} = s) do
lock_error(s, :close, query)
end
def handle_close(%Query{name: @reserved_prefix <> _} = query, _, s) do
reserved_error(query, s)
end
# Normal close of a prepared statement.
def handle_close(query, opts, s) do
%{connection_id: connection_id, buffer: buffer} = s
status = %{notify: notify(opts), mode: mode(opts), sync: :sync}
res = %Postgrex.Result{command: :close, connection_id: connection_id}
close(%{s | buffer: nil}, status, query, res, buffer)
end
@spec handle_begin(Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
# Transaction control is impossible while the connection is locked for COPY.
def handle_begin(_, %{postgres: {_, _}} = s) do
lock_error(s, :begin)
end
# :transaction mode uses BEGIN; :savepoint mode nests via a savepoint. All
# statements here were pre-prepared under the POSTGREX_ prefix at handshake.
def handle_begin(opts, s) do
case Keyword.get(opts, :mode, :transaction) do
:transaction ->
statement = "BEGIN"
handle_transaction(statement, :transaction, :begin, opts, s)
:savepoint ->
statement = "SAVEPOINT postgrex_savepoint"
handle_savepoint([statement, :sync], :savepoint, opts, s)
end
end
@spec handle_commit(Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
def handle_commit(_, %{postgres: {_, _}} = s) do
lock_error(s, :commit)
end
def handle_commit(opts, %{postgres: postgres} = s) do
case Keyword.get(opts, :mode, :transaction) do
:transaction ->
statement = "COMMIT"
handle_transaction(statement, :idle, :commit, opts, s)
# A failed enclosing transaction cannot release the savepoint: roll
# back to it instead.
:savepoint when postgres == :failed ->
handle_rollback(opts, s)
:savepoint ->
statement = "RELEASE SAVEPOINT postgrex_savepoint"
handle_savepoint([statement, :sync], :release, opts, s)
end
end
@spec handle_rollback(Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, Postgrex.Error.t, state} |
{:error | :disconnect, %RuntimeError{}, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
def handle_rollback(_, %{postgres: {_, _}} = s) do
lock_error(s, :rollback)
end
def handle_rollback(opts, s) do
case Keyword.get(opts, :mode, :transaction) do
:transaction ->
statement = "ROLLBACK"
handle_transaction(statement, :idle, :rollback, opts, s)
:savepoint ->
statements = ["ROLLBACK TO SAVEPOINT postgrex_savepoint",
"RELEASE SAVEPOINT postgrex_savepoint",
:sync]
handle_savepoint(statements, [:rollback, :release], opts, s)
end
end
@spec handle_simple(String.t, Keyword.t, state) ::
{:ok, Postgrex.Result.t, state} |
{:error, Postgrex.Error.t, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
# Runs a statement over the simple query protocol (used by notifications).
def handle_simple(statement, opts, %{buffer: buffer} = s) do
status = %{notify: notify(opts), mode: :transaction, sync: :sync}
simple_send(%{s | buffer: nil}, status, statement, buffer)
end
@spec handle_info(any, Keyword.t, state) ::
{:ok, state} |
{:error, Postgrex.Error.t, state} |
{:disconnect, %DBConnection.ConnectionError{}, state}
def handle_info(msg, opts \\ [], s)
# Data arriving while the socket is in active-once mode (checked in).
def handle_info({:tcp, sock, data}, opts, %{sock: {:gen_tcp, sock}} = s) do
handle_data(s, opts, data)
end
def handle_info({:tcp_closed, sock}, _, %{sock: {:gen_tcp, sock}} = s) do
disconnect(s, :tcp, "async recv", :closed)
end
def handle_info({:tcp_error, sock, reason}, _, %{sock: {:gen_tcp, sock}} = s) do
disconnect(s, :tcp, "async recv", reason)
end
# Same three cases for an SSL-wrapped socket.
def handle_info({:ssl, sock, data}, opts, %{sock: {:ssl, sock}} = s) do
handle_data(s, opts, data)
end
def handle_info({:ssl_closed, sock}, _, %{sock: {:ssl, sock}} = s) do
disconnect(s, :ssl, "async recv", :closed)
end
def handle_info({:ssl_error, sock, reason}, _, %{sock: {:ssl, sock}} = s) do
disconnect(s, :ssl, "async recv", reason)
end
# Anything else (stale messages, late timer fires) is logged and ignored.
def handle_info(msg, _, s) do
Logger.info(fn() -> [inspect(__MODULE__), ?\s, inspect(self()),
" received unexpected message: " | inspect(msg)]
end)
{:ok, s}
end
## connect
# Opens the TCP socket. When the caller did not pin :buffer explicitly, the
# userland buffer is raised to at least max(sndbuf, recbuf) so large rows do
# not fragment into many small reads.
defp connect(host, port, sock_opts, %{timeout: timeout} = s) do
buffer? = Keyword.has_key?(sock_opts, :buffer)
case :gen_tcp.connect(host, port, sock_opts ++ @sock_opts, timeout) do
{:ok, sock} when buffer? ->
{:ok, %{s | sock: {:gen_tcp, sock}}}
{:ok, sock} ->
# A suitable :buffer is only set if :recbuf is included in
# :socket_options.
{:ok, [sndbuf: sndbuf, recbuf: recbuf, buffer: buffer]} =
:inet.getopts(sock, [:sndbuf, :recbuf, :buffer])
buffer = buffer
|> max(sndbuf)
|> max(recbuf)
:ok = :inet.setopts(sock, [buffer: buffer])
{:ok, %{s | sock: {:gen_tcp, sock}}}
{:error, reason} ->
{:error, conn_error(:tcp, "connect", reason)}
end
end
## handshake
# Runs the post-connect handshake under a watchdog timer that shuts the
# socket down if the whole handshake exceeds the connect timeout. On success
# the server parameters are moved into the shared Postgrex.Parameters store
# and the state keeps only a reference.
defp handshake(%{timeout: timeout, sock: {:gen_tcp, sock}} = s,status) do
{:ok, peer} = :inet.peername(sock)
timer = start_handshake_timer(timeout, sock)
case do_handshake(%{s | peer: peer}, status) do
{:ok, %{parameters: parameters} = s} ->
cancel_handshake_timer(timer)
ref = Postgrex.Parameters.insert(parameters)
{:ok, %{s | parameters: ref}}
{:disconnect, err, s} ->
cancel_handshake_timer(timer)
# Reuse the disconnect/2 callback for cleanup, then surface the error.
disconnect(err, s)
{:error, err}
end
end
# Arms a one-shot timer that shuts the socket down (read and write) if the
# handshake runs longer than `timeout`, so a stalled server cannot hang the
# connection process forever.
defp start_handshake_timer(:infinity, _sock), do: :infinity
defp start_handshake_timer(timeout, sock) do
  shutdown_args = [sock, :read_write]
  {:ok, tref} = :timer.apply_after(timeout, :gen_tcp, :shutdown, shutdown_args)
  {:timer, tref}
end

# Disarms the handshake watchdog; :infinity means no timer was started.
def cancel_handshake_timer(:infinity), do: :ok
def cancel_handshake_timer({:timer, tref}) do
  {:ok, _} = :timer.cancel(tref)
  :ok
end
# Chooses the handshake path: negotiate TLS first when :ssl was requested,
# otherwise send the startup message straight away.
defp do_handshake(s, status) do
  case status do
    %{ssl: true} -> ssl(s, status)
    %{ssl: false} -> startup(s, status)
  end
end
## ssl
# Sends the SSLRequest message; the server answers with a single byte.
defp ssl(s, status) do
case msg_send(s, msg_ssl_request(), "") do
:ok -> ssl_recv(s, status)
{:disconnect, _, _} = dis -> dis
end
end
# 'S' means the server accepts TLS, 'N' means SSL support is not available.
defp ssl_recv(%{sock: {:gen_tcp, sock}} = s, status) do
case :gen_tcp.recv(sock, 1, :infinity) do
{:ok, <<?S>>} ->
ssl_connect(s, status)
{:ok, <<?N>>} ->
disconnect(s, %Postgrex.Error{message: "ssl not available"}, "")
{:error, reason} ->
disconnect(s, :tcp, "recv", reason)
end
end
# Upgrades the raw TCP socket to TLS and continues the startup over it.
defp ssl_connect(%{sock: {:gen_tcp, sock}, timeout: timeout} = s, status) do
case :ssl.connect(sock, status.opts[:ssl_opts] || [], timeout) do
{:ok, ssl_sock} ->
startup(%{s | sock: {:ssl, ssl_sock}}, status)
{:error, reason} ->
disconnect(s, :ssl, "connect", reason)
end
end
## startup
# Sends the startup packet (user, database and any extra parameters), then
# waits for the authentication exchange.
defp startup(s, %{opts: opts} = status) do
params = opts[:parameters] || []
user = Keyword.fetch!(opts, :username)
database = Keyword.fetch!(opts, :database)
msg = msg_startup(params: [user: user, database: database] ++ params)
case msg_send(s, msg, "") do
:ok -> auth_recv(s, status, <<>>)
{:disconnect, _, _} = dis -> dis
end
end
## auth
# Authentication loop: handles ok/cleartext/md5 challenges. Other auth
# methods (e.g. SASL) are not handled by this chunk of the protocol.
defp auth_recv(s, status, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_auth(type: :ok), buffer} ->
init_recv(s, status, buffer)
{:ok, msg_auth(type: :cleartext), buffer} ->
auth_cleartext(s, status, buffer)
{:ok, msg_auth(type: :md5, data: salt), buffer} ->
auth_md5(s, status, salt, buffer)
{:ok, msg_error(fields: fields), buffer} ->
disconnect(s, Postgrex.Error.exception(postgres: fields), buffer)
{:disconnect, _, _} = dis ->
dis
end
end
# Answers a cleartext-password challenge with the configured password.
defp auth_cleartext(s, %{opts: opts} = status, buffer) do
  msg = msg_password(pass: Keyword.fetch!(opts, :password))
  auth_send(s, msg, status, buffer)
end
# Answers an MD5 challenge: md5(md5(password <> username) <> salt), hex
# encoded lower-case and prefixed with "md5" per the postgres wire protocol.
defp auth_md5(s, %{opts: opts} = status, salt, buffer) do
  user = Keyword.fetch!(opts, :username)
  pass = Keyword.fetch!(opts, :password)
  inner = md5_hex([pass, user])
  digest = md5_hex([inner, salt])
  auth_send(s, msg_password(pass: ["md5", digest]), status, buffer)
end

# Lower-case hexadecimal MD5 of an iolist.
defp md5_hex(data) do
  Base.encode16(:crypto.hash(:md5, data), case: :lower)
end
# Sends an auth response and loops back into the auth exchange.
defp auth_send(s, msg, status, buffer) do
case msg_send(s, msg, buffer) do
:ok -> auth_recv(s, status, buffer)
{:disconnect, _, _} = dis -> dis
end
end
## init
# Post-auth loop: records the backend key data (needed for cancel requests)
# and proceeds to type bootstrap once the server signals ready-for-query.
defp init_recv(s, status, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_backend_key(pid: pid, key: key), buffer} ->
init_recv(%{s | connection_id: pid, connection_key: key}, status, buffer)
{:ok, msg_ready(), buffer} ->
bootstrap(s, status, buffer)
{:ok, msg_error(fields: fields), buffer} ->
disconnect(s, Postgrex.Error.exception(postgres: fields), buffer)
{:ok, msg, buffer} ->
init_recv(handle_msg(s, status, msg), status, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
## bootstrap
# No types requested: skip type bootstrapping entirely.
defp bootstrap(s, %{types_key: nil}, buffer) do
activate(s, buffer)
end
# Type tables are shared per {host, port, db, ...} key. Only the connection
# that wins the lock queries pg_type; everyone else gets :go with the ready
# table.
defp bootstrap(s, %{types_key: types_key} = status, buffer) do
case Postgrex.TypeServer.fetch(types_key) do
{:lock, ref, table} ->
status = %{status | types_ref: ref}
oids = Postgrex.Types.oids(table)
bootstrap_send(%{s | types: table}, status, oids, buffer)
{:go, table} ->
reserve_send(%{s | types: table}, status, buffer)
end
end
# Sends the bootstrap query (pg_type lookup for the missing oids) over the
# simple query protocol.
defp bootstrap_send(%{parameters: parameters} = s, status, oids, buffer) do
version = parameters["server_version"] |> Postgrex.Utils.parse_version
statement = Types.bootstrap_query(version, oids)
msg = msg_query(statement: statement)
case msg_send(s, msg, buffer) do
:ok ->
# Empty oid list means the table is fresh (:create); otherwise we are
# only adding missing entries (:update).
build_types = if oids == [], do: :create, else: :update
bootstrap_recv(s, %{status | build_types: build_types}, buffer)
{:disconnect, err, s} ->
bootstrap_fail(s, err, status)
end
end
# Waits for the row description of the bootstrap query, then collects rows.
defp bootstrap_recv(s, status, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_row_desc(), buffer} ->
bootstrap_recv(s, status, [], buffer)
{:ok, msg_error(fields: fields), buffer} ->
err = Postgrex.Error.exception(postgres: fields)
bootstrap_fail(s, err, status, buffer)
{:ok, msg, buffer} ->
bootstrap_recv(handle_msg(s, status, msg), status, buffer)
{:disconnect, err, s} ->
bootstrap_fail(s, err, status)
end
end
# Accumulates decoded pg_type rows until command completion.
defp bootstrap_recv(s, status, rows, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_data_row(values: values), buffer} ->
bootstrap_recv(s, status, [row_decode(values) | rows], buffer)
{:ok, msg_command_complete(), buffer} ->
bootstrap_types(s, status, rows, buffer)
{:ok, msg_error(fields: fields), buffer} ->
err = Postgrex.Error.exception(postgres: fields)
bootstrap_fail(s, err, status, buffer)
{:ok, msg, buffer} ->
bootstrap_recv(handle_msg(s, status, msg), status, rows, buffer)
{:disconnect, err, s} ->
bootstrap_fail(s, err, status)
end
end
# :update — the shared table existed; only associate the newly fetched types
# and release the type server lock so other connections may proceed.
defp bootstrap_types(s, %{build_types: :update} = status, rows, buffer) do
%{types: table} = s
%{types_ref: ref} = status
types = Types.build_types(rows)
Types.associate_extensions_with_types(table, types)
Postgrex.TypeServer.unlock(ref)
bootstrap_sync_recv(s, status, buffer)
end
# :create — fresh table; extensions must be prepared against the server
# parameters before the type associations are written.
defp bootstrap_types(s, %{build_types: :create} = status, rows, buffer) do
%{types: table, parameters: parameters} = s
%{extensions: extensions, types_ref: ref} = status
extension_keys = Enum.map(extensions, &elem(&1, 0))
extension_opts = Types.prepare_extensions(extensions, parameters)
types = Types.build_types(rows)
Types.associate_extensions_with_types(table, extension_keys, extension_opts, types)
Postgrex.TypeServer.unlock(ref)
bootstrap_sync_recv(s, status, buffer)
end
# Consumes the ready-for-query after the bootstrap query; any status other
# than :idle at this point is a protocol violation.
defp bootstrap_sync_recv(s, status, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_ready(status: :idle), buffer} ->
reserve_send(s, status, buffer)
{:ok, msg_ready(status: postgres), buffer} ->
sync_error(s, postgres, buffer)
{:ok, msg, buffer} ->
bootstrap_sync_recv(handle_msg(s, status, msg), status, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
# Releases the type server lock (when one is held) before reporting a
# bootstrap failure, so waiting connections are not blocked indefinitely.
defp bootstrap_fail(s, err, %{types_ref: ref}) do
  if ref != nil, do: Postgrex.TypeServer.fail(ref)
  {:disconnect, err, s}
end

# Variant that also stashes the remaining socket buffer into the state.
defp bootstrap_fail(s, err, status, buffer) do
  bootstrap_fail(%{s | buffer: buffer}, err, status)
end
# With prepare: :unnamed there is no query cache, so the reserved
# transaction statements are not pre-prepared.
defp reserve_send(s, %{prepare: :unnamed}, buffer) do
activate(s, buffer)
end
# Pre-prepares all reserved transaction-control statements in one batch.
defp reserve_send(s, %{prepare: :named} = status, buffer) do
case msg_send(s, reserve_msgs() ++ [msg_sync()], buffer) do
:ok ->
reserve_recv(s, status, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
# Builds one parse message per reserved transaction-control statement, each
# prepared under the POSTGREX_ name prefix.
defp reserve_msgs() do
  Enum.map(@reserved_queries, fn statement ->
    name = @reserved_prefix <> statement
    msg_parse(name: name, statement: statement, type_oids: [])
  end)
end
# Consumes one parse-complete per reserved statement; on the final ready the
# query cache is created and the socket goes active-once.
defp reserve_recv(s, status, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_parse_complete(), buffer} ->
reserve_recv(s, status, buffer)
{:ok, msg_ready(status: :idle), buffer} ->
activate(%{s | queries: queries_new()}, buffer)
{:ok, msg_ready(status: postgres), buffer} ->
sync_error(s, postgres, buffer)
{:ok, msg_error(fields: fields), buffer} ->
disconnect(s, Postgrex.Error.exception(postgres: fields), buffer)
{:ok, msg, buffer} ->
reserve_recv(handle_msg(s, status, msg), status, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
## simple
# Sends a statement over the simple query protocol.
defp simple_send(s, status, statement, buffer) do
msg = msg_query(statement: statement)
case msg_send(s, msg, buffer) do
:ok -> simple_recv(s, status, buffer)
{:disconnect, _, _} = dis -> dis
end
end
defp simple_recv(%{timeout: timeout} = s, status, buffer) do
## simple queries here are only done by Postgrex.Notifications processes
case msg_recv(s, timeout, buffer) do
{:ok, msg_command_complete(tag: tag), buffer} ->
simple_complete(s, status, tag, buffer)
{:ok, msg_error(fields: fields), buffer} ->
err = Postgrex.Error.exception(postgres: fields)
simple_sync_recv(s, status, err, buffer)
{:ok, msg, buffer} ->
simple_recv(handle_msg(s, status, msg), status, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
# Builds the result from the command tag (e.g. "LISTEN"); simple queries
# used here return no rows.
defp simple_complete(s, status, tag, buffer) do
%{connection_id: connection_id} = s
{command, nrows} = decode_tag(tag)
result = %Postgrex.Result{command: command, num_rows: nrows || 0,
rows: nil, columns: nil,
connection_id: connection_id}
simple_sync_recv(s, status, result, buffer)
end
# Waits for ready-for-query after a simple query. In strict transaction
# mode the reported server status must match the tracked local status,
# otherwise the connection is considered out of sync.
defp simple_sync_recv(s, status, res, buffer) do
%{postgres: postgres, transactions: transactions, timeout: timeout} = s
case msg_recv(s, timeout, buffer) do
{:ok, msg_ready(status: :idle), buffer}
when postgres == :transaction and transactions == :strict ->
sync_error(s, :idle, buffer)
{:ok, msg_ready(status: :transaction), buffer}
when postgres == :idle and transactions == :strict ->
sync_error(s, :transaction, buffer)
{:ok, msg_ready(status: :failed), buffer}
when postgres == :idle and transactions == :strict ->
sync_error(s, :failed, buffer)
{:ok, msg_ready(status: postgres), buffer} ->
ok(s, res, postgres, buffer)
{:ok, msg, buffer} ->
simple_sync_recv(handle_msg(s, status, msg), status, res, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
## prepare
# Chooses the continuation used after parse/describe: a plain prepare syncs
# immediately, while prepare_execute flushes and hands the prepared query
# back so execution can follow on the same round trip.
defp prepare(opts) do
  # TODO: Use fetch!/2 once version ">= 0.12"
  opts
  |> Keyword.get(:function, :prepare)
  |> case do
    :prepare -> {:sync, &sync_recv/4}
    :prepare_execute -> {:flush, &execute_ready/4}
  end
end
# Sends Parse + Describe for the statement and chains the receive loops:
# parse_recv -> describe_recv -> `next` continuation.
defp parse_describe(s, status, query, buffer, next) do
%Query{name: name, statement: statement} = query
msgs =
[msg_parse(name: name, statement: statement, type_oids: []),
msg_describe(type: :statement, name: name)]
describe_recv = &describe_recv(&1, &2, &3, &4, next)
recv = &parse_recv(&1, &2, &3, &4, describe_recv)
send_and_recv(s, status, query, buffer, msgs, recv)
end
# Same as parse_describe/5 but closes any existing statement of the same
# name first (used when re-preparing a named statement).
defp close_parse_describe(s, status, query, buffer, next) do
%Query{name: name, statement: statement} = query
msgs =
[msg_close(type: :statement, name: name),
msg_parse(name: name, statement: statement, type_oids: []),
msg_describe(type: :statement, name: name)]
describe_recv = &describe_recv(&1, &2, &3, &4, next)
parse_recv = &parse_recv(&1, &2, &3, &4, describe_recv)
recv = &close_recv(&1, &2, &3, &4, parse_recv)
send_and_recv(s, status, query, buffer, msgs, recv)
end
# Waits for parse-complete; on a server error the unnamed-statement cache
# entry is invalidated before syncing the error back.
defp parse_recv(s, status, query, buffer, recv) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_parse_complete(), buffer} ->
recv.(s, status, query, buffer)
{:ok, msg_error(fields: fields), buffer} ->
unnamed_query_delete(s, query)
err = Postgrex.Error.exception(postgres: fields)
sync_recv(s, status, err, buffer)
{:ok, msg, buffer} ->
parse_recv(handle_msg(s, status, msg), status, query, buffer, recv)
{:disconnect, _, _} = dis ->
dis
end
end
# First-time describe (ref: nil): fills in param/result type info from the
# server's ParameterDescription/RowDescription and assigns a fresh ref.
defp describe_recv(s, status, %Query{ref: nil} = query, buffer, next) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_no_data(), buffer} ->
# Statement returns no rows; only the ref/types/null need filling in.
query = %Query{query | ref: make_ref(), types: s.types, null: s.null}
query_put(s, query)
next.(s, status, query, buffer)
{:ok, msg_parameter_desc(type_oids: param_oids), buffer} ->
describe_params(s, status, query, param_oids, buffer, next)
{:ok, msg_row_desc(fields: fields), buffer} ->
describe_result(s, status, query, fields, buffer, next)
{:ok, msg_too_many_parameters(len: len, max_len: max), buffer} ->
msg = "postgresql protocol can not handle #{len} parameters, " <>
"the maximum is #{max}"
err = RuntimeError.exception(message: msg)
{:disconnect, err, %{s | buffer: buffer}}
{:ok, msg_error(fields: fields), buffer} ->
sync_recv(s, status, Postgrex.Error.exception(postgres: fields), buffer)
{:ok, msg, buffer} ->
describe_recv(handle_msg(s, status, msg), status, query, buffer, next)
{:disconnect, _, _} = dis ->
dis
end
end
# Re-describe of an already-prepared query: verify the server's oids still
# match the cached param/result info; a mismatch means the statement is
# stale (e.g. schema changed) and is treated as an error.
defp describe_recv(s, status, query, buffer, next) do
%Query{param_info: param_info, result_info: result_info} = query
case msg_recv(s, :infinity, buffer) do
{:ok, msg_no_data(), buffer} when is_nil(result_info) ->
query_put(s, query)
next.(s, status, query, buffer)
{:ok, msg_no_data(), buffer} when is_list(result_info) ->
describe_error(s, status, query, buffer)
{:ok, msg_parameter_desc(type_oids: param_oids), buffer} ->
case (for {oid, _, _} <- param_info, do: oid) do
^param_oids ->
describe_recv(s, status, query, buffer, next)
_ ->
describe_error(s, status, query, buffer)
end
{:ok, msg_row_desc(fields: fields), buffer} ->
result_oids = column_oids(fields)
case (for {oid, _, _} <- result_info, do: oid) do
^result_oids ->
query_put(s, query)
next.(s, status, query, buffer)
_ ->
describe_error(s, status, query, buffer)
end
{:ok, msg_too_many_parameters(len: len, max_len: max), buffer} ->
msg = "postgresql protocol can not handle #{len} parameters, " <>
"the maximum is #{max}"
err = RuntimeError.exception(message: msg)
{:disconnect, err, %{s | buffer: buffer}}
{:ok, msg_error(fields: fields), buffer} ->
sync_recv(s, status, Postgrex.Error.exception(postgres: fields), buffer)
{:ok, msg, buffer} ->
describe_recv(handle_msg(s, status, msg), status, query, buffer, next)
{:disconnect, _, _} = dis ->
dis
end
end
# Resolves type info for the parameter oids; a missing oid means the type
# was never bootstrapped, which is unrecoverable for this connection.
defp describe_params(s, status, query, param_oids, buffer, next) do
%{types: types} = s
case fetch_type_info(param_oids, types) do
{:ok, param_info} ->
query = %Query{query | param_info: param_info}
describe_recv(s, status, query, buffer, next)
{:error, err} ->
{:disconnect, err, %{s | buffer: buffer}}
end
end
# Resolves type info for the result columns and finalizes the query with a
# fresh ref, column names and result type info.
defp describe_result(s, status, query, fields, buffer, next) do
%{types: types, null: null} = s
{result_oids, col_names} = columns(fields)
case fetch_type_info(result_oids, types) do
{:ok, result_info} ->
query = %Query{query | ref: make_ref(), types: types, null: null,
columns: col_names, result_info: result_info}
query_put(s, query)
next.(s, status, query, buffer)
{:error, err} ->
{:disconnect, err, %{s | buffer: buffer}}
end
end
# Resolves type info for every oid in order; halts with {:error, _} naming
# the first oid that was never bootstrapped into the type table.
defp fetch_type_info(oids, types, infos \\ []) do
  oids
  |> Enum.reduce_while({:ok, infos}, fn oid, {:ok, acc} ->
    case Postgrex.Types.fetch(types, oid) do
      {:ok, info} ->
        {:cont, {:ok, [info | acc]}}
      :error ->
        msg = "oid `#{oid}` was not bootstrapped and lacks type information"
        {:halt, {:error, RuntimeError.exception(message: msg)}}
    end
  end)
  |> case do
    {:ok, acc} -> {:ok, Enum.reverse(acc)}
    {:error, _} = error -> error
  end
end
# The cached statement no longer matches the server's description: close it
# and surface a stale-type-information error to the caller.
defp describe_error(s, %{sync: :flush} = status, query, buffer) do
msg = "query #{inspect query} has stale type information"
err = ArgumentError.exception(message: msg)
%Query{name: name} = query
msgs = [msg_close(type: :statement, name: name)]
recv = &describe_error_recv/4
send_and_recv(s, %{status | sync: :flushed_sync}, err, buffer, msgs, recv)
end
# Drains the remaining describe replies until close-complete, then syncs the
# prepared error back to the caller.
defp describe_error_recv(s, status, err, buffer) do
case msg_recv(s, :infinity, buffer) do
{:ok, msg_close_complete(), buffer} ->
sync_recv(s, status, err, buffer)
{:ok, msg_no_data(), buffer} ->
describe_error_recv(s, status, err, buffer)
{:ok, msg_parameter_desc(), buffer} ->
describe_error_recv(s, status, err, buffer)
{:ok, msg_row_desc(), buffer} ->
describe_error_recv(s, status, err, buffer)
{:ok, msg_too_many_parameters(len: len, max_len: max), buffer} ->
msg = "postgresql protocol can not handle #{len} parameters, " <>
"the maximum is #{max}"
err = ArgumentError.exception(message: msg)
{:disconnect, err, %{s | buffer: buffer}}
{:ok, msg_error(fields: fields), buffer} ->
sync_recv(s, status, Postgrex.Error.exception(postgres: fields), buffer)
{:ok, msg, buffer} ->
describe_error_recv(handle_msg(s, status, msg), status, err, buffer)
{:disconnect, _, _} = dis ->
dis
end
end
# After a flushed parse/describe, return the prepared query and lock the
# connection on this query's ref until the matching execute runs.
defp execute_ready(s, _status, query, buffer) do
  %{postgres: postgres} = s
  %Query{ref: ref} = query
  ok(s, query, {postgres, ref}, buffer)
end
## execute
# Caller-visible misuse: wrap the message in an ArgumentError error tuple.
defp query_error(s, msg) do
  {:error, ArgumentError.exception(msg), s}
end

# Transaction control while the connection holds a COPY lock is fatal: the
# protocol state can no longer be trusted, so disconnect.
defp lock_error(s, fun) do
  msg =
    "connection is locked copying to or from the database and " <>
      "can not #{fun} transaction"

  {:disconnect, RuntimeError.exception(msg), s}
end

# Other operations while locked are rejected without disconnecting.
defp lock_error(s, fun, query) do
  msg =
    "connection is locked copying to or from the database and " <>
      "can not #{fun} #{inspect query}"

  {:error, RuntimeError.exception(msg), s}
end
# The lock is held by this exact query (matching ref): unlock by restoring
# the plain postgres status and continue with a flushed sync, since the
# parse/describe round already flushed.
defp execute(%{postgres: {postgres, ref}}, %Query{ref: ref} = query) do
fn(s, status, params, buffer) ->
s = %{s | postgres: postgres}
status = %{status | sync: :flushed_sync}
case query do
%Query{copy_data: true} ->
bind_copy_in(s, status, query, params, buffer)
_ ->
bind_execute(s, status, query, params, buffer)
end
end
end
# Locked by a different query: reject.
defp execute(%{postgres: {_, _ref}} = s, %Query{} = query) do
lock_error(s, :execute, query)
end
defp execute(s, %Query{name: @reserved_prefix <> _} = query) do
reserved_error(query, s)
end
defp execute(s, %Query{types: nil} = query) do
query_error(s, "query #{inspect query} has not been prepared")
end
# Prepared for this connection: depending on the cache state the query may
# be ready, need a (re)parse+describe, or need a close+parse+describe; the
# copy_data flag routes to the COPY-in bind variant.
defp execute(%{types: types} = s, %Query{types: types} = query) do
case query_prepare(s, query) do
{:ready, %Query{copy_data: true} = query} ->
&bind_copy_in(&1, &2, query, &3, &4)
{:ready, query} ->
&bind_execute(&1, &2, query, &3, &4)
{:parse_describe, %Query{copy_data: true} = query} ->
fn(s, status, params, buffer) ->
next = &bind_copy_in(&1, %{&2 | sync: :flushed_sync}, &3, params, &4)
parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
{:parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind_execute(&1, %{&2 | sync: :flushed_sync}, &3, params, &4)
parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
{:close_parse_describe, %Query{copy_data: true} = query} ->
fn(s, status, params, buffer) ->
next = &bind_copy_in(&1, %{&2 | sync: :flushed_sync}, &3, params, &4)
close_parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
{:close_parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind_execute(&1, %{&2 | sync: :flushed_sync}, &3, params, &4)
close_parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
end
end
# Prepared on a different connection/type table.
defp execute(s, %Query{} = query) do
query_error(s, "query #{inspect query} has invalid types for the connection")
end
# The stream holds the COPY lock: continue the copy according to its state
# (read more copy-out data, finish a copy-in, or abort a copy-in).
defp execute(%{postgres: {_, ref}}, %Stream{ref: ref, state: state} = stream) do
case state do
:copy_out ->
fn(s, status, _params, buffer) ->
copy_out(s, status, stream, buffer)
end
:copy_done ->
fn(s, status, _params, buffer) ->
msg = msg_copy_done()
copy_in_stop(s, status, stream, msg, buffer)
end
:copy_fail ->
fn(s, status, _params, buffer) ->
msg = msg_copy_fail(message: "copying to database halted")
copy_in_stop(s, status, stream, msg, buffer)
end
end
end
# A copy-state stream without the lock means the lock was lost/taken over:
# the copy cannot be resumed, so disconnect.
defp execute(s, %Stream{state: state} = stream)
when state in [:copy_out, :copy_done, :copy_fail] do
msg = "connection lost lock for copying to or from the database and " <>
"can not execute #{inspect stream}"
{:disconnect, RuntimeError.exception(msg), s}
end
# Locked by something else: reject without disconnecting.
defp execute(%{postgres: {_, _ref}} = s, %Stream{} = stream) do
lock_error(s, :execute, stream)
end
defp execute(s, %Stream{query: %Query{name: @reserved_prefix <> _} = query}) do
reserved_error(query, s)
end
defp execute(s, %Stream{query: %Query{types: nil} = query}) do
query_error(s, "query #{inspect query} has not been prepared")
end
defp execute(%{types: types} = s, %Stream{query: %Query{types: types2} = query})
when types != types2 do
query_error(s, "query #{inspect query} has invalid types for the connection")
end
# Bound portal with copy data attached: run the COPY via the portal.
defp execute(_, %Stream{state: :out, query: %Query{copy_data: true}} = stream) do
fn(s, status, copy_data, buffer) ->
copy_in_portal(s, status, stream, copy_data, buffer)
end
end
# Bound or suspended portal: fetch the next chunk of rows.
defp execute(_, %Stream{state: state} = stream)
when state in [:out, :suspended] do
fn(s, status, _params, buffer) ->
execute_portal(s, status, stream, buffer)
end
end
defp execute(s, %Stream{query: query, state: :bind} = stream) do
case query_prepare(s, query) do
{:ready, query} ->
&bind(&1, &2, stream, query, &3, &4)
{:parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind(&1, %{&2 | sync: :flushed_sync}, stream, &3, params, &4)
parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
{:close_parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind(&1, %{&2 | sync: :flushed_sync}, stream, &3, params, &4)
close_parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
end
end
defp execute(s, %Stream{query: query, state: :copy_in} = stream) do
case query_prepare(s, query) do
{:ready, query} ->
&bind_copy_in(&1, &2, stream, query, &3, &4)
{:parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind_copy_in(&1, %{&2 | sync: :flushed_sync}, stream, &3, params, &4)
parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
{:close_parse_describe, query} ->
fn(s, status, params, buffer) ->
next = &bind_copy_in(&1, %{&2 | sync: :flushed_sync}, stream, &3, params, &4)
close_parse_describe(s, %{status | sync: :flush}, query, buffer, next)
end
end
end
defp execute(%{postgres: {_, ref}}, %Postgrex.CopyData{ref: ref}) do
©_data/4
end
defp execute(%{postgres: {_, _ref}} = s, %Postgrex.CopyData{} = copy_data) do
lock_error(s, :execute, copy_data)
end
# Executes an already-bound portal that copies data in: run the portal, push
# the pre-encoded copy data and finish the copy with CopyDone.
defp copy_in_portal(s, status, %Stream{portal: portal, max_rows: max_rows} = stream, copy_data_msg, buffer) do
  msgs = [
    msg_execute(name_port: portal, max_rows: max_rows),
    copy_data_msg,
    msg_copy_done()
  ]
  send_and_recv(s, status, stream, buffer, msgs, &copy_in_recv/4)
end
# Executes the stream's portal, fetching at most `max_rows` rows.
defp execute_portal(s, status, %Stream{portal: portal, max_rows: max_rows} = stream, buffer) do
  msgs = [msg_execute(name_port: portal, max_rows: max_rows)]
  send_and_recv(s, status, stream, buffer, msgs, &execute_recv/4)
end
# Binds `query` to the stream's portal without executing it and reports a
# :bind result once BindComplete (and the following sync) is consumed.
defp bind(s, status, stream, query, params, buffer) do
  %Stream{portal: portal} = stream
  %Query{param_formats: pfs, result_formats: rfs, name: name} = query
  result = %Postgrex.Result{command: :bind, connection_id: s.connection_id}
  msgs = [
    msg_bind(name_port: portal, name_stat: name, param_formats: pfs, params: params, result_formats: rfs)
  ]
  recv = fn s, status, res, buffer ->
    bind_recv(s, status, res, buffer, &sync_recv/4)
  end
  send_and_recv(s, status, result, buffer, msgs, recv)
end
# Binds the unnamed portal for `query` and executes it to completion
# (max_rows: 0 means no row limit).
defp bind_execute(s, status, %Query{param_formats: pfs, result_formats: rfs, name: name} = query, params, buffer) do
  messages = [
    msg_bind(name_port: "", name_stat: name, param_formats: pfs, params: params, result_formats: rfs),
    msg_execute(name_port: "", max_rows: 0)
  ]
  send_and_recv(s, status, query, buffer, messages, &bind_recv/4)
end
# Binds and executes a copy-in query; the final element of `params` is the
# pre-encoded copy data message, sent after Execute and closed with CopyDone.
defp bind_copy_in(s, status, query, params, buffer) do
  {params, [copy_data_msg]} = Enum.split(params, -1)
  %Query{param_formats: pfs, result_formats: rfs, name: name} = query
  messages = [
    msg_bind(name_port: "", name_stat: name, param_formats: pfs, params: params, result_formats: rfs),
    msg_execute(name_port: "", max_rows: 0),
    copy_data_msg,
    msg_copy_done()
  ]
  recv = fn s, status, query, buffer ->
    bind_recv(s, status, query, buffer, &copy_in_recv/4)
  end
  send_and_recv(s, status, query, buffer, messages, recv)
end
# Stream variant: binds and executes a copy-in query but leaves the copy
# open (Flush before Execute) so the caller can stream CopyData afterwards.
defp bind_copy_in(s, status, stream, query, params, buffer) do
  %Query{param_formats: pfs, result_formats: rfs, name: name} = query
  messages = [
    msg_bind(name_port: "", name_stat: name, param_formats: pfs, params: params, result_formats: rfs),
    msg_flush(),
    msg_execute(name_port: "", max_rows: 0)
  ]
  recv = fn s, status, stream, buffer ->
    bind_recv(s, status, stream, buffer, &copy_in_ready/4)
  end
  copy_in_send(s, status, %Stream{stream | query: query}, buffer, messages, recv)
end
# Sends the messages starting a copy-in stream. In :savepoint mode with a
# pending :sync a SAVEPOINT is prepended and its responses consumed before
# `recv` runs; otherwise the messages are sent as-is.
defp copy_in_send(s, %{mode: :savepoint, sync: :sync} = status, stream, buffer, msgs, recv) do
  msgs = transaction_msgs(s, ["SAVEPOINT postgrex_query"]) ++ msgs
  case msg_send(s, msgs, buffer) do
    :ok ->
      # Consume the savepoint responses, then hand over to `recv`.
      savepoint_recv(s, %{status | sync: :flush}, stream, buffer, recv)
    {:disconnect, _, _} = dis ->
      dis
  end
end
defp copy_in_send(s, status, stream, buffer, msgs, recv) do
  case msg_send(s, msgs, buffer) do
    :ok ->
      recv.(s, %{status | sync: :flush}, stream, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Sends `msgs` and then runs `recv` to consume the responses.
# In :savepoint mode the messages are wrapped in SAVEPOINT/RELEASE
# bookkeeping depending on the sync state; when the savepoint was already
# set up (:flushed_sync) its responses were consumed earlier, so `recv`
# runs immediately.
defp send_and_recv(s, %{mode: :savepoint, sync: sync} = status, query, buffer, msgs, recv) do
  case msg_send(s, savepoint_msgs(s, sync, msgs), buffer) do
    :ok when sync == :flushed_sync ->
      recv.(s, status, query, buffer)
    :ok ->
      savepoint_recv(s, status, query, buffer, recv)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# :transaction mode: append Sync or Flush depending on the sync state so the
# backend responds (and, for Sync, returns to a ready state).
defp send_and_recv(s, %{mode: :transaction, sync: sync} = status, query, buffer, msgs, recv) do
  msgs = case sync do
    :sync -> msgs ++ [msg_sync()]
    :flush -> msgs ++ [msg_flush()]
    :flushed_sync -> msgs ++ [msg_sync()]
  end
  case msg_send(s, msgs, buffer) do
    :ok ->
      recv.(s, status, query, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Wraps `msgs` in per-query savepoint bookkeeping according to the sync
# state: :sync sets up and releases the savepoint around the messages,
# :flush only sets it up (release happens later, under :flushed_sync).
defp savepoint_msgs(s, :sync, msgs) do
  transaction_msgs(s, ["SAVEPOINT postgrex_query"]) ++
    msgs ++ transaction_msgs(s, ["RELEASE SAVEPOINT postgrex_query", :sync])
end
defp savepoint_msgs(s, :flush, msgs) do
  transaction_msgs(s, ["SAVEPOINT postgrex_query"]) ++ msgs ++ [msg_flush()]
end
defp savepoint_msgs(s, :flushed_sync, msgs) do
  msgs ++ transaction_msgs(s, ["RELEASE SAVEPOINT postgrex_query", :sync])
end
# Consumes the Parse/Bind/CommandComplete responses generated by savepoint
# bookkeeping statements, then delegates to `recv` for the real result.
defp savepoint_recv(s, status, query, buffer, recv) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_parse_complete(), buffer} ->
      savepoint_recv(s, status, query, buffer, recv)
    {:ok, msg_bind_complete(), buffer} ->
      savepoint_recv(s, status, query, buffer, recv)
    {:ok, msg_command_complete(), buffer} ->
      recv.(s, status, query, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      # Failed with savepoints can only await ready message and return error
      sync_recv(s, %{status | mode: :transaction}, err, buffer)
    {:ok, msg, buffer} ->
      # Asynchronous message (notification/parameter change): handle and loop.
      s = handle_msg(s, status, msg)
      savepoint_recv(s, status, query, buffer, recv)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Rolls back to (and releases) the per-query savepoint after a failure.
# A pending :flush requires an extra Sync before the rollback statements.
defp savepoint_rollback(s, %{sync: :flush} = status, err, buffer) do
  savepoint_rollback(s, status, err, [msg_sync()], buffer)
end
defp savepoint_rollback(s, status, err, buffer) do
  savepoint_rollback(s, status, err, [], buffer)
end
defp savepoint_rollback(s, status, err, msgs, buffer) do
  statements = ["ROLLBACK TO SAVEPOINT postgrex_query",
                "RELEASE SAVEPOINT postgrex_query",
                :sync]
  msgs = msgs ++ transaction_msgs(s, statements)
  case msg_send(s, msgs, buffer) do
    :ok ->
      savepoint_rollback_recv(s, %{status | sync: :sync}, err, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Awaits the ReadyForQuery following the failed statement: the transaction
# must report :failed before the rollback statements' responses are read,
# after which the original error `err` is delivered.
defp savepoint_rollback_recv(s, status, err, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_ready(status: :failed), buffer} ->
      sync_recv = &do_sync_recv/4
      recv = &savepoint_recv(&1, &2, &3, &4, sync_recv)
      savepoint_recv(s, status, err, buffer, recv)
    {:ok, msg_ready(status: postgres), buffer} ->
      # Any other status means our view of the transaction state is wrong.
      sync_error(s, postgres, buffer)
    {:ok, msg, buffer} ->
      savepoint_rollback_recv(handle_msg(s, status, msg), status, err, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Awaits BindComplete and then delegates to `recv` (defaults to reading
# execute responses).
#
# Fix: the recursive clause for asynchronous messages previously called
# `bind_recv/4`, silently dropping a caller-supplied `recv` continuation
# (e.g. `copy_in_recv/4` or a sync wrapper) whenever a notification or
# parameter-status message arrived before BindComplete. The custom `recv`
# is now threaded through the recursion.
defp bind_recv(s, status, query, buffer, recv \\ &execute_recv/4) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_bind_complete(), buffer} ->
      recv.(s, status, query, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      bind_error(s, status, query, fields, buffer)
    {:ok, msg, buffer} ->
      bind_recv(handle_msg(s, status, msg), status, query, buffer, recv)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Converts a Bind failure into an exception. An :invalid_sql_statement_name
# error means the cached prepared statement no longer exists on the backend,
# so the local cache entry is dropped (and the situation logged).
defp bind_error(s, status, query, fields, buffer) do
  err = Postgrex.Error.exception(postgres: fields)
  _ = if err.postgres.code == :invalid_sql_statement_name do
    Logger.error fn() ->
      [inspect(query) | " is not prepared on backend"]
    end
    query_delete(s, query)
  end
  sync_recv(s, status, err, buffer)
end
# Reads execute responses for a query: switches to the row-accumulating
# variant on the first DataRow. Unexpected copy responses are handled:
# copy-in with no data to send is failed, copy-both is unsupported.
defp execute_recv(s, status, query, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_data_row(values: values), buffer} ->
      execute_recv(s, status, query, [values], buffer)
    {:ok, msg_command_complete(tag: tag), buffer} ->
      complete(s, status, query, [], tag, buffer)
    {:ok, msg_empty_query(), buffer} ->
      sync_recv(s, status, %Postgrex.Result{num_rows: 0}, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg_copy_in_response(), buffer} ->
      msg = "query #{inspect query} is trying to copying but no copy data to send"
      err = ArgumentError.exception(msg)
      copy_fail(s, status, err, buffer)
    {:ok, msg_copy_out_response(), buffer} ->
      copy_out(s, status, query, buffer)
    {:ok, msg_copy_both_response(), buffer} ->
      copy_both_disconnect(s, query, buffer)
    {:ok, msg, buffer} ->
      execute_recv(handle_msg(s, status, msg), status, query, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Row-accumulating variant: DataRow values are prepended, so `rows` holds
# rows in reverse arrival order; PortalSuspended means `max_rows` was hit
# and the stream can be resumed later.
defp execute_recv(s, status, query, rows, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_data_row(values: values), buffer} ->
      execute_recv(s, status, query, [values | rows], buffer)
    {:ok, msg_command_complete(tag: tag), buffer} ->
      complete(s, status, query, rows, tag, buffer)
    {:ok, msg_portal_suspend(), buffer} ->
      suspend(s, status, query, rows, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      execute_recv(handle_msg(s, status, msg), status, query, rows, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Builds the final %Postgrex.Result{} from the CommandComplete tag and the
# accumulated rows, then syncs. The second clause handles streamed results,
# adding previously delivered row counts for SELECT.
defp complete(s, status, %Query{} = query, rows, tag, buffer) do
  %{connection_id: connection_id} = s
  {command, nrows} = decode_tag(tag)
  %Query{columns: cols} = query
  # Fix for PostgreSQL 8.4 (doesn't include number of selected rows in tag)
  nrows =
    if is_nil(nrows) and command == :select, do: length(rows), else: nrows
  # No columns and no rows means the statement produced no result set
  # (COPY is the exception: its rows are raw copy data).
  rows =
    if is_nil(cols) and rows == [] and command != :copy, do: nil, else: rows
  result = %Postgrex.Result{command: command, num_rows: nrows || 0,
                            rows: rows, columns: cols, connection_id: connection_id}
  sync_recv(s, status, result, buffer)
end
defp complete(s, status, stream, rows, tag, buffer) do
  %Postgrex.Stream{query: query, num_rows: previous_nrows} = stream
  %{connection_id: connection_id} = s
  {command, nrows} = decode_tag(tag)
  %Query{columns: cols} = query
  # Fix for PostgreSQL 8.4 (doesn't include number of selected rows in tag)
  nrows =
    if is_nil(nrows) and command == :select, do: length(rows), else: nrows
  # Streamed SELECTs report the cumulative row count.
  nrows =
    if command == :select, do: nrows + previous_nrows, else: nrows
  rows =
    if is_nil(cols) and rows == [] and command != :copy, do: nil, else: rows
  result = %Postgrex.Result{command: command, num_rows: nrows || 0,
                            rows: rows, columns: cols, connection_id: connection_id}
  sync_recv(s, status, result, buffer)
end
# Builds the intermediate result for a suspended portal (max_rows reached):
# command and num_rows are :stream so the caller knows more rows remain.
defp suspend(s, status, stream, rows, buffer) do
  %Postgrex.Stream{query: %Query{columns: cols}} = stream
  result = %Postgrex.Result{
    command: :stream,
    num_rows: :stream,
    rows: rows,
    columns: cols,
    connection_id: s.connection_id
  }
  sync_recv(s, status, result, buffer)
end
# Aborts an unexpected copy-in with CopyFail. In :transaction mode the
# CopyFail + Sync are sent explicitly; in :savepoint mode the pending
# RELEASE statement will fail instead, so only receiving is needed.
defp copy_fail(s, %{mode: :transaction} = status, err, buffer) do
  msg = Exception.message(err)
  messages = [msg_copy_fail(message: msg), msg_sync()]
  case msg_send(s, messages, buffer) do
    :ok ->
      copy_fail_recv(s, status, err, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
defp copy_fail(s, %{mode: :savepoint} = status, err, buffer) do
  # Releasing savepoint will cause an error so receive that
  copy_fail_recv(s, status, err, buffer)
end
# Awaits the backend's error response to the CopyFail. The backend error
# replaces the local `err` before syncing.
defp copy_fail_recv(s, status, err, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      copy_fail_recv(handle_msg(s, status, msg), status, err, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Starts reading copy-out data. A plain query reads until CopyDone; a stream
# reads at most `max_rows` chunks per step (0 meaning unbounded).
defp copy_out(s, status, %Query{} = query, buffer) do
  copy_out_recv(s, status, query, :infinity, [], 0, buffer)
end
defp copy_out(s, status, %Stream{max_rows: max_rows} = stream, buffer) do
  limit = if max_rows == 0, do: :infinity, else: max_rows
  copy_out_recv(s, status, stream, limit, [], 0, buffer)
end
# First clause: fires when the chunk count reaches `max_rows` (the same
# variable bound in both positions) — return the accumulated copy data and
# keep the connection locked for this stream ({postgres, ref}).
defp copy_out_recv(s, _, stream, max_rows, acc, max_rows, buffer) do
  %Stream{ref: ref} = stream
  %{postgres: postgres, connection_id: connection_id} = s
  result = %Postgrex.Result{command: :copy_stream, num_rows: :copy_stream,
                            rows: acc, columns: nil, connection_id: connection_id}
  ok(s, result, {postgres, ref}, buffer)
end
# Accumulates CopyData chunks (newest first) until CopyDone or the limit.
defp copy_out_recv(s, status, query, max_rows, acc, nrows, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_copy_data(data: data), buffer} ->
      copy_out_recv(s, status, query, max_rows, [data | acc], nrows+1, buffer)
    {:ok, msg_copy_done(), buffer} ->
      copy_out_done(s, status, query, acc, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      s = handle_msg(s, status, msg)
      copy_out_recv(s, status, query, max_rows, acc, nrows, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# After CopyDone: awaits CommandComplete and builds the final result from
# the accumulated copy data.
defp copy_out_done(s, status, query, acc, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_command_complete(tag: tag), buffer} ->
      complete(s, status, query, acc, tag, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      s = handle_msg(s, status, msg)
      copy_out_done(s, status, query, acc, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Awaits the backend's response after sending copy-in data. CopyInResponse
# confirms the copy; other responses mean the query wasn't a copy after all
# and are handled like regular execute responses.
defp copy_in_recv(s, status, query, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_copy_in_response(), buffer} ->
      copy_in_done(s, status, query, buffer)
    {:ok, msg_command_complete(tag: tag), buffer} ->
      complete(s, status, query, [], tag, buffer)
    {:ok, msg_data_row(values: values), buffer} ->
      execute_recv(s, status, query, [values], buffer)
    {:ok, msg_empty_query(), buffer} ->
      sync_recv(s, status, %Postgrex.Result{num_rows: 0}, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg_copy_out_response(), buffer} ->
      copy_out(s, status, query, buffer)
    {:ok, msg_copy_both_response(), buffer} ->
      copy_both_disconnect(s, query, buffer)
    {:ok, msg, buffer} ->
      copy_in_recv(handle_msg(s, status, msg), status, query, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# After the copy-in finished (CopyDone already sent): awaits CommandComplete
# carrying the "COPY n" tag; the result has no rows (nil).
defp copy_in_done(s, status, query, buffer) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_command_complete(tag: tag), buffer} ->
      complete(s, status, query, nil, tag, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      copy_in_done(handle_msg(s, status, msg), status, query, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# The copy-in stream is ready to accept data: lock the connection for this
# stream ({postgres, ref}) and return a :copy_stream result.
defp copy_in_ready(s, _status, %Stream{ref: ref}, buffer) do
  %{connection_id: connection_id, postgres: postgres} = s
  result = %Postgrex.Result{
    connection_id: connection_id,
    command: :copy_stream,
    rows: nil,
    num_rows: :copy_stream
  }
  ok(s, result, {postgres, ref}, buffer)
end
# Sends one chunk of copy data over the locked connection; no backend
# response is expected, so a :copy_stream result is returned immediately.
defp copy_data(s, _status, data, buffer) do
  with :ok <- do_send(s, data, buffer) do
    %{connection_id: connection_id, postgres: postgres} = s
    result = %Postgrex.Result{
      connection_id: connection_id,
      command: :copy_stream,
      rows: nil,
      num_rows: :copy_stream
    }
    ok(s, result, postgres, buffer)
  end
end
# Ends a copy-in stream with `msg` (CopyDone or CopyFail); :flushed_sync
# makes send_and_recv append the final Sync.
defp copy_in_stop(s, status, stream, msg, buffer) do
  send_and_recv(s, %{status | sync: :flushed_sync}, stream, buffer, [msg], &copy_in_recv/4)
end
# Copy-both (streaming replication) is not supported: disconnect.
defp copy_both_disconnect(s, query, buffer) do
  message = "query #{inspect query} is trying to copy both ways but it is not supported"
  {:disconnect, ArgumentError.exception(message), %{s | buffer: buffer}}
end
## close
# Closes a prepared statement (by name) or a portal on the backend, then
# returns the supplied `result` after CloseComplete and sync.
defp close(s, status, %Query{name: name} = query, result, buffer) do
  messages = [msg_close(type: :statement, name: name)]
  close(s, status, query, buffer, result, messages)
end
defp close(s, status, %Stream{portal: portal} = stream, result, buffer) do
  messages = [msg_close(type: :portal, name: portal)]
  close(s, status, stream, buffer, result, messages)
end
defp close(s, status, query, buffer, result, messages) do
  # Deliver `result` (not the close acknowledgement) once in sync.
  sync_recv = fn(s, status, _query, buffer) ->
    sync_recv(s, status, result, buffer)
  end
  recv = &close_recv(&1, &2, &3, &4, sync_recv)
  send_and_recv(s, status, query, buffer, messages, recv)
end
# Awaits CloseComplete; a closed statement is also removed from the local
# prepared-statement cache.
defp close_recv(s, status, query, buffer, recv) do
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_close_complete(), buffer} ->
      statement_query_delete(s, query)
      recv.(s, status, query, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      sync_recv(s, status, Postgrex.Error.exception(postgres: fields), buffer)
    {:ok, msg, buffer} ->
      close_recv(handle_msg(s, status, msg), status, query, buffer, recv)
    {:disconnect, _, _} = dis ->
      dis
  end
end
## sync
# Sends a Sync and waits for ReadyForQuery before delivering `result`.
defp sync(s, status, result, buffer) do
  with :ok <- msg_send(s, msg_sync(), buffer) do
    sync_recv(s, status, result, buffer)
  end
end
## transaction
# Runs a transaction-control statement (BEGIN/COMMIT/ROLLBACK), expecting
# the connection to end up in `next_postgres` status.
defp handle_transaction(name, next_postgres, cmd, opts, s) do
  %{connection_id: connection_id, buffer: buffer} = s
  result = %Postgrex.Result{command: cmd, connection_id: connection_id}
  status = %{notify: notify(opts), mode: :transaction, sync: :sync}
  transaction_send(%{s | buffer: nil}, status, name, next_postgres, result, buffer)
end
# Sends the transaction statement followed by Sync, then reads responses.
defp transaction_send(s, status, statement, next_postgres, res, buffer) do
  msgs = transaction_msgs(s, [statement, :sync])
  with :ok <- msg_send(s, msgs, buffer) do
    transaction_recv(s, status, next_postgres, res, buffer)
  end
end
# Builds the wire messages for transaction-control statements. Without a
# statement cache (queries: nil) each statement is parsed inline via the
# unnamed statement; otherwise the reserved pre-prepared statements
# (named with @reserved_prefix) are bound and executed directly.
defp transaction_msgs(_, []) do
  []
end
defp transaction_msgs(_, [:sync]) do
  [msg_sync()]
end
defp transaction_msgs(%{queries: nil} = s, [statement | statements]) do
  [msg_parse(name: "", statement: statement, type_oids: []),
   msg_bind(name_port: "", name_stat: "", param_formats: [], params: [], result_formats: []),
   msg_execute(name_port: "" , max_rows: 0) |
   transaction_msgs(s, statements)]
end
defp transaction_msgs(s, [name | names]) do
  name = [@reserved_prefix | name]
  [msg_bind(name_port: "", name_stat: name, param_formats: [], params: [], result_formats: []),
   msg_execute(name_port: "" , max_rows: 0) |
   transaction_msgs(s, names)]
end
# Reads the responses to a transaction statement, finishing on the
# ReadyForQuery. With :naive transaction tracking any resulting status is
# accepted; otherwise the status must match `next_postgres` or the
# connection is dropped (our transaction bookkeeping is broken).
defp transaction_recv(s, status, next_postgres, res, buffer) do
  %{transactions: transactions} = s
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_ready(status: postgres), buffer} when transactions == :naive ->
      ok(s, res, postgres, buffer)
    {:ok, msg_ready(status: ^next_postgres), buffer} ->
      ok(s, res, next_postgres, buffer)
    {:ok, msg_ready(status: postgres), buffer} ->
      sync_error(s, postgres, buffer)
    {:ok, msg_parse_complete(), buffer} ->
      transaction_recv(s, status, next_postgres, res, buffer)
    {:ok, msg_bind_complete(), buffer} ->
      transaction_recv(s, status, next_postgres, res, buffer)
    {:ok, msg_command_complete(), buffer} ->
      transaction_recv(s, status, next_postgres, res, buffer)
    {:ok, msg_error(fields: fields), buffer} when transactions == :naive ->
      err = Postgrex.Error.exception(postgres: fields)
      sync_recv(s, status, err, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      # In strict tracking a failed transaction statement is fatal.
      err = Postgrex.Error.exception(postgres: fields)
      disconnect(s, err, buffer)
    {:ok, msg, buffer} ->
      s = handle_msg(s, status, msg)
      transaction_recv(s, status, next_postgres, res, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# Runs savepoint-control statements (used for nested transactions).
defp handle_savepoint(names, cmd, opts, s) do
  %{connection_id: connection_id, buffer: buffer} = s
  result = %Postgrex.Result{command: cmd, connection_id: connection_id}
  status = %{notify: notify(opts), mode: :transaction, sync: :sync}
  savepoint_send(%{s | buffer: nil}, status, names, result, buffer)
end
# Sends the savepoint statements and reads their responses.
defp savepoint_send(s, status, statements, res, buffer) do
  msgs = transaction_msgs(s, statements)
  with :ok <- msg_send(s, msgs, buffer) do
    savepoint_recv(s, status, res, buffer)
  end
end
# Reads responses to savepoint statements, finishing on ReadyForQuery.
# Under :strict transaction tracking the reported status must agree with
# the locally tracked one, otherwise the connection is dropped.
defp savepoint_recv(s, status, res, buffer) do
  %{postgres: postgres, transactions: transactions} = s
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_parse_complete(), buffer} ->
      savepoint_recv(s, status, res, buffer)
    {:ok, msg_bind_complete(), buffer} ->
      savepoint_recv(s, status, res, buffer)
    {:ok, msg_command_complete(), buffer} ->
      savepoint_recv(s, status, res, buffer)
    {:ok, msg_ready(status: :idle), buffer}
    when postgres == :transaction and transactions == :strict ->
      sync_error(s, :idle, buffer)
    {:ok, msg_ready(status: :transaction), buffer}
    when postgres == :idle and transactions == :strict ->
      sync_error(s, :transaction, buffer)
    {:ok, msg_ready(status: :failed), buffer}
    when postgres == :idle and transactions == :strict ->
      sync_error(s, :failed, buffer)
    {:ok, msg_ready(status: postgres), buffer} ->
      ok(s, res, postgres, buffer)
    {:ok, msg_error(fields: fields), buffer} ->
      err = Postgrex.Error.exception(postgres: fields)
      do_sync_recv(s, status, err, buffer)
    {:ok, msg, buffer} ->
      s = handle_msg(s, status, msg)
      savepoint_recv(s, status, res, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
## data
# Entry point for handling unsolicited (asynchronous) data that arrived
# while the connection was idle in [active: once] mode.
defp handle_data(s, opts, buffer) do
  data(s, %{notify: notify(opts)}, buffer)
end
# Drains asynchronous messages from `buffer`; backend errors while idle are
# fatal. Once the buffer is empty the socket is re-activated.
defp data(%{timeout: timeout} = s, status, buffer) do
  case msg_recv(s, timeout, buffer) do
    {:ok, msg_error(fields: fields), buffer} ->
      disconnect(s, Postgrex.Error.exception(postgres: fields), buffer)
    {:ok, msg, <<>>} ->
      # Buffer exhausted: handle the last message and go back to active mode.
      activate(handle_msg(s, status, msg), <<>>)
    {:ok, msg, buffer} ->
      data(handle_msg(s, status, msg), status, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
## helpers
# Fetches the notice/notification callback from `opts`, defaulting to a
# two-argument no-op.
defp notify(opts) do
  default = fn _channel, _payload -> :ok end
  opts[:notify] || default
end
# Validates the query mode option; only :transaction (the default) and
# :savepoint are supported (anything else raises CaseClauseError).
defp mode(opts) do
  requested = opts[:mode] || :transaction
  case requested do
    :transaction -> :transaction
    :savepoint -> :savepoint
  end
end
# Splits RowDescription fields into parallel lists: {type_oids, names}.
defp columns(fields) do
  fields
  |> Enum.map(fn row_field(type_oid: oid, name: name) -> {oid, name} end)
  |> Enum.unzip()
end
# Extracts the type oid of every RowDescription field. Like the original
# comprehension, entries that are not row_field records are skipped.
defp column_oids(fields) do
  Enum.flat_map(fields, fn
    row_field(type_oid: oid) -> [oid]
    _other -> []
  end)
end
# Maps the socket transport module to the tag used in error messages and
# active-mode message tuples.
defp tag(:gen_tcp) do
  :tcp
end
defp tag(:ssl) do
  :ssl
end
# Decodes a CommandComplete tag (e.g. "SELECT 10") into `{command, nrows}`.
# Tags with no known row count fall through to the accumulator clauses,
# which lowercase ASCII letters and turn spaces into underscores to build
# the command atom (e.g. "CREATE TABLE" -> :create_table).
defp decode_tag(tag) do
  case tag do
    "INSERT " <> rest ->
      # INSERT tags are "INSERT <oid> <rows>"; the oid is ignored.
      [_oid, nrows] = :binary.split(rest, " ")
      {:insert, String.to_integer(nrows)}
    "SELECT " <> count -> {:select, String.to_integer(count)}
    "UPDATE " <> count -> {:update, String.to_integer(count)}
    "DELETE " <> count -> {:delete, String.to_integer(count)}
    "FETCH " <> count -> {:fetch, String.to_integer(count)}
    "MOVE " <> count -> {:move, String.to_integer(count)}
    "COPY " <> count -> {:copy, String.to_integer(count)}
    # NOTE(review): "BEGIN" mapping to :commit is preserved from the
    # original — presumably intentional, verify before changing.
    "BEGIN" -> {:commit, nil}
    "COMMIT" -> {:commit, nil}
    "ROLLBACK" -> {:rollback, nil}
    other -> decode_tag(other, "")
  end
end
defp decode_tag(<<>>, acc), do: {String.to_atom(acc), nil}
defp decode_tag(<<?\s, rest::binary>>, acc),
  do: decode_tag(rest, <<acc::binary, ?_>>)
defp decode_tag(<<char, rest::binary>>, acc) when char in ?A..?Z,
  do: decode_tag(rest, <<acc::binary, char + 32>>)
defp decode_tag(<<char, rest::binary>>, acc),
  do: decode_tag(rest, <<acc::binary, char>>)
# It is ok to use infinity timeout here if in client process as timer is
# running.
# Receives one protocol message when the gen_tcp socket is in [active: once]
# mode: the pending data arrives as a process message rather than via recv.
defp msg_recv(%{sock: {:gen_tcp, sock}} = s, timeout, :active_once) do
  receive do
    {:tcp, ^sock, buffer} ->
      msg_recv(s, timeout, buffer)
    {:tcp_closed, ^sock} ->
      disconnect(s, :tcp, "async_recv", :closed, :active_once)
    {:tcp_error, ^sock, reason} ->
      disconnect(s, :tcp, "async_recv", reason, :active_once)
  after
    timeout ->
      # Fix: was `:active_one` — a typo; the buffer sentinel is
      # `:active_once` everywhere else (including the ssl clause).
      disconnect(s, :tcp, "async_recv", :timeout, :active_once)
  end
end
# SSL twin of the [active: once] clause above: waits for the ssl data
# message before decoding.
defp msg_recv(%{sock: {:ssl, sock}} = s, timeout, :active_once) do
  receive do
    {:ssl, ^sock, buffer} ->
      msg_recv(s, timeout, buffer)
    {:ssl_closed, ^sock} ->
      disconnect(s, :ssl, "async_recv", :closed, :active_once)
    {:ssl_error, ^sock, reason} ->
      disconnect(s, :ssl, "async_recv", reason, :active_once)
  after
    timeout ->
      disconnect(s, :ssl, "async_recv", :timeout, :active_once)
  end
end
# Decodes one message from `buffer`, reading more bytes from the socket
# when the buffer does not yet hold a complete message.
defp msg_recv(s, timeout, buffer) do
  with {:more, missing} <- msg_decode(buffer) do
    msg_recv(s, timeout, buffer, missing)
  end
end
# Reads at least `more` further bytes (capped per read at @max_packet),
# accumulating partial reads as iodata and flattening to a binary only
# when a full message may be present.
defp msg_recv(%{sock: {mod, sock}} = s, timeout, buffer, more) do
  case mod.recv(sock, min(more, @max_packet), timeout) do
    {:ok, data} when byte_size(data) < more ->
      msg_recv(s, timeout, [buffer | data], more - byte_size(data))
    {:ok, data} when is_binary(buffer) ->
      msg_recv(s, timeout, buffer <> data)
    {:ok, data} when is_list(buffer) ->
      msg_recv(s, timeout, IO.iodata_to_binary([buffer | data]))
    {:error, reason} ->
      disconnect(s, tag(mod), "recv", reason, IO.iodata_to_binary(buffer))
  end
end
# Decodes a single backend message: 1-byte type tag plus a 4-byte length
# which includes the length field itself (hence `size - 4` body bytes).
# Returns {:more, missing_bytes} when the buffer is incomplete.
defp msg_decode(bin) when byte_size(bin) < 5 do
  {:more, 0}
end
defp msg_decode(<<type :: int8, size :: int32, rest :: binary>>) do
  size = size - 4
  case rest do
    <<body :: binary(size), rest :: binary>> ->
      {:ok, parse(body, type, size), rest}
    _ ->
      {:more, size - byte_size(rest)}
  end
end
# Decodes DataRow column values: a -1 length means NULL, otherwise `len`
# bytes of raw (not yet type-decoded) column data follow.
defp row_decode(<<>>), do: []
defp row_decode(<<-1::int32, rest::binary>>) do
  [nil | row_decode(rest)]
end
defp row_decode(<<len::uint32, value::binary(len), rest::binary>>) do
  [value | row_decode(rest)]
end
# Encodes and sends one message or a list of messages in a single socket
# write. The reduce builds nested iodata ([acc | encoded]) to avoid list
# concatenation per message.
defp msg_send(s, msgs, buffer) when is_list(msgs) do
  binaries = Enum.reduce(msgs, [], &[&2 | maybe_encode_msg(&1)])
  do_send(s, binaries, buffer)
end
defp msg_send(s, msg, buffer) do
  do_send(s, encode_msg(msg), buffer)
end
# Protocol messages are records (tuples) that still need encoding; copy
# data is already iodata (binary or list) and is passed through untouched.
defp maybe_encode_msg(msg) when is_tuple(msg) do
  encode_msg(msg)
end
defp maybe_encode_msg(msg) when is_binary(msg) or is_list(msg) do
  msg
end
# Writes iodata to the socket, turning send errors into a disconnect.
defp do_send(%{sock: {mod, sock}} = s, data, buffer) do
  with {:error, reason} <- mod.send(sock, data) do
    disconnect(s, tag(mod), "send", reason, buffer)
  end
end
# Handles asynchronous backend messages: ParameterStatus updates the shared
# parameters (either an ETS-backed ref or an in-state map),
# NotificationResponse invokes the notify callback, NoticeResponse is
# currently ignored.
defp handle_msg(s, _, msg_parameter(name: name, value: value)) do
  %{parameters: parameters} = s
  # Binaries likely part of much larger binary and only keeping name/value
  # over long term
  name = :binary.copy(name)
  value = :binary.copy(value)
  cond do
    is_reference(parameters) ->
      _ = Postgrex.Parameters.put(parameters, name, value)
      s
    is_map(parameters) ->
      %{s | parameters: Map.put(parameters, name, value)}
  end
end
defp handle_msg(s, status, msg_notify(channel: channel, payload: payload)) do
  %{notify: notify} = status
  notify.(channel, payload)
  s
end
defp handle_msg(s, _, msg_notice()) do
  # TODO: subscribers
  s
end
# Wraps a step outcome into DBConnection's return convention while storing
# the new postgres status and leftover buffer on the state.
defp ok(s, %Postgrex.Result{} = res, postgres, buffer) do
  {:ok, res, %{s | postgres: postgres, buffer: buffer}}
end
defp ok(s, %Postgrex.Query{} = query, postgres, buffer) do
  {:ok, query, %{s | postgres: postgres, buffer: buffer}}
end
defp ok(s, %Postgrex.Error{} = err, postgres, buffer) do
  # Tag the error with this connection's backend pid for diagnostics.
  %{connection_id: connection_id} = s
  err = %{err | connection_id: connection_id}
  {:error, err, %{s | postgres: postgres, buffer: buffer}}
end
defp ok(s, %ArgumentError{} = err, postgres, buffer) do
  {:error, err, %{s | postgres: postgres, buffer: buffer}}
end
defp ok(s, :active_once, postgres, buffer) do
  # No result to deliver: hand the socket back to [active: once] mode.
  activate(%{s | postgres: postgres}, buffer)
end
defp ok(s, nil, postgres, buffer) do
  {:ok, %{s | postgres: postgres, buffer: buffer}}
end
# Builds a disconnect tuple with a connection error, first storing the
# remaining buffer (or buffer sentinel) on the state.
defp disconnect(s, tag, action, reason, buffer) do
  disconnect(%{s | buffer: buffer}, tag, action, reason)
end
defp disconnect(s, tag, action, reason) do
  {:disconnect, conn_error(tag, action, reason), s}
end
# Formats a socket error into a DBConnection.ConnectionError; POSIX codes
# are expanded via :inet/:ssl format_error, non-POSIX atoms printed as-is.
defp conn_error(mod, action, reason) when reason in @nonposix_errors do
  conn_error("#{mod} #{action}: #{reason}")
end
defp conn_error(:tcp, action, reason) do
  formatted_reason = :inet.format_error(reason)
  conn_error("tcp #{action}: #{formatted_reason} - #{inspect(reason)}")
end
defp conn_error(:ssl, action, reason) do
  formatted_reason = :ssl.format_error(reason)
  conn_error("ssl #{action}: #{formatted_reason} - #{inspect(reason)}")
end
defp conn_error(message) do
  DBConnection.ConnectionError.exception(message)
end
# Disconnects with a backend-reported fatal error, tagging it with this
# connection's backend pid.
defp disconnect(%{connection_id: connection_id} = s, %Postgrex.Error{} = err, buffer) do
  {:disconnect, %{err | connection_id: connection_id}, %{s | buffer: buffer}}
end
# Queries must not use the reserved (internal) statement-name prefix.
defp reserved_error(query, s) do
  message = "query #{inspect query} uses reserved name"
  {:error, ArgumentError.exception(message), s}
end
# Delivers `res` once the connection is back in sync. In :savepoint mode an
# error triggers a rollback to the per-query savepoint, a success first
# consumes the RELEASE responses. In :transaction mode a pending :flush
# requires sending a Sync before awaiting ReadyForQuery.
defp sync_recv(s, %{mode: :savepoint} = status, res, buffer) do
  case res do
    %Postgrex.Error{} ->
      savepoint_rollback(s, status, res, buffer)
    _ ->
      savepoint_recv(s, status, res, buffer, &do_sync_recv/4)
  end
end
defp sync_recv(s, %{mode: :transaction, sync: :flush} = status, res, buffer) do
  case msg_send(s, msg_sync(), buffer) do
    :ok ->
      do_sync_recv(s, %{status | sync: :flushed_sync}, res, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
defp sync_recv(s, %{mode: :transaction} = status, res, buffer) do
  do_sync_recv(s, status, res, buffer)
end
# Awaits ReadyForQuery; under :strict transaction tracking the reported
# status must agree with the locally tracked postgres state, otherwise the
# connection is dropped.
defp do_sync_recv(s, status, res, buffer) do
  %{postgres: postgres, transactions: transactions} = s
  case msg_recv(s, :infinity, buffer) do
    {:ok, msg_ready(status: :idle), buffer}
    when postgres == :transaction and transactions == :strict ->
      sync_error(s, :idle, buffer)
    {:ok, msg_ready(status: :transaction), buffer}
    when postgres == :idle and transactions == :strict ->
      sync_error(s, :transaction, buffer)
    {:ok, msg_ready(status: :failed), buffer}
    when postgres == :idle and transactions == :strict ->
      sync_error(s, :failed, buffer)
    {:ok, msg_ready(status: postgres), buffer} ->
      ok(s, res, postgres, buffer)
    {:ok, msg, buffer} ->
      do_sync_recv(handle_msg(s, status, msg), status, res, buffer)
    {:disconnect, _, _} = dis ->
      dis
  end
end
# The backend reported a transaction status that contradicts our local
# tracking: disconnect, optionally storing the remaining buffer first.
defp sync_error(s, postgres, buffer) do
  sync_error(%{s | buffer: buffer}, postgres)
end
defp sync_error(s, postgres) do
  message = "unexpected postgres status: #{postgres}"
  {:disconnect, %Postgrex.Error{message: message}, s}
end
# Collects any data the socket already delivered in active mode into the
# state's buffer; returns immediately (after 0) when nothing is pending.
defp recv_buffer(%{sock: {:gen_tcp, sock}} = s) do
  receive do
    {:tcp, ^sock, buffer} ->
      {:ok, %{s | buffer: buffer}}
    {:tcp_closed, ^sock} ->
      disconnect(s, :tcp, "async recv", :closed, "")
    {:tcp_error, ^sock, reason} ->
      # Fix: the action label was "async_recv", inconsistent with the
      # "async recv" used by the other clauses of recv_buffer (the string
      # only appears in the resulting error message).
      disconnect(s, :tcp, "async recv", reason, "")
  after
    0 ->
      {:ok, %{s | buffer: <<>>}}
  end
end
# SSL twin of recv_buffer: drain any already-delivered active-mode data
# into the buffer, returning immediately when nothing is pending.
defp recv_buffer(%{sock: {:ssl, sock}} = s) do
  receive do
    {:ssl, ^sock, buffer} ->
      {:ok, %{s | buffer: buffer}}
    {:ssl_closed, ^sock} ->
      disconnect(s, :ssl, "async recv", :closed, "")
    {:ssl_error, ^sock, reason} ->
      disconnect(s, :ssl, "async recv", reason, "")
  after
    0 ->
      {:ok, %{s | buffer: <<>>}}
  end
end
## Fake [active: once] if buffer not empty
# Puts the socket back in [active: once] mode when the buffer is empty.
# With leftover buffered data, fake an active-mode message to self() so the
# pending bytes are processed before any new socket data.
defp activate(s, <<>>) do
  case setopts(s, [active: :once], <<>>) do
    :ok -> {:ok, %{s | buffer: :active_once}}
    other -> other
  end
end
defp activate(%{sock: {mod, sock}} = s, buffer) do
  _ = send(self(), {tag(mod), sock, buffer})
  {:ok, s}
end
# Applies socket options via the transport-specific module, disconnecting
# on failure.
defp setopts(%{sock: {mod, sock}} = s, opts, buffer) do
  with {:error, reason} <- setopts(mod, sock, opts) do
    disconnect(s, tag(mod), "setopts", reason, buffer)
  end
end
defp setopts(:gen_tcp, sock, opts) do
  :inet.setopts(sock, opts)
end
defp setopts(:ssl, sock, opts) do
  :ssl.setopts(sock, opts)
end
# Sends a best-effort CancelRequest to the backend over a separate TCP
# connection; failures are only logged since nothing else can be done.
# A connection without a cancellation key cannot be cancelled.
defp cancel_request(%{connection_key: nil}), do: :ok
defp cancel_request(s) do
  case do_cancel_request(s) do
    :ok ->
      :ok
    {:error, action, reason} ->
      err = conn_error(:tcp, action, reason)
      Logger.error fn() ->
        ["#{inspect __MODULE__} #{inspect self()} could not cancel backend: " |
          Exception.message(err)]
      end
  end
end
# Opens a fresh plain TCP connection to the server's peer address to carry
# the CancelRequest message.
defp do_cancel_request(%{peer: {ip, port}, timeout: timeout} = s) do
  connect_opts = [mode: :binary, active: false]
  case :gen_tcp.connect(ip, port, connect_opts, timeout) do
    {:ok, sock} -> cancel_send_recv(s, sock)
    {:error, reason} -> {:error, :connect, reason}
  end
end
# Sends the CancelRequest (backend pid + secret key) over the side channel.
defp cancel_send_recv(%{connection_id: pid, connection_key: key} = s, sock) do
  encoded = encode_msg(msg_cancel_request(pid: pid, key: key))
  case :gen_tcp.send(sock, encoded) do
    :ok -> cancel_recv(s, sock)
    {:error, reason} -> {:error, :send, reason}
  end
end
# The backend closes the side-channel socket after processing the cancel
# request; the read result is irrelevant, so wait briefly and close.
defp cancel_recv(%{timeout: timeout}, sock) do
  # ignore result as socket will close, else can do nothing
  _ = :gen_tcp.recv(sock, 0, timeout)
  :gen_tcp.close(sock)
end
# Closes the underlying socket via its transport module (:gen_tcp or :ssl).
defp sock_close(%{sock: {mod, sock}}), do: mod.close(sock)
# Drops the shared parameters entry when parameters are stored
# out-of-process (referenced by a ref rather than held in the state map).
defp delete_parameters(%{parameters: parameters}) when is_reference(parameters) do
  Postgrex.Parameters.delete(parameters)
end
defp delete_parameters(_state) do
  :ok
end
# The prepared-statement cache is a public ETS set; `queries: nil` means
# statement caching is disabled.
defp queries_new(), do: :ets.new(__MODULE__, [:set, :public])
defp queries_delete(%{queries: nil}), do: true
defp queries_delete(%{queries: queries}), do: :ets.delete(queries)
# Records a prepared query's {name, ref} in the statement cache. Unnamed
# queries (ref: nil) and cache-less connections are no-ops.
defp query_put(%{queries: nil}, _), do: :ok
defp query_put(s, %Stream{query: query}), do: query_put(s, query)
defp query_put(_, %Query{ref: nil}), do: nil
defp query_put(%{queries: queries}, %Query{name: name, ref: ref}) do
  try do
    :ets.insert(queries, {name, ref})
  rescue
    ArgumentError ->
      # ets table deleted, socket will be closed, rescue here and get nice
      # error when trying to recv on socket.
      :ok
  else
    true ->
      :ok
  end
end
# Forces a query to use the unnamed prepared statement.
defp unnamed(query) do
  case query do
    %Query{name: ""} -> query
    _ -> %Query{query | name: ""}
  end
end
# Unnamed prepared statements are transient: drop any cache entry for them
# once they are no longer in use.
defp unnamed_query_delete(s, %Query{name: ""} = query),
  do: query_delete(s, query)
defp unnamed_query_delete(s, %Stream{query: %Query{name: ""} = query}),
  do: query_delete(s, query)
defp unnamed_query_delete(_, _),
  do: :ok
# Closing a statement removes its cache entry; closing a portal (Stream)
# does not affect the statement cache.
defp statement_query_delete(s, %Query{} = query) do
  query_delete(s, query)
end
defp statement_query_delete(_, %Stream{}) do
  :ok
end
# Removes a query's entry from the statement cache; tolerates the ETS table
# having already been deleted (connection is going down anyway).
defp query_delete(%{queries: nil}, _), do: :ok
defp query_delete(s, %Stream{query: query}), do: query_delete(s, query)
defp query_delete(%{queries: queries}, %Query{name: name}) do
  try do
    :ets.delete(queries, name)
  rescue
    ArgumentError ->
      :ok
  else
    true ->
      :ok
  end
end
# Decides how to run a (possibly cached) prepared query:
#   {:ready, query}               - cache holds this exact query ref
#   {:close_parse_describe, query} - a different query occupies the name
#   {:parse_describe, query}      - not cached, or caching disabled
# (without a cache the query is forced onto the unnamed statement).
defp query_prepare(%{queries: nil}, query) do
  {:parse_describe, unnamed(query)}
end
defp query_prepare(%{queries: queries}, query) when queries != nil do
  %Query{name: name, ref: ref} = query
  try do
    :ets.lookup_element(queries, name, 2)
  rescue
    ArgumentError ->
      # Name absent (or table gone): needs parsing and describing.
      {:parse_describe, query}
  else
    ^ref ->
      {:ready, query}
    _ ->
      {:close_parse_describe, query}
  end
end
end
| 36.281615 | 108 | 0.612052 |
9e1c868f1f3b00ec7ddfbc734e1a915af68f45a7 | 167 | ex | Elixir | lib/scrip/error.ex | maartenvanvliet/scrip | cb2a69f12a6a086b03cc131902014241392901c4 | [
"MIT"
] | 2 | 2021-03-24T11:51:53.000Z | 2021-05-16T14:13:20.000Z | lib/scrip/error.ex | maartenvanvliet/scrip | cb2a69f12a6a086b03cc131902014241392901c4 | [
"MIT"
] | 14 | 2020-12-28T04:13:47.000Z | 2022-03-24T04:04:53.000Z | lib/scrip/error.ex | maartenvanvliet/scrip | cb2a69f12a6a086b03cc131902014241392901c4 | [
"MIT"
] | null | null | null | defmodule Scrip.Error do
@type t :: %__MODULE__{
status_code: integer,
message: String.t()
}
defexception [:status_code, :message]
end
| 20.875 | 39 | 0.60479 |
9e1c8d318d771454c96bd649e0e00f867fe78d6b | 1,119 | ex | Elixir | test/support/channel_case.ex | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
] | 538 | 2022-02-02T21:46:52.000Z | 2022-03-29T20:50:34.000Z | test/support/channel_case.ex | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
] | 48 | 2022-02-03T11:46:09.000Z | 2022-03-31T04:44:53.000Z | test/support/channel_case.ex | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
defmodule MediaServerWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use MediaServerWeb.ChannelCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import MediaServerWeb.ChannelCase

      # The default endpoint for testing
      @endpoint MediaServerWeb.Endpoint
    end
  end

  setup tags do
    # Check out a sandboxed DB connection owned by the test process.
    # When the test is not async, `shared: true` lets any spawned process
    # (e.g. channel processes) use the same connection.
    pid = Ecto.Adapters.SQL.Sandbox.start_owner!(MediaServer.Repo, shared: not tags[:async])
    # Return the connection to the pool when the test finishes.
    on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
    :ok
  end
end
| 30.243243 | 92 | 0.741734 |
9e1c98a94f5b44f67903d166fd80ad20c8f0f4c9 | 3,690 | ex | Elixir | snake/lib/model/point.ex | Krajzys/Snakelixir | f369408da254115037009c89abc2dfb5924b792e | [
"MIT"
] | null | null | null | snake/lib/model/point.ex | Krajzys/Snakelixir | f369408da254115037009c89abc2dfb5924b792e | [
"MIT"
] | 2 | 2021-05-15T11:28:20.000Z | 2021-06-02T09:21:58.000Z | snake/lib/model/point.ex | Krajzys/Snakelixir | f369408da254115037009c89abc2dfb5924b792e | [
"MIT"
defmodule Model.Point do
  @moduledoc """
  A single cell on the game board.

  Coordinates are `{x, y}` tuples where `x` is bounded by the board width and
  `y` by the board height, matching the bounds check in
  `check_fireball_collision/6`.
  """

  defstruct color: :red,
            coordinates: {0, 0}

  @doc "Builds a point struct from the given options."
  def new(options \\ []) do
    __struct__(options)
  end

  @doc "Builds a randomly coloured point on a random free cell."
  def new_random(board_width, board_height, points_taken) do
    %__MODULE__{
      color: random_color(),
      coordinates: random_coordinates(board_width, board_height, points_taken)
    }
  end

  @doc "Builds an apple (plain map) on a random free cell."
  def new_apple(id, board_width, board_height, points_taken) do
    %{
      id: id,
      # TODO: use a dedicated apple colour instead of :red
      color: :red,
      coordinates: random_coordinates(board_width, board_height, points_taken)
    }
  end

  @doc """
  Builds a fireball (plain map) at `coordinates`, travelling according to
  `direction_function`, fired by the snake identified by `snake_id`.
  """
  def new_fireball(id \\ 0, coordinates, direction_function, snake_id) do
    %{
      id: id,
      color: :yellow,
      coordinates: coordinates,
      direction: direction_function,
      snake_id: snake_id
    }
  end

  defp random_color do
    Enum.random([:blue, :red, :green, :yellow])
  end

  # Picks a random free cell. `x` ranges over 0..board_width-1 and `y` over
  # 0..board_height-1, consistent with the collision bounds check below.
  # (The previous implementation generated x over the height range and y over
  # the width range, misplacing points on non-square boards.)
  defp random_coordinates(board_width, board_height, points_taken) do
    available_points =
      for x <- 0..(board_width - 1),
          y <- 0..(board_height - 1),
          {x, y} not in points_taken,
          do: {x, y}

    Enum.random(available_points)
  end

  @doc "Returns the point moved one cell down (y + 1)."
  def move_down(point) do
    {x, y} = point.coordinates
    %{point | coordinates: {x, y + 1}}
  end

  @doc "Returns the point moved one cell up (y - 1)."
  def move_up(point) do
    {x, y} = point.coordinates
    %{point | coordinates: {x, y - 1}}
  end

  @doc "Returns the point moved one cell left (x - 1)."
  def move_left(point) do
    {x, y} = point.coordinates
    %{point | coordinates: {x - 1, y}}
  end

  @doc "Returns the point moved one cell right (x + 1)."
  def move_right(point) do
    {x, y} = point.coordinates
    %{point | coordinates: {x + 1, y}}
  end

  @doc """
  Determines what, if anything, a fireball hit.

  Returns `{info, status}` where `status` is one of `:fireball_bounds_end`,
  `:fireball_snake_end`, `:fireball_apple_end`, `:fireball_end` or
  `:fireball_ok`. Snake hits take precedence over apple hits, which take
  precedence over fireball-on-fireball hits.
  """
  def check_fireball_collision(fireball, board_width, board_height, snakes, apples, other_fireballs) do
    fireball_coordinates = fireball.coordinates
    {fx, fy} = fireball_coordinates

    # NOTE(review): the upper bounds are inclusive, so a fireball at
    # x == board_width still counts as on the board even though cells only go
    # up to board_width - 1 — confirm against the callers' convention.
    case fx >= 0 && fx <= board_width && fy >= 0 && fy <= board_height do
      true ->
        # First snake (paired with its colliding point) occupying this cell.
        snake_collided =
          snakes
          |> Enum.map(fn snake ->
            {snake,
             Enum.find(snake.points, nil, fn snake_point ->
               snake_point.coordinates == fireball_coordinates
             end)}
          end)
          |> Enum.filter(fn {_snake, collision} -> collision != nil end)

        snake_collided = if length(snake_collided) > 0, do: Enum.at(snake_collided, 0), else: nil

        # true when any other fireball shares this cell, nil otherwise.
        other_fireballs_collided =
          Enum.find_value(other_fireballs, fn other_fireball ->
            if other_fireball.coordinates == fireball_coordinates, do: true
          end)

        # The hit apple, re-coloured to mark the hit, or nil.
        apple_collided =
          Enum.find_value(apples, nil, fn apple ->
            if apple.coordinates == fireball_coordinates, do: %{apple | color: :apple_hit}
          end)

        {fireball, status} =
          cond do
            snake_collided != nil ->
              {%{fireball | color: :snake_hit}, :fireball_snake_end}

            apple_collided != nil ->
              {%{fireball | color: :apple_hit}, :fireball_apple_end}

            other_fireballs_collided != nil ->
              {%{fireball | color: :fireball_hit}, :fireball_end}

            true ->
              {fireball, :fireball_ok}
          end

        {
          %{
            fireball: fireball,
            snake_collided: snake_collided,
            other_fireballs_collided: other_fireballs_collided,
            apple_collided: apple_collided
          },
          status
        }

      false ->
        {%{fireball: fireball}, :fireball_bounds_end}
    end
  end
end
| 28.828125 | 158 | 0.624932 |
9e1c9ee88271700f728b1c8a2410c8fd312f300e | 1,949 | exs | Elixir | backend/config/test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
] | null | null | null | backend/config/test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
] | null | null | null | backend/config/test.exs | bejolithic/honeyland | 8c4a0d3b56543648d3acb96cc6906df86526743b | [
"Apache-2.0"
import Config

# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :honeyland, Honeyland.Repo,
  username: "postgres",
  password: "postgres",
  database: "honeyland_test#{System.get_env("MIX_TEST_PARTITION")}",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox,
  pool_size: 10

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :honeyland, HoneylandWeb.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4002],
  secret_key_base: "cJMfZ0TGL4Dy0e4kzSn5SrODWbgzWJ7E0rfWMKWvrtdiUjuYDrOQstMY/36V2ccd",
  server: false

# In test we don't send emails.
config :honeyland, Honeyland.Mailer, adapter: Swoosh.Adapters.Test

# Print only warnings and errors during test
config :logger, level: :warn

# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime

# Stub all Tesla HTTP calls during tests.
config :tesla, adapter: Tesla.Mock

# Astarte mocks for tests
config :honeyland, :astarte_device_status_module, Honeyland.Astarte.Device.DeviceStatusMock
config :honeyland, :astarte_storage_usage_module, Honeyland.Astarte.Device.StorageUsageMock
config :honeyland, :astarte_wifi_scan_result_module, Honeyland.Astarte.Device.WiFiScanResultMock
config :honeyland, :astarte_system_status_module, Honeyland.Astarte.Device.SystemStatusMock

# Swap the geolocation/geocoding providers for mocks.
config :honeyland,
  ip_geolocation_provider: Honeyland.Geolocation.IPGeolocationProviderMock,
  wifi_geolocation_provider: Honeyland.Geolocation.WiFiGeolocationProviderMock,
  geocoding_provider: Honeyland.Geolocation.GeocodingProviderMock

# Dummy API keys; the provider modules are mocked above, so these values are
# never sent to a real service.
config :honeyland, Honeyland.Geolocation.Providers.FreeGeoIp, api_key: "test_api_key"
config :honeyland, Honeyland.Geolocation.Providers.GoogleGeolocation, api_key: "test_api_key"
config :honeyland, Honeyland.Geolocation.Providers.GoogleGeocoding, api_key: "test_api_key"
| 38.98 | 96 | 0.811185 |
9e1ccc40b6cbd7b57204a2b647ec85c8d638e17d | 155 | ex | Elixir | web/views/twitter_follower_history_view.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
] | 5 | 2015-11-07T11:27:08.000Z | 2017-06-23T00:54:20.000Z | web/views/twitter_follower_history_view.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
] | null | null | null | web/views/twitter_follower_history_view.ex | Project-ShangriLa/sana_server_phoenix | d2ea4cc023d02e7249ae9267bb2b41a212b79ce7 | [
"Apache-2.0"
defmodule SanaServerPhoenix.TwitterFollowerHistoryView do
  @moduledoc """
  Renders Twitter follower history payloads.
  """

  use SanaServerPhoenix.Web, :view

  # The controller hands over a ready-made payload; pass it through unchanged.
  def render("index.json", %{msg: msg}), do: msg
end
| 17.222222 | 57 | 0.748387 |
9e1cda12bbb3e26b8d41b7b45498640ecbd2b537 | 808 | exs | Elixir | test/exquickbooks/api/account_test.exs | evanob/ExQuickBooks | 8c0f64dd658b1a6edfaa338e0cb62b95b9a853e2 | [
"0BSD"
] | null | null | null | test/exquickbooks/api/account_test.exs | evanob/ExQuickBooks | 8c0f64dd658b1a6edfaa338e0cb62b95b9a853e2 | [
"0BSD"
] | null | null | null | test/exquickbooks/api/account_test.exs | evanob/ExQuickBooks | 8c0f64dd658b1a6edfaa338e0cb62b95b9a853e2 | [
"0BSD"
defmodule ExQuickBooks.API.AccountTest do
  use ExUnit.Case, async: false
  use ExQuickBooks.APICase

  alias ExQuickBooks.API.Account
  alias ExQuickBooks.OAuth.Credentials

  doctest Account

  # Dummy credentials; HTTP requests are stubbed by APICase, so these values
  # are never sent to a real server.
  @creds %Credentials{
    realm_id: "realm_id",
    token: "token"
  }

  test "read_account/3 retrieves an account" do
    # Queue a canned success response for the stubbed HTTP client.
    load_response("account/read_account.json") |> send_response

    assert {:ok, %{"Account" => _}} = Account.read_account(@creds, "account_id")

    # The requested account id must appear in the request path.
    assert %{url: url} = take_request()
    assert String.contains?(url, "/account_id")
  end

  test "read_account/3 recovers from an error" do
    # A 400 response carrying a QuickBooks fault body should surface as an
    # error tuple rather than raising.
    load_response("account/read_account_error.json")
    |> Map.put(:status_code, 400)
    |> send_response

    assert {:error, %{"Fault" => _}} = Account.read_account(@creds, "account_id")
  end
end
| 25.25 | 81 | 0.69802 |
9e1cdc974253c22946f9f96c1bf200155d4d2518 | 1,162 | exs | Elixir | clients/o_auth2/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/o_auth2/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/o_auth2/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
defmodule GoogleApi.OAuth2.V2.Mixfile do
  @moduledoc false

  use Mix.Project

  @version "0.4.0"
  @source_url "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/o_auth2"

  def project do
    [
      app: :google_api_o_auth2,
      version: @version,
      elixir: "~> 1.4",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: @source_url
    ]
  end

  def application do
    [extra_applications: [:logger]]
  end

  defp deps do
    [
      {:google_gax, "~> 0.1.0"},
      {:ex_doc, "~> 0.16", only: :dev},
      {:goth, "~> 0.8.0", only: [:dev, :test]}
    ]
  end

  defp description do
    """
    Obtains end-user authorization grants for use with other Google APIs.
    """
  end

  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => @source_url,
        "Homepage" => "https://developers.google.com/accounts/docs/OAuth2"
      }
    ]
  end
end
| 23.714286 | 107 | 0.591222 |
9e1ce445bfa352522ab45db9164253384e85581f | 1,127 | ex | Elixir | lib/opus/safe.ex | hubertlepicki/opus | e1568bc81f678529e812455534ea663bb1ab4394 | [
"MIT"
] | 303 | 2018-09-11T10:19:01.000Z | 2022-03-11T00:57:44.000Z | lib/opus/safe.ex | hubertlepicki/opus | e1568bc81f678529e812455534ea663bb1ab4394 | [
"MIT"
] | 24 | 2018-09-26T10:38:05.000Z | 2021-09-29T12:51:28.000Z | lib/opus/safe.ex | hubertlepicki/opus | e1568bc81f678529e812455534ea663bb1ab4394 | [
"MIT"
defmodule Opus.Safe do
  @moduledoc false

  # Runs functions or MFA tuples, converting raised exceptions into
  # `{:error, %{error: exception, stacktrace: stacktrace}}` tuples unless the
  # caller opted into re-raising via the `:raise` option.

  import Kernel, except: [apply: 2, apply: 3]

  @doc "Invokes `term` (an MFA tuple or zero-arity function) with default options."
  def apply(term), do: apply(term, %{})

  # A nil function name means there is nothing to call; mirror that with nil.
  def apply({_m, nil, _a}, _), do: nil

  @doc """
  Invokes an `{module, function, args}` tuple or a function.

  Options:

    * `:raise` - `true` re-raises any exception; a list of exception modules
      re-raises only those. Otherwise exceptions become
      `{:error, %{error: e, stacktrace: st}}` tuples.
  """
  def apply({m, f, a}, opts) do
    Kernel.apply(m, f, a)
  rescue
    # `__STACKTRACE__` replaces the deprecated `System.stacktrace/0`.
    e -> handle_exception({e, __STACKTRACE__}, opts)
  end

  def apply(fun, opts) when is_function(fun, 0) do
    fun.()
  rescue
    e -> handle_exception({e, __STACKTRACE__}, opts)
  end

  def apply(fun, arg) when is_function(fun, 1), do: apply(fun, arg, %{})

  # NOTE: no default on `opts`. The previous `opts \\ %{}` default generated
  # an apply/2 head that conflicts with the explicit apply/2 clauses above
  # ("def apply/2 conflicts with defaults" at compile time); the apply/2
  # clause above already delegates with an empty option map.
  def apply(fun, arg, opts) when is_function(fun, 1) do
    fun.(arg)
  rescue
    e -> handle_exception({e, __STACKTRACE__}, opts)
  end

  defp handle_exception({e, stacktrace}, %{raise: true}) do
    reraise e, stacktrace
  end

  defp handle_exception({e, stacktrace} = error, %{raise: [_ | _] = exceptions}) do
    # Re-raise only whitelisted exception types; everything else falls
    # through to the error-tuple conversion below.
    if e.__struct__ in exceptions do
      reraise e, stacktrace
    end

    error_with_stacktrace(error)
  end

  defp handle_exception(error, %{}), do: error_with_stacktrace(error)

  defp error_with_stacktrace({e, stacktrace}), do: {:error, %{error: e, stacktrace: stacktrace}}
end
| 24.5 | 96 | 0.64685 |
9e1ce4fa242bf415d228b332b562771e0efc7dc2 | 1,058 | ex | Elixir | lib/price_tracker/application.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker/application.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
] | null | null | null | lib/price_tracker/application.ex | kevinbenard/price_tracker | 0a7248edd4afca21f00e8b0fc05561f97799c0ca | [
"MIT"
defmodule PriceTracker.Application do
  @moduledoc false

  use Application

  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    # NOTE(review): Supervisor.Spec is deprecated since Elixir 1.5; migrating
    # to module child specs ([PriceTracker.Repo, PriceTrackerWeb.Endpoint])
    # requires Ecto/Phoenix versions that implement child_spec/1 — confirm
    # before changing.
    import Supervisor.Spec

    # Define workers and child supervisors to be supervised
    children = [
      # Start the Ecto repository
      supervisor(PriceTracker.Repo, []),
      # Start the endpoint when the application starts
      supervisor(PriceTrackerWeb.Endpoint, []),
      # Start your own worker by calling: PriceTracker.Worker.start_link(arg1, arg2, arg3)
      # worker(PriceTracker.Worker, [arg1, arg2, arg3]),
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: PriceTracker.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  def config_change(changed, _new, removed) do
    PriceTrackerWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 33.0625 | 90 | 0.723062 |
9e1d1ddecb7920445663d25e720819d991193d62 | 6,165 | ex | Elixir | lib/base32_crockford.ex | faried/base32_crockford | 3f815273818212c94c43a1a233ead2a1a80c3a83 | [
"MIT"
] | null | null | null | lib/base32_crockford.ex | faried/base32_crockford | 3f815273818212c94c43a1a233ead2a1a80c3a83 | [
"MIT"
] | null | null | null | lib/base32_crockford.ex | faried/base32_crockford | 3f815273818212c94c43a1a233ead2a1a80c3a83 | [
"MIT"
defmodule Base32Crockford do
  @moduledoc ~S"""
  Base32-Crockford: base-32 encoding for expressing integer numbers
  in a form that can be conveniently and accurately transmitted
  between humans and computer systems.

  [https://www.crockford.com/wrmg/base32.html](https://www.crockford.com/wrmg/base32.html)

  A symbol set of 10 digits and 22 letters is used:
  `0123456789ABCDEFGHJKMNPQRSTVWXYZ`

  It does not include 4 of the 26 letters: I L O U.

  A check symbol can be appended to a symbol string. 5 additional symbols
  `*~$=U` are used only for encoding or decoding the check symbol.

  When decoding, upper and lower case letters are accepted,
  and i and l will be treated as 1 and o will be treated as 0.
  When encoding, only upper case letters are used.
  """

  @doc ~S"""
  Encodes an integer number into base32-crockford encoded string.

  Checksum can be added to the end of the string if the
  `:checksum` option is set to true.
  For better readability the resulting string can be partitioned by hyphens
  if the `:partitions` option is provided.

  ## Options

    * `:checksum` (boolean) - the check symbol will be added to the end
      of the string. The check symbol encodes the number modulo 37,
      37 being the least prime number greater than 32.
    * `:partitions` (positive integer) - hyphens (-) will be inserted into
      symbol strings to partition a string into manageable pieces,
      improving readability by helping to prevent confusion.

  ## Examples

      iex> Base32Crockford.encode(973_113_317)
      "X011Z5"

  To add a check symbol to the end of the string:

      iex> Base32Crockford.encode(973_113_317, checksum: true)
      "X011Z5$"

  To partition a resulting string into pieces:

      iex> Base32Crockford.encode(973_113_317, partitions: 2)
      "X01-1Z5"
      iex> Base32Crockford.encode(973_113_317, partitions: 3)
      "X0-11-Z5"
      iex> Base32Crockford.encode(973_113_317, partitions: 4)
      "X-0-11-Z5"
  """
  @spec encode(integer, keyword) :: binary
  def encode(number, opts \\ []) when is_integer(number) do
    init_encoding(number, opts)
    |> base10to32(number)
    |> to_string
    |> partition(opts)
  end

  @doc ~S"""
  Decodes base32-crockford encoded string into integer number.

  Upper and lower case letters are accepted, and i and l will be treated as 1
  and o will be treated as 0.
  Hyphens are ignored during decoding.

  ## Options

    * `:checksum` (boolean) - the last symbol will be considered as check symbol
      and extracted from the encoded string before decoding. It then will be
      compared with a check symbol calculated from a decoded number.

  ## Examples

      iex> Base32Crockford.decode("X011Z5")
      {:ok, 973113317}

      iex> Base32Crockford.decode("XoIlZ5")
      {:ok, 973113317}

      iex> Base32Crockford.decode("X01-1Z5")
      {:ok, 973113317}

      iex> Base32Crockford.decode("X011Z5$", checksum: true)
      {:ok, 973113317}

      iex> Base32Crockford.decode("X011Z5=", checksum: true)
      :error
  """
  @spec decode(binary, keyword) :: {:ok, integer} | :error
  def decode(binary, opts \\ []) when is_binary(binary) do
    # Normalize (strip hyphens, upcase) and reverse so that each character's
    # list index equals its power of 32.
    {chars, checksum} = binary
    |> String.replace("-", "")
    |> String.upcase
    |> String.reverse
    |> String.to_charlist
    |> init_decoding(opts)

    values = chars
    |> Enum.with_index
    |> Enum.map(&base32to10/1)

    # Any invalid character decodes to :error and poisons the whole result.
    case Enum.filter(values, &(&1 == :error)) do
      [] ->
        Enum.sum(values)
        |> check(checksum)

      _ -> :error
    end
  end

  @doc ~S"""
  Similar to `decode/2` but raises `ArgumentError` if a checksum is invalid or
  an invalid character is present in the string.

  ## Options

  Accepts the same options as `decode/2`.

  ## Examples

      iex> Base32Crockford.decode!("X011Z5")
      973113317
  """
  @spec decode!(binary, keyword) :: integer
  def decode!(binary, opts \\ []) when is_binary(binary) do
    case decode(binary, opts) do
      {:ok, number} -> number
      :error ->
        raise ArgumentError, "contains invalid character or checksum does not match"
    end
  end

  # Seeds the digit accumulator: when a checksum is requested, the check
  # symbol is computed up front and becomes the last (rightmost) character.
  defp init_encoding(number, opts) do
    if Keyword.get(opts, :checksum, false) do
      [calculate_checksum(number)]
    else
      []
    end
  end

  # Splits off the trailing check symbol (head of the reversed charlist) when
  # checksum verification is requested.
  defp init_decoding(chars, opts) do
    if Keyword.get(opts, :checksum, false) do
      [checksum | chars] = chars
      {chars, checksum}
    else
      {chars, nil}
    end
  end

  # Repeated divide-by-32, prepending each remainder's symbol. The first two
  # clauses ensure 0 still encodes to "0" (or "00" when a "0" check symbol
  # was seeded).
  defp base10to32([], 0), do: '0'
  defp base10to32('0', 0), do: '00'
  defp base10to32(chars, 0), do: chars

  defp base10to32(chars, number) do
    reminder = rem(number, 32)
    chars = [enc(reminder) | chars]
    number = div(number, 32)
    base10to32(chars, number)
  end

  # The input charlist is reversed, so the index is the power of 32.
  # Propagates :error from dec/1 via the `with` fall-through.
  defp base32to10({char, power}) do
    with {:ok, value} <- dec(char) do
      value * :math.pow(32, power) |> round
    end
  end

  # Verifies the decoded number against the extracted check symbol (if any).
  defp check(number, nil), do: {:ok, number}

  defp check(number, checksum) do
    case calculate_checksum(number) do
      ^checksum ->
        {:ok, number}

      _ -> :error
    end
  end

  # Inserts hyphens to split the string into `count` roughly equal pieces;
  # 0 or 1 partitions leaves the string untouched.
  defp partition(binary, opts) do
    case Keyword.get(opts, :partitions, 0) do
      count when count in [0, 1] ->
        binary

      count ->
        split([], binary, count)
        |> Enum.reverse
        |> Enum.join("-")
    end
  end

  defp split(parts, binary, 1), do: [binary | parts]

  defp split(parts, binary, count) do
    len = div(String.length(binary), count)
    {part, rest} = String.split_at(binary, len)
    split([part | parts], rest, count - 1)
  end

  # The check symbol encodes the number modulo 37 (the least prime greater
  # than 32), hence the 5 extra symbols beyond the base alphabet.
  defp calculate_checksum(number) do
    reminder = rem(number, 37)
    enc(reminder)
  end

  encoding_symbols = '0123456789ABCDEFGHJKMNPQRSTVWXYZ'
  check_symbols = '*~$=U'

  # Compile-time generation: one enc/1 clause per symbol (value 0..36 -> char).
  encoding_alphabet = Enum.with_index(encoding_symbols ++ check_symbols)

  for {encoding, value} <- encoding_alphabet do
    defp enc(unquote(value)), do: unquote(encoding)
  end

  # One dec/1 clause per base symbol (char -> value 0..31); check symbols are
  # deliberately not decodable as ordinary digits.
  decoding_alphabet = Enum.with_index(encoding_symbols)

  for {encoding, value} <- decoding_alphabet do
    defp dec(unquote(encoding)), do: {:ok, unquote(value)}
  end

  # Human-friendly aliases: O reads as 0, I and L read as 1.
  defp dec(79), do: {:ok, 0} # O
  defp dec(73), do: {:ok, 1} # I
  defp dec(76), do: {:ok, 1} # L
  defp dec(_), do: :error
end
| 27.522321 | 90 | 0.657583 |
9e1d7f637368227b44fa5b92389b125b31658edf | 4,431 | ex | Elixir | lib/talib/bollinger_band.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | lib/talib/bollinger_band.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
] | null | null | null | lib/talib/bollinger_band.ex | buurzx/talib | 9341dafd2dc0aa14c34b659b82375723ff5a37fc | [
"MIT"
defmodule Talib.BollingerBand do
  alias Talib.SMA
  alias Talib.Average
  require OK
  require Logger

  @moduledoc ~S"""
  Defines a Bollinger bands.

  ## History

  Version: 1.0
  https://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:bollinger_bands
  Audited by:

  | Name         | Title             |
  | :----------- | :---------------- |
  |              |                   |
  """

  @typedoc """
  Defines a Bollinger Band price volatility.

  * :period - Period used to calculate SMA, typically 20
  * :deviation - Multiplier to standard deviation from SMA typically 2
  * :values - List of values resulting from the calculation {upper, middle, lower}
  """
  @type t :: %Talib.BollingerBand{
          period: integer,
          deviation: integer,
          values: [number]
        }

  defstruct period: 0,
            deviation: 0,
            values: []

  @doc """
  Gets the BBand of a list.

  The return tuple looks like the following: {MACD, MACD Signal}.
  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex>Talib.BollingerBand.from_list([1, 2, 3, 4, 5, 6], 3, 2)
      {:ok, %Talib.BollingerBand{
        period: 3,
        deviation: 2,
        values: [
          {nil, nil, nil},
          {nil, nil, nil},
          {3.0, 2.0, 1.0},
          {4.6329931618554525, 3.0, 1.367006838144548},
          {5.6329931618554525, 4.0, 2.367006838144548},
          {6.6329931618554525, 5.0, 3.367006838144548}
        ]
      }}

      iex>Talib.BollingerBand.from_list([], 3, 2)
      {:error, :no_data}
  """
  @spec from_list([number], integer, integer) ::
          {:ok, Talib.BollingerBand.t()}
          | {:error, atom}
  def from_list(data, period \\ 20, deviation \\ 2),
    do: calculate(data, period, deviation)

  @doc """
  Gets the BBand of a list.

  The return tuple looks like the following: {Upper Band, Middle, Lower Band}.
  Raises `NoDataError` if the given list is an empty list.

  ## Examples

      iex>Talib.BollingerBand.from_list!([1, 2, 3], 3, 2)
      %Talib.BollingerBand{
        deviation: 2,
        period: 3,
        values: [
          {nil, nil, nil},
          {nil, nil, nil},
          {3.0, 2.0, 1.0}
        ]
      }

      iex>Talib.BollingerBand.from_list!([], 20, 2)
      ** (NoDataError) no data error
  """
  @spec from_list!([number], integer, integer) ::
          Talib.BBand.t()
          | no_return
  def from_list!(data, period \\ 20, deviation \\ 2) do
    case calculate(data, period, deviation) do
      {:ok, result} -> result
      {:error, :no_data} -> raise NoDataError
    end
  end

  # A missing middle value (SMA not yet available) or missing deviation
  # window yields an all-nil band point.
  defp calculate_bband_point(mid, _stddev, _deviation) when is_nil(mid) do
    {nil, nil, nil}
  end

  defp calculate_bband_point(_mid, stddev, _deviation) when is_nil(stddev) do
    {nil, nil, nil}
  end

  # Upper/lower bands sit `deviation` standard deviations above/below the mid.
  defp calculate_bband_point(mid, stddev, deviation) when is_float(stddev) and is_float(mid) do
    band = stddev * deviation
    {mid + band, mid, mid - band}
  end

  # Given a raw window of values, compute its standard deviation first.
  defp calculate_bband_point(mid, stddev_series, deviation) when is_list(stddev_series) do
    stddev = Average.deviation!(stddev_series)
    calculate_bband_point(mid, stddev, deviation)
  end

  @doc false
  @spec calculate([number], integer, integer) ::
          {:ok, Talib.BollingerBand.t()}
          | {:error, atom}
  defp calculate(data, period, deviation) do
    OK.try do
      %SMA{values: middle_band} <- SMA.from_list(data, period)

      # Build the sliding window of `period` values ending at each element:
      # reverse, chunk with step 1 (padding the tail chunk with nil), then
      # reverse the chunk list and each window back into original order.
      bband_ =
        data
        |> Enum.reverse()
        |> Enum.chunk_every(period, 1, [nil])
        |> Enum.reverse()
        |> Enum.map(&Enum.reverse(&1))

      # Left-pad with all-nil windows so the window list lines up 1:1 with
      # the SMA values (the first period-1 positions have no full window).
      deficit = length(data) - length(bband_)

      empty =
        Stream.cycle([nil])
        |> Enum.take(period)

      bband =
        Stream.cycle([empty])
        |> Enum.take(deficit)
        |> Kernel.++(bband_)
        |> Enum.zip(middle_band)
        |> Enum.map(fn {series, m} -> calculate_bband_point(m, series, deviation) end)
    after
      {:ok,
       %Talib.BollingerBand{
         period: period,
         deviation: deviation,
         values: bband
       }}
    rescue
      :no_data -> {:error, :no_data}
    end
  end
end
| 29.151316 | 95 | 0.581584 |
9e1d829972c57a40edf809f992b5068e63a9ebc6 | 538 | ex | Elixir | lib/rocketpay_web/views/users_view.ex | lucascprazeres/rocketpay | 1717d1882b089061990c3cf9b0646839f99e935a | [
"MIT"
] | 2 | 2021-02-26T13:03:29.000Z | 2021-02-28T17:08:22.000Z | lib/rocketpay_web/views/users_view.ex | lucascprazeres/rocketpay | 1717d1882b089061990c3cf9b0646839f99e935a | [
"MIT"
] | null | null | null | lib/rocketpay_web/views/users_view.ex | lucascprazeres/rocketpay | 1717d1882b089061990c3cf9b0646839f99e935a | [
"MIT"
defmodule RocketpayWeb.UsersView do
  @moduledoc """
  Serializes user payloads for the users controller.
  """

  alias Rocketpay.Account
  alias Rocketpay.User

  @doc """
  Renders the JSON body for a successfully created user, exposing only the
  whitelisted user and account fields.
  """
  def render("create.json", %{user: %User{account: %Account{} = account} = user}) do
    %{
      message: "User created",
      user: %{
        id: user.id,
        name: user.name,
        nickname: user.nickname,
        account: %{
          id: account.id,
          balance: account.balance
        }
      }
    }
  end
end
| 18.551724 | 35 | 0.451673 |
9e1d9ff8f4420fe11b1d10019981e15037e3419a | 1,657 | ex | Elixir | ros/ros_ui_admin/lib/ros_ui_admin_web.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 1 | 2019-07-01T18:47:28.000Z | 2019-07-01T18:47:28.000Z | ros/ros_ui_admin/lib/ros_ui_admin_web.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
] | 4 | 2020-07-17T16:57:18.000Z | 2021-05-09T23:50:52.000Z | ros/ros_ui_admin/lib/ros_ui_admin_web.ex | kujua/elixir-handbook | 4185ad8da7f652fdb59c799dc58bcb33fda10475 | [
"Apache-2.0"
defmodule Ros.AdminWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.

  This can be used in your application as:

      use Ros.AdminWeb, :controller
      use Ros.AdminWeb, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  # Quoted block injected into every controller via `use Ros.AdminWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: Ros.AdminWeb

      import Plug.Conn
      import Ros.AdminWeb.Gettext
      alias Ros.AdminWeb.Router.Helpers, as: Routes
    end
  end

  # Quoted block injected into every view.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/ros_ui_admin_web/templates",
        namespace: Ros.AdminWeb

      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]

      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML

      import Ros.AdminWeb.ErrorHelpers
      import Ros.AdminWeb.Gettext
      alias Ros.AdminWeb.Router.Helpers, as: Routes
    end
  end

  # Quoted block injected into the router.
  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
    end
  end

  # Quoted block injected into every channel.
  def channel do
    quote do
      use Phoenix.Channel
      import Ros.AdminWeb.Gettext
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 23.671429 | 83 | 0.687387 |
9e1dadbbd78149950abfb218a302386d809ee9e4 | 90 | exs | Elixir | test/from_the_ashes_web/views/page_view_test.exs | hazen/from_the_ashes | 05b4468b6ce5ff0225ff2fed5e27d6055bf3d852 | [
"MIT"
] | null | null | null | test/from_the_ashes_web/views/page_view_test.exs | hazen/from_the_ashes | 05b4468b6ce5ff0225ff2fed5e27d6055bf3d852 | [
"MIT"
] | 1 | 2021-03-09T22:08:11.000Z | 2021-03-09T22:08:11.000Z | test/from_the_ashes_web/views/page_view_test.exs | hazen/from_the_ashes | 05b4468b6ce5ff0225ff2fed5e27d6055bf3d852 | [
"MIT"
defmodule FromTheAshesWeb.PageViewTest do
  # No page-specific assertions yet; compiling the view through the ConnCase
  # template is the only check performed.
  use FromTheAshesWeb.ConnCase, async: true
end
| 22.5 | 43 | 0.844444 |
9e1de1f19f6bd8685e8a49d9b6999285172d399e | 2,199 | ex | Elixir | clients/service_control/lib/google_api/service_control/v1/model/resource_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/resource_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_control/lib/google_api/service_control/v1/model/resource_info.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceControl.V1.Model.ResourceInfo do
  @moduledoc """
  Describes a resource associated with this operation.

  ## Attributes

  *   `resourceContainer` (*type:* `String.t`, *default:* `nil`) - The identifier of the parent of this resource instance.
      Must be in one of the following formats:
          - “projects/<project-id or project-number>”
          - “folders/<folder-id>”
          - “organizations/<organization-id>”
  *   `resourceLocation` (*type:* `String.t`, *default:* `nil`) - The location of the resource. If not empty, the resource will be checked
      against location policy. The value must be a valid zone, region or
      multiregion. For example: "europe-west4" or "northamerica-northeast1-a"
  *   `resourceName` (*type:* `String.t`, *default:* `nil`) - Name of the resource. This is used for auditing purposes.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :resourceContainer => String.t(),
          :resourceLocation => String.t(),
          :resourceName => String.t()
        }

  # field/1 comes from GoogleApi.Gax.ModelBase and registers the JSON keys.
  field(:resourceContainer)
  field(:resourceLocation)
  field(:resourceName)
end
# Delegate Poison decoding to the model helpers generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.ServiceControl.V1.Model.ResourceInfo do
  def decode(value, options) do
    GoogleApi.ServiceControl.V1.Model.ResourceInfo.decode(value, options)
  end
end

# Encode via the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceControl.V1.Model.ResourceInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.271186 | 138 | 0.714416 |
9e1dee27d06230064a46763f8bde857d4d95d0af | 206 | exs | Elixir | .credo.exs | Madumo/credo_demo_plugin | 7e746666e8412388a8d351c7fc2a3a4bab3cf788 | [
"MIT"
] | null | null | null | .credo.exs | Madumo/credo_demo_plugin | 7e746666e8412388a8d351c7fc2a3a4bab3cf788 | [
"MIT"
] | null | null | null | .credo.exs | Madumo/credo_demo_plugin | 7e746666e8412388a8d351c7fc2a3a4bab3cf788 | [
"MIT"
] | null | null | null | %{
configs: [
%{
name: "default",
requires: ["lib/"],
checks: [
{Credo.Check.Readability.ModuleDoc, false},
{CredoDemoPlugin.BetterModuleDoc, []}
]
}
]
}
| 15.846154 | 51 | 0.485437 |
9e1e3bd80e7f06591f1c7e8321116d577689ff8e | 3,913 | exs | Elixir | lib/elixir/test/elixir/process_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 243 | 2020-02-03T03:48:51.000Z | 2021-11-08T12:56:25.000Z | lib/elixir/test/elixir/process_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/elixir/test/elixir/process_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-11-25T02:22:55.000Z | 2020-11-25T02:22:55.000Z | Code.require_file("test_helper.exs", __DIR__)
defmodule ProcessTest do
  use ExUnit.Case, async: true

  doctest Process

  # put/2 returns the previously stored value (or nil); get_keys/1 returns
  # the keys currently mapped to the given value.
  test "dictionary" do
    assert Process.put(:foo, :bar) == nil
    assert Process.put(:foo, :baz) == :bar
    assert Enum.member?(Process.get_keys(), :foo)
    refute Enum.member?(Process.get_keys(), :bar)
    refute Enum.member?(Process.get_keys(), :baz)
    assert Process.get_keys(:bar) == []
    assert Process.get_keys(:baz) == [:foo]
    assert Process.get(:foo) == :baz
    assert Process.delete(:foo) == :baz
    assert Process.get(:foo) == nil
  end

  test "group_leader/2 and group_leader/0" do
    # The spawned process only needs to outlive this test while acting as
    # the group leader of the test process.
    another = spawn_link(fn -> Process.sleep(1000) end)
    assert Process.group_leader(self(), another)
    assert Process.group_leader() == another
  end

  # In contrast with other inlined functions,
  # it is important to test that monitor/1 is inlined,
  # this way we gain the monitor receive optimisation.
  test "monitor/1 is inlined" do
    assert expand(quote(do: Process.monitor(pid())), __ENV__) ==
             quote(do: :erlang.monitor(:process, pid()))
  end

  test "sleep/1" do
    assert Process.sleep(0) == :ok
  end

  test "info/2" do
    pid = spawn(fn -> Process.sleep(1000) end)
    assert Process.info(pid, :priority) == {:priority, :normal}
    assert Process.info(pid, [:priority]) == [priority: :normal]

    # info/2 on a dead process returns nil regardless of the item shape.
    Process.exit(pid, :kill)
    assert Process.info(pid, :backtrace) == nil
    assert Process.info(pid, [:backtrace, :status]) == nil
  end

  test "info/2 with registered name" do
    pid = spawn(fn -> nil end)
    Process.exit(pid, :kill)
    assert Process.info(pid, :registered_name) == nil
    assert Process.info(pid, [:registered_name]) == nil

    # A live but unregistered process reports an empty registered_name.
    assert Process.info(self(), :registered_name) == {:registered_name, []}
    assert Process.info(self(), [:registered_name]) == [registered_name: []]

    Process.register(self(), __MODULE__)
    assert Process.info(self(), :registered_name) == {:registered_name, __MODULE__}
    assert Process.info(self(), [:registered_name]) == [registered_name: __MODULE__]
  end

  test "send_after/3 sends messages once expired" do
    Process.send_after(self(), :hello, 10)
    assert_receive :hello
  end

  test "send_after/4 with absolute time sends message once expired" do
    time = System.monotonic_time(:millisecond) + 10
    Process.send_after(self(), :hello, time, abs: true)
    assert_receive :hello
  end

  test "send_after/3 returns a timer reference that can be read or cancelled" do
    timer = Process.send_after(self(), :hello, 100_000)
    refute_received :hello
    assert is_integer(Process.read_timer(timer))
    assert is_integer(Process.cancel_timer(timer))

    # Once the timer has fired, read/cancel report false.
    timer = Process.send_after(self(), :hello, 0)
    assert_receive :hello
    assert Process.read_timer(timer) == false
    assert Process.cancel_timer(timer) == false

    # Async cancellation delivers the result as a message instead.
    timer = Process.send_after(self(), :hello, 100_000)
    assert Process.cancel_timer(timer, async: true)
    assert_receive {:cancel_timer, ^timer, result}
    assert is_integer(result)
  end

  test "exit(pid, :normal) does not cause the target process to exit" do
    Process.flag(:trap_exit, true)

    pid =
      spawn_link(fn ->
        receive do
          :done -> nil
        end
      end)

    true = Process.exit(pid, :normal)
    refute_receive {:EXIT, ^pid, :normal}
    assert Process.alive?(pid)

    # now exit the process for real so it doesn't hang around
    true = Process.exit(pid, :abnormal)
    assert_receive {:EXIT, ^pid, :abnormal}
    refute Process.alive?(pid)
  end

  test "exit(self(), :normal) causes the calling process to exit" do
    Process.flag(:trap_exit, true)
    pid = spawn_link(fn -> Process.exit(self(), :normal) end)
    assert_receive {:EXIT, ^pid, :normal}
    refute Process.alive?(pid)
  end

  # Macro-expands `expr` in `env`, returning only the expanded AST.
  defp expand(expr, env) do
    {expr, _env} = :elixir_expand.expand(expr, env)
    expr
  end
end
| 31.304 | 84 | 0.671352 |
9e1e4def41db93cfeaec7596ec87ecb9980eb31e | 17 | ex | Elixir | testData/org/elixir_lang/parser_definition/literal_words_line_parsing_test_case/Braces.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/literal_words_line_parsing_test_case/Braces.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/literal_words_line_parsing_test_case/Braces.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | ~W{{\}[]<>"/()|'} | 17 | 17 | 0.058824 |
9e1e6cccf08b593a7bd14b0e221f1d2d8e9966da | 299 | exs | Elixir | .credo.exs | Ovyerus/vexil | 48d408efe8520fb8c90639aff51fa134d57876a0 | [
"MIT"
] | 9 | 2021-09-01T13:29:23.000Z | 2021-09-06T03:15:46.000Z | .credo.exs | Ovyerus/vexil | 48d408efe8520fb8c90639aff51fa134d57876a0 | [
"MIT"
] | null | null | null | .credo.exs | Ovyerus/vexil | 48d408efe8520fb8c90639aff51fa134d57876a0 | [
"MIT"
# Credo configuration: one "default" profile covering lib/, priv/ and test/
# with coloured output; the two refactor checks below are disabled.
%{
  configs: [
    %{
      name: "default",
      files: %{
        included: ["lib/", "priv/", "test/"],
        excluded: []
      },
      color: true,
      checks: [
        {Credo.Check.Refactor.CyclomaticComplexity, false},
        {Credo.Check.Refactor.Nesting, false}
      ]
    }
  ]
}
| 17.588235 | 59 | 0.461538 |
9e1f1c5fafb16c77a75ae2760afce5441c1cb1fe | 1,278 | exs | Elixir | mix.exs | h3poteto/slack_logger_backend | 537aee5328963d5f4b8d22c48a932d9e0a09f501 | [
"MIT"
] | 34 | 2016-03-02T12:15:01.000Z | 2020-06-22T02:13:06.000Z | mix.exs | h3poteto/slack_logger_backend | 537aee5328963d5f4b8d22c48a932d9e0a09f501 | [
"MIT"
] | 6 | 2016-06-03T18:05:01.000Z | 2022-02-07T08:20:39.000Z | mix.exs | h3poteto/slack_logger_backend | 537aee5328963d5f4b8d22c48a932d9e0a09f501 | [
"MIT"
defmodule SlackLoggerBackend.Mixfile do
  @moduledoc false

  use Mix.Project

  # Mix project definition (see `mix help compile.app`).
  def project do
    [
      app: :slack_logger_backend,
      description: "A logger backend for posting errors to Slack.",
      version: "0.1.19",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test],
      package: package()
    ]
  end

  # OTP application configuration: runtime applications plus the entry module.
  def application do
    [
      applications: [:logger, :httpoison, :gen_stage],
      mod: {SlackLoggerBackend, []}
    ]
  end

  # Runtime dependencies plus dev/test/docs-only tooling.
  defp deps do
    [
      {:httpoison, "~> 0.10"},
      {:poison, "~> 2.2 or ~> 3.1"},
      {:gen_stage, "~> 0.11"},
      {:poolboy, "~> 1.5.1"},
      {:excoveralls, "~> 0.5", only: :test},
      {:earmark, "~> 1.0", only: :dev},
      {:ex_doc, "~> 0.14", only: :dev},
      {:dialyxir, "~> 0.3", only: :dev},
      {:bypass, "~> 0.1", only: :test},
      {:inch_ex, "~> 0.5", only: :docs},
      {:credo, "~> 0.5", only: :dev}
    ]
  end

  # Hex package metadata.
  def package do
    [
      files: ["lib", "mix.exs", "README*"],
      licenses: ["MIT"],
      maintainers: ["Craig Paterson"],
      links: %{"Github" => "https://github.com/craigp/slack_logger_backend"}
    ]
  end
end
| 26.625 | 98 | 0.537559 |
9e1f315b9b2eebfb62b1b1a6a69e04e2bab52cc4 | 87 | ex | Elixir | lib/reason_react_exercise_web/views/layout_view.ex | nunocf/reason-react-exercise | b6e6920a596fe436b02a602282750456a7edbdea | [
"MIT"
] | null | null | null | lib/reason_react_exercise_web/views/layout_view.ex | nunocf/reason-react-exercise | b6e6920a596fe436b02a602282750456a7edbdea | [
"MIT"
] | null | null | null | lib/reason_react_exercise_web/views/layout_view.ex | nunocf/reason-react-exercise | b6e6920a596fe436b02a602282750456a7edbdea | [
"MIT"
# Phoenix view backing the shared application layout templates.
defmodule ReasonReactExerciseWeb.LayoutView do
  use ReasonReactExerciseWeb, :view
end
| 21.75 | 46 | 0.862069 |
9e1f541e72521fc69aadcf4aea94e5f22746bf48 | 580 | exs | Elixir | test/lib/canvas_api/base62_uuid_field_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 123 | 2017-04-04T18:15:48.000Z | 2021-04-26T08:04:22.000Z | test/lib/canvas_api/base62_uuid_field_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | null | null | null | test/lib/canvas_api/base62_uuid_field_test.exs | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
defmodule CanvasAPI.Base62UUIDFieldTest do
  use ExUnit.Case, async: true

  alias CanvasAPI.Base62UUIDField, as: Field

  test ".type is a string" do
    assert :string == Field.type()
  end

  test ".cast casts to a string" do
    assert {:ok, "1"} == Field.cast(1)
  end

  test ".dump dumps the value" do
    assert {:ok, 1} == Field.dump(1)
  end

  test ".load loads the value" do
    assert {:ok, 1} == Field.load(1)
  end

  test ".autogenerate generates a base 62 UUID" do
    # Base 62 UUIDs are fixed-width: always 22 characters.
    assert 22 == Field.autogenerate() |> String.length()
  end
end
| 22.307692 | 60 | 0.689655 |
9e1f5eb5ccca5ca2142b11ccf82f7dae0b353380 | 761 | ex | Elixir | lib/ref_web/live/comment_live/index.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref_web/live/comment_live/index.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref_web/live/comment_live/index.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
defmodule RefWeb.CommentLive.Index do
  @moduledoc """
  LiveView for creating comments on a single post.
  """

  use RefWeb, :live_view

  # NOTE(review): the original module aliased Ref.Timeline, Ref.Timeline.Post,
  # Ref.Users, Pow.Plug.Session, RefWeb.SocketAuth, Plug.Conn and
  # Pow.CredentialsCache without using any of them (each triggered an
  # "unused alias" compiler warning); they have been removed.
  alias Ref.Timeline.Comment

  @impl true
  # Expects the post id in the URL params and the authenticated user's id in
  # the session; both are kept in assigns for the lifetime of the view.
  def mount(%{"post_id" => post_id} = _params, %{"current_user_id" => user_id} = _session, socket) do
    {:ok, assign(socket, user_id: user_id, post_id: post_id), temporary_assigns: [posts: []]}
  end

  @impl true
  def handle_params(params, _url, socket) do
    {:noreply, apply_action(socket, socket.assigns.live_action, params)}
  end

  # :new action — set the page title and prepare an empty Comment struct
  # to back the form.
  defp apply_action(socket, :new, _params) do
    socket
    |> assign(:page_title, "New Comment")
    |> assign(:comment, %Comment{})
  end
end
| 20.567568 | 98 | 0.701708 |
9e1f72f0d5a5843331987475ee1c89184f2ceaf6 | 896 | ex | Elixir | clients/policy_simulator/lib/google_api/policy_simulator/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/policy_simulator/lib/google_api/policy_simulator/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/policy_simulator/lib/google_api/policy_simulator/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PolicySimulator.V1 do
  @moduledoc """
  API client metadata for GoogleApi.PolicySimulator.V1.
  """

  # Revision date of the discovery document this client was generated from.
  @discovery_revision "20220305"

  @doc "Returns the discovery-document revision used to generate this client."
  @spec discovery_revision() :: String.t()
  def discovery_revision, do: @discovery_revision
end
| 33.185185 | 74 | 0.762277 |
9e1f78a38b4eb80c3ecc1a2162f798bfcb667955 | 894 | ex | Elixir | lib/slacker/web_api.ex | yalabot/slacker | 3cf4f61c9aa9d74837859568937e171b6a5ba4b3 | [
"MIT"
] | 1 | 2016-07-30T10:56:24.000Z | 2016-07-30T10:56:24.000Z | lib/slacker/web_api.ex | yalabot/slacker | 3cf4f61c9aa9d74837859568937e171b6a5ba4b3 | [
"MIT"
] | null | null | null | lib/slacker/web_api.ex | yalabot/slacker | 3cf4f61c9aa9d74837859568937e171b6a5ba4b3 | [
"MIT"
defmodule Slacker.WebAPI do
  @moduledoc """
  Minimal client for the Slack Web API, built on `HTTPoison.Base`.

  Responses are JSON-decoded with top-level keys atomized; a `200` reply
  whose body contains `"ok": true` yields `{:ok, body}`, anything else
  `{:error, response}`.
  """

  require Logger

  use HTTPoison.Base

  # Fallback endpoint used when no :url_base is configured for :slacker.
  @default_url_base "https://slack.com/api/"

  @doc """
  POSTs `body` to the Slack API `path`.

  `hackney_opts` defaults to forcing TLS 1.2. Returns `{:ok, body}` for a
  successful (`ok: true`) Slack response, otherwise `{:error, response}`.
  """
  def post(path, body, headers \\ [], hackney_opts \\ [ssl: [{:versions, [:"tlsv1.2"]}]]) do
    path
    |> super(body, headers, hackney_opts)
    |> check_response()
  end

  @doc false
  # HTTPoison.Base callback: prefix each request path with the API base URL.
  # The base URL is now read at runtime instead of being frozen into a
  # module attribute at compile time, so configuration changes (and test
  # overrides) take effect without recompiling this module.
  def process_url(path) do
    url_base() <> path
  end

  @doc false
  # HTTPoison.Base callback: decode the JSON body and atomize its top-level
  # keys. On malformed JSON the raw body is logged and returned unchanged.
  #
  # NOTE(review): String.to_atom/1 on server-supplied keys can leak atoms if
  # the API ever returned unbounded key names; Slack's key set is fixed in
  # practice, so the original behaviour is preserved.
  def process_response_body(body) do
    body
    |> Poison.decode!()
    |> Map.new(fn {k, v} -> {String.to_atom(k), v} end)
  rescue
    x in [Poison.SyntaxError] ->
      Logger.error(Exception.message(x))
      Logger.error("body:")
      Logger.error(inspect(body))
      body
  end

  defp url_base do
    Application.get_env(:slacker, :url_base) || @default_url_base
  end

  # A 200 response whose decoded body says ok: true is a success;
  # every other shape (including transport errors) is an error.
  defp check_response({:ok, %{status_code: 200, body: %{ok: true} = body}}), do: {:ok, body}
  defp check_response({_, response}), do: {:error, response}
end
| 24.833333 | 92 | 0.611857 |
9e1f96f8c0b3f7b8aad42158b392ae1fdd5ba2b0 | 1,202 | exs | Elixir | config/prod.secret.exs | holsee/odd_job | 299117ef7cc5c67e8dbe1fc1f47253ff1cda4b2d | [
"MIT"
] | 2 | 2019-09-15T22:55:28.000Z | 2020-01-12T17:39:27.000Z | config/prod.secret.exs | holsee/odd_job | 299117ef7cc5c67e8dbe1fc1f47253ff1cda4b2d | [
"MIT"
] | 2 | 2021-03-09T16:37:48.000Z | 2021-05-10T12:02:31.000Z | config/prod.secret.exs | holsee/odd_job | 299117ef7cc5c67e8dbe1fc1f47253ff1cda4b2d | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config

# Database connection string; mandatory in production — fail fast at boot
# with an actionable message rather than at first query.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """

config :odd_job, OddJob.Repo,
  # ssl: true,
  url: database_url,
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

# Phoenix signing secret; mandatory in production.
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """

config :odd_job_web, OddJobWeb.Endpoint,
  http: [:inet6, port: String.to_integer(System.get_env("PORT") || "4000")],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :odd_job_web, OddJobWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 30.820513 | 76 | 0.725458 |
9e1fb3d6f073f7a0f4e0039d944dc5282e31fea5 | 2,666 | ex | Elixir | clients/games/lib/google_api/games/v1/model/turn_based_match_create_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/turn_based_match_create_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/turn_based_match_create_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1.Model.TurnBasedMatchCreateRequest do
  @moduledoc """
  This is a JSON template for a turn-based match creation request.

  ## Attributes

  *   `autoMatchingCriteria` (*type:* `GoogleApi.Games.V1.Model.TurnBasedAutoMatchingCriteria.t`, *default:* `nil`) - Criteria for auto-matching players into this match.
  *   `invitedPlayerIds` (*type:* `list(String.t)`, *default:* `nil`) - The player ids to invite to the match.
  *   `kind` (*type:* `String.t`, *default:* `games#turnBasedMatchCreateRequest`) - Uniquely identifies the type of this resource. Value is always the fixed string games#turnBasedMatchCreateRequest.
  *   `requestId` (*type:* `String.t`, *default:* `nil`) - A randomly generated numeric ID. This number is used at the server to ensure that the request is handled correctly across retries.
  *   `variant` (*type:* `integer()`, *default:* `nil`) - The variant / mode of the application to be played. This can be any integer value, or left blank. You should use a small number of variants to keep the auto-matching pool as large as possible.
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirrors the JSON wire format; every field is optional.
  @type t :: %__MODULE__{
          :autoMatchingCriteria => GoogleApi.Games.V1.Model.TurnBasedAutoMatchingCriteria.t(),
          :invitedPlayerIds => list(String.t()),
          :kind => String.t(),
          :requestId => String.t(),
          :variant => integer()
        }

  # `field/1,2` (from ModelBase) registers each JSON field; `as:` nests a
  # sub-model, `type: :list` marks list-valued fields.
  field(:autoMatchingCriteria, as: GoogleApi.Games.V1.Model.TurnBasedAutoMatchingCriteria)
  field(:invitedPlayerIds, type: :list)
  field(:kind)
  field(:requestId)
  field(:variant)
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.TurnBasedMatchCreateRequest do
  alias GoogleApi.Games.V1.Model.TurnBasedMatchCreateRequest

  # Delegate JSON decoding to the generated model module.
  def decode(value, options), do: TurnBasedMatchCreateRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.TurnBasedMatchCreateRequest do
  alias GoogleApi.Gax.ModelBase

  # All generated models share the generic ModelBase JSON encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 45.186441 | 250 | 0.731058 |
9e1fb64861cd93792c74d40899a59b3d850daf5d | 621 | ex | Elixir | lib/mastani_server_web/middleware/force_loader.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | lib/mastani_server_web/middleware/force_loader.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | lib/mastani_server_web/middleware/force_loader.ex | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
defmodule MastaniServerWeb.Middleware.ForceLoader do
  @moduledoc """
  Temporary middleware for "load related users"-like situations.

  It defeats dataloader batching — turning the field back into a normal
  N+1 resolver — by making every child resolution's arguments unique.

  NOTE: this should be replaced with a proper "select top N per group"
  solution.
  """

  @behaviour Absinthe.Middleware

  # Inject the parent's id into the arguments so each resolution is
  # distinct and therefore cannot be batched by dataloader.
  def call(%{source: %{id: id}} = resolution, _) do
    arguments = Map.merge(resolution.arguments, %{what_ever: id})
    %{resolution | arguments: arguments}
  end

  # Resolutions that already carry errors pass through untouched.
  # `errors != []` replaces `length(errors) > 0`: same outcome for every
  # input (any non-list/nil value falls through to the catch-all below,
  # which also returns the resolution unchanged) without the O(n) guard.
  def call(%{errors: errors} = resolution, _) when errors != [], do: resolution

  # Everything else passes through unchanged.
  def call(resolution, _) do
    resolution
  end
end
| 28.227273 | 85 | 0.700483 |
9e1fbfe7c79ef033dfd81bbd82d7d0aedf1cd153 | 2,588 | exs | Elixir | mix.exs | ne1ro/tracker_bot | 2dc96c0a8366dbd51e4e5e7b2a54383483c8c774 | [
"MIT"
] | 2 | 2019-10-06T18:08:28.000Z | 2020-05-05T11:58:32.000Z | mix.exs | ne1ro/tracker_bot | 2dc96c0a8366dbd51e4e5e7b2a54383483c8c774 | [
"MIT"
] | 66 | 2017-09-16T08:44:33.000Z | 2021-08-01T07:15:50.000Z | mix.exs | ne1ro/tracker_bot | 2dc96c0a8366dbd51e4e5e7b2a54383483c8c774 | [
"MIT"
defmodule TrackerBot.Mixfile do
  @moduledoc false

  use Mix.Project

  def project do
    [
      app: :tracker_bot,
      version: "0.5.2",
      elixir: "~> 1.5",
      name: "Tracker Bot",
      homepage_url: "https://github.com/ne1ro/tracker_bot",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      # NOTE(review): :elixirc_paths is a *project* setting; it previously
      # lived (ineffectively) in application/0 and has been moved here so
      # test/support really is compiled in the :test environment.
      elixirc_paths: elixirc_paths(Mix.env()),
      escript: [main_module: TrackerBot.CLI],
      docs: [extras: ["README.md"], output: "./doc/app"],
      deps: deps(),
      aliases: aliases(),
      dialyzer: dialyzer()
    ]
  end

  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    [
      mod: {TrackerBot, []},
      extra_applications: ~w(logger cowboy plug)a
    ]
  end

  # This makes sure your factory and any other modules in test/support are compiled
  # when in the test environment.
  defp elixirc_paths(:test), do: ~w(lib test/support)
  defp elixirc_paths(_), do: ~w(lib)

  # Dependencies can be Hex packages, git or path repositories.
  # Type "mix help deps" for more examples and options.
  defp deps do
    [
      {:cowboy, "~> 1.0.0"},
      {:credo, "~> 0.8", only: ~w(dev test)a},
      {:dialyxir, "~> 0.4", only: ~w(dev test)a, runtime: false},
      {:distillery, "~> 1.4"},
      # NOTE(review): :edeliver was listed twice (">= 1.2.9", only: :dev
      # and "~> 1.4"); Mix rejects duplicate dependency names, so only the
      # unrestricted requirement is kept.
      {:edeliver, "~> 1.4"},
      {:effects, "~> 0.1.0"},
      {:eper, "~> 0.94.0", only: :dev},
      {:espec, "~> 1.5.1", only: :test},
      {:ex_doc, "~> 0.11", only: :dev},
      {:ex_machina, "~> 0.6.1", only: ~w(dev test)a},
      {:faker, "~> 0.5", only: :test},
      {:guardsafe, "~> 0.5.0"},
      {:hackney, "~> 1.8"},
      {:httpoison, "~> 0.13.0"},
      {:logger_file_backend, "~> 0.0.9"},
      {:monadex, "~> 1.1.3"},
      {:nadia, "~> 0.4"},
      {:observer_cli, "~> 1.3.1", only: :dev},
      {:plug, "~> 1.3"},
      {:poison, "~> 3.1"},
      {:timex, "~> 3.1"}
    ]
  end

  # Dialyzer's configuration
  def dialyzer,
    do: [
      plt_add_deps: :apps_direct,
      flags: ~w(-Wunmatched_returns -Werror_handling -Wrace_conditions -Wunderspecs
            -Wunknown -Woverspecs -Wspecdiffs)
    ]

  # Convenience mix aliases (e.g. `mix setup`, `mix quality`).
  defp aliases do
    [
      build: "release",
      server: "run",
      quality: ["compile --warnings-as-errors --force", "credo --strict", "dialyzer"],
      setup: [
        "local.hex --force",
        "local.rebar --force",
        "deps.get",
        "compile"
      ]
    ]
  end
end
| 27.242105 | 86 | 0.534389 |
9e1fde6082a4eae163a6f128a9e9e291aaf73e82 | 469 | exs | Elixir | test/models/participation_test.exs | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | test/models/participation_test.exs | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
] | null | null | null | test/models/participation_test.exs | birkand/remote_retro | f7fef13aa8779cd4a8a9fd73d2fbbd5ae537634c | [
"MIT"
defmodule RemoteRetro.ParticipationTest do
  use RemoteRetro.ModelCase, async: true

  alias RemoteRetro.Participation

  test "user_id and retro_id are required" do
    # A changeset built from an empty struct must report both foreign keys
    # as missing; each errors entry is a {message, opts} tuple.
    changeset = Participation.changeset(%Participation{})
    {user_id_error, _} = Keyword.fetch!(changeset.errors, :user_id)
    {retro_id_error, _} = Keyword.fetch!(changeset.errors, :retro_id)
    assert user_id_error == "can't be blank"
    assert retro_id_error == "can't be blank"
  end
end
| 29.3125 | 71 | 0.733475 |
9e1fef624cfb6c89c8df71ff829326e84bb53324 | 1,676 | exs | Elixir | test/chess/moves/pieces/queen_test.exs | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
] | 30 | 2018-06-16T00:41:59.000Z | 2021-03-05T08:39:02.000Z | test/chess/moves/pieces/queen_test.exs | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
] | 221 | 2018-01-15T20:33:13.000Z | 2022-02-16T00:53:35.000Z | test/chess/moves/pieces/queen_test.exs | danbee/chess | c766ecb63ed15cd4a5c7ce4b503641d0222fb69d | [
"MIT"
defmodule Chess.Moves.Pieces.QueenTest do
  use Chess.DataCase

  alias Chess.Moves

  # Boards are maps of "col,row" strings to piece maps; moves are {col, row}
  # tuples on a 0-7 grid.
  test "queens can move in any direction" do
    board = %{"4,5" => %{"type" => "queen", "colour" => "white"}}
    moves = Moves.available(board, {4, 5})

    # Full rank, file and both diagonals from {4, 5}.
    expected_moves = Enum.sort([
      {4, 0}, {4, 1}, {4, 2}, {4, 3}, {4, 4}, {4, 6}, {4, 7},
      {0, 5}, {1, 5}, {2, 5}, {3, 5}, {5, 5}, {6, 5}, {7, 5},
      {0, 1}, {1, 2}, {2, 3}, {3, 4}, {5, 6}, {6, 7},
      {2, 7}, {3, 6}, {5, 4}, {6, 3}, {7, 2},
    ])

    assert Enum.sort(moves) == expected_moves
  end

  test "queens are blocked by another piece of the same colour" do
    # Friendly king at {0,5} and bishop at {5,0} cap the file and rank;
    # their squares are not capturable.
    board = %{
      "0,0" => %{"type" => "queen", "colour" => "white"},
      "0,5" => %{"type" => "king", "colour" => "white"},
      "5,0" => %{"type" => "bishop", "colour" => "white"},
    }
    moves = Moves.available(board, {0, 0})

    expected_moves = Enum.sort([
      {0, 1}, {0, 2}, {0, 3}, {0, 4},
      {1, 0}, {2, 0}, {3, 0}, {4, 0},
      {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {7, 7},
    ])

    assert Enum.sort(moves) == expected_moves
  end

  test "queens can take an opponents piece" do
    # Enemy knight at {0,5} and rook at {5,0}: their squares ARE included.
    board = %{
      "0,0" => %{"type" => "queen", "colour" => "white"},
      "0,5" => %{"type" => "knight", "colour" => "black"},
      "5,0" => %{"type" => "rook", "colour" => "black"},
    }
    moves = Moves.available(board, {0, 0})

    expected_moves = Enum.sort([
      {0, 1}, {0, 2}, {0, 3}, {0, 4}, {0, 5},
      {1, 0}, {2, 0}, {3, 0}, {4, 0}, {5, 0},
      {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {7, 7},
    ])

    assert Enum.sort(moves) == expected_moves
  end

  # Helper returning the standard starting board (currently unused by the
  # tests above, which build their own minimal boards).
  def board do
    Chess.Board.default
  end
end
| 30.472727 | 66 | 0.449881 |
9e201e621a323d07dcb5556497d08c473f271032 | 5,942 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_asset_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_asset_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/creative_asset_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.CreativeAssetMetadata do
  @moduledoc """
  CreativeAssets contains properties of a creative asset file which will be uploaded or has already been uploaded. Refer to the creative sample code for how to upload assets and insert a creative.

  ## Attributes

  *   `assetIdentifier` (*type:* `GoogleApi.DFAReporting.V34.Model.CreativeAssetId.t`, *default:* `nil`) - ID of the creative asset. This is a required field.
  *   `clickTags` (*type:* `list(GoogleApi.DFAReporting.V34.Model.ClickTag.t)`, *default:* `nil`) - List of detected click tags for assets. This is a read-only, auto-generated field. This field is empty for a rich media asset.
  *   `counterCustomEvents` (*type:* `list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t)`, *default:* `nil`) - List of counter events configured for the asset. This is a read-only, auto-generated field and only applicable to a rich media asset.
  *   `detectedFeatures` (*type:* `list(String.t)`, *default:* `nil`) - List of feature dependencies for the creative asset that are detected by Campaign Manager. Feature dependencies are features that a browser must be able to support in order to render your HTML5 creative correctly. This is a read-only, auto-generated field.
  *   `exitCustomEvents` (*type:* `list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t)`, *default:* `nil`) - List of exit events configured for the asset. This is a read-only, auto-generated field and only applicable to a rich media asset.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Numeric ID of the asset. This is a read-only, auto-generated field.
  *   `idDimensionValue` (*type:* `GoogleApi.DFAReporting.V34.Model.DimensionValue.t`, *default:* `nil`) - Dimension value for the numeric ID of the asset. This is a read-only, auto-generated field.
  *   `kind` (*type:* `String.t`, *default:* `dfareporting#creativeAssetMetadata`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#creativeAssetMetadata".
  *   `richMedia` (*type:* `boolean()`, *default:* `nil`) - True if the uploaded asset is a rich media asset. This is a read-only, auto-generated field.
  *   `timerCustomEvents` (*type:* `list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t)`, *default:* `nil`) - List of timer events configured for the asset. This is a read-only, auto-generated field and only applicable to a rich media asset.
  *   `warnedValidationRules` (*type:* `list(String.t)`, *default:* `nil`) - Rules validated during code generation that generated a warning. This is a read-only, auto-generated field.

      Possible values are:
      - "ADMOB_REFERENCED"
      - "ASSET_FORMAT_UNSUPPORTED_DCM"
      - "ASSET_INVALID"
      - "CLICK_TAG_HARD_CODED"
      - "CLICK_TAG_INVALID"
      - "CLICK_TAG_IN_GWD"
      - "CLICK_TAG_MISSING"
      - "CLICK_TAG_MORE_THAN_ONE"
      - "CLICK_TAG_NON_TOP_LEVEL"
      - "COMPONENT_UNSUPPORTED_DCM"
      - "ENABLER_UNSUPPORTED_METHOD_DCM"
      - "EXTERNAL_FILE_REFERENCED"
      - "FILE_DETAIL_EMPTY"
      - "FILE_TYPE_INVALID"
      - "GWD_PROPERTIES_INVALID"
      - "HTML5_FEATURE_UNSUPPORTED"
      - "LINKED_FILE_NOT_FOUND"
      - "MAX_FLASH_VERSION_11"
      - "MRAID_REFERENCED"
      - "NOT_SSL_COMPLIANT"
      - "ORPHANED_ASSET"
      - "PRIMARY_HTML_MISSING"
      - "SVG_INVALID"
      - "ZIP_INVALID"
  """

  use GoogleApi.Gax.ModelBase

  # Struct type mirrors the JSON wire format; every field is optional.
  @type t :: %__MODULE__{
          :assetIdentifier => GoogleApi.DFAReporting.V34.Model.CreativeAssetId.t(),
          :clickTags => list(GoogleApi.DFAReporting.V34.Model.ClickTag.t()),
          :counterCustomEvents => list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t()),
          :detectedFeatures => list(String.t()),
          :exitCustomEvents => list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t()),
          :id => String.t(),
          :idDimensionValue => GoogleApi.DFAReporting.V34.Model.DimensionValue.t(),
          :kind => String.t(),
          :richMedia => boolean(),
          :timerCustomEvents => list(GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent.t()),
          :warnedValidationRules => list(String.t())
        }

  # `field/1,2` (from ModelBase) registers each JSON field; `as:` nests a
  # sub-model, `type: :list` marks list-valued fields.
  field(:assetIdentifier, as: GoogleApi.DFAReporting.V34.Model.CreativeAssetId)
  field(:clickTags, as: GoogleApi.DFAReporting.V34.Model.ClickTag, type: :list)

  field(:counterCustomEvents,
    as: GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent,
    type: :list
  )

  field(:detectedFeatures, type: :list)
  field(:exitCustomEvents, as: GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent, type: :list)
  field(:id)
  field(:idDimensionValue, as: GoogleApi.DFAReporting.V34.Model.DimensionValue)
  field(:kind)
  field(:richMedia)
  field(:timerCustomEvents, as: GoogleApi.DFAReporting.V34.Model.CreativeCustomEvent, type: :list)
  field(:warnedValidationRules, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.CreativeAssetMetadata do
  alias GoogleApi.DFAReporting.V34.Model.CreativeAssetMetadata

  # Delegate JSON decoding to the generated model module.
  def decode(value, options), do: CreativeAssetMetadata.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.CreativeAssetMetadata do
  alias GoogleApi.Gax.ModelBase

  # All generated models share the generic ModelBase JSON encoder.
  def encode(value, options), do: ModelBase.encode(value, options)
end
| 55.018519 | 328 | 0.723662 |
9e20218c45f5c1ee39564c659da4410bd5304431 | 2,002 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_undeploy_processor_version_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_undeploy_processor_version_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta3_undeploy_processor_version_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3UndeployProcessorVersionMetadata do
  @moduledoc """
  The long running operation metadata for the undeploy processor version method.

  ## Attributes

  *   `commonMetadata` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata.t`, *default:* `nil`) - The basic metadata of the long running operation.
  """

  use GoogleApi.Gax.ModelBase

  # Single optional field mirroring the API's JSON shape.
  @type t :: %__MODULE__{
          :commonMetadata =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata.t()
            | nil
        }

  # `field/2` (from ModelBase) registers the JSON field; `as:` nests the
  # CommonOperationMetadata sub-model.
  field(:commonMetadata,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3CommonOperationMetadata
  )
end
defimpl Poison.Decoder,
  for:
    GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3UndeployProcessorVersionMetadata do
  # `@for` names the struct module this implementation targets; delegate
  # decoding to its generated decode/2.
  def decode(value, options), do: @for.decode(value, options)
end
defimpl Poison.Encoder,
  for:
    GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta3UndeployProcessorVersionMetadata do
  # Encoding is uniform across generated models: defer to the Gax base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.517241 | 193 | 0.771229 |
9e2046b59d498a34c8e7442c5666ae302e7fce21 | 2,441 | ex | Elixir | lib/logz/nginx.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | lib/logz/nginx.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | lib/logz/nginx.ex | rciorba/logz | a31250884b703a5e69e40691b075ad56ab0c4fc1 | [
"Unlicense"
] | null | null | null | defmodule Logz.Nginx do
@moduledoc """
Implements a Stream that reads from files and emits maps.
"""
require Logger
defp parse_month(str) do
case str do
"Jan" -> 1
"Feb" -> 2
"Mar" -> 3
"Apr" -> 4
"May" -> 5
"Jun" -> 6
"Jul" -> 7
"Aug" -> 8
"Sep" -> 9
"Oct" -> 10
"Nov" -> 11
"Dec" -> 12
end
end
defp offset(sign, hours, minutes) do
off = String.to_integer(hours) * 3600 + String.to_integer(minutes) * 60
case sign do
"+" -> off
"-" -> -off
end
end
def parse_date!(str) do
case Regex.scan(~r{(\d+)/(\w+)/(\d+):(\d+):(\d+):(\d+) (\+|-)(\d\d)(\d\d)}, str) do
[[_, day, month, year, hour, minute, second, off_sign, off_hour, off_min]] ->
{:ok, date} =
NaiveDateTime.new(
String.to_integer(year),
parse_month(month),
String.to_integer(day),
String.to_integer(hour),
String.to_integer(minute),
String.to_integer(second)
)
tstamp =
NaiveDateTime.add(date, offset(off_sign, off_hour, off_min), :second)
|> NaiveDateTime.diff(~N[1970-01-01 00:00:00], :second)
tstamp
matched ->
throw({:error, matched})
end
end
def parse_request(request) do
case Regex.scan(~r{([a-zA-Z]+) ([^\s]+) [^\"]+}, request) do
[[_, method, uri]] ->
{method, uri}
_ ->
{nil, nil}
end
end
def parse(line) do
# 162.243.6.123 - - [07/Jun/2020:06:40:03 +0000] "GET /blog HTTP/1.1" 301 185 "-" "UA"
addr = ~S{([^\s]*)}
tstamp = ~S{\[(.*)\]}
request = ~S{"(.*)"}
status = ~S{([\d]+)}
size = ~S{([\d]+)}
user_agent = ~s{"(.*)"}
case Regex.scan(
~r/#{addr} - - #{tstamp} #{request} #{status} #{size} ".*" #{user_agent}/,
line
) do
[[_, addr, tstamp, request, status, size, user_agent]] ->
{method, uri} = parse_request(request)
{:ok,
%{
addr: addr,
tstamp: parse_date!(tstamp),
request_method: method,
uri: uri,
request: request,
status: status,
size: size,
user_agent: user_agent
}}
matched ->
# IO.inspect(line)
{:error, {line, matched}}
end
end
def parse!(line) do
{:ok, data} = parse(line)
data
end
end
| 22.813084 | 90 | 0.477263 |
9e205595030d96bd25aa8c766f5c28c7e301054e | 307 | exs | Elixir | priv/repo/migrations/20180509130623_create_users.exs | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 37 | 2018-07-13T14:08:16.000Z | 2021-04-09T15:00:22.000Z | priv/repo/migrations/20180509130623_create_users.exs | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 9 | 2018-07-16T15:24:39.000Z | 2021-09-01T14:21:20.000Z | priv/repo/migrations/20180509130623_create_users.exs | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 3 | 2018-10-05T20:19:25.000Z | 2019-12-05T00:30:01.000Z | defmodule Planga.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add :name, :string
add :remote_id, :string
add :app_id, references(:apps)
timestamps()
end
create index(:users, [:app_id, :remote_id], unique: true)
end
end
| 19.1875 | 61 | 0.654723 |
9e206e1095db2cc96e03aaee4101f94b07aeb2e3 | 203 | exs | Elixir | priv/repo/migrations/20210307143553_add_unique_index_to_profile_id_in_account_opening_requests.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | priv/repo/migrations/20210307143553_add_unique_index_to_profile_id_in_account_opening_requests.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | priv/repo/migrations/20210307143553_add_unique_index_to_profile_id_in_account_opening_requests.exs | andsleonardo/magnemite | 2a06c1520defeb193d718313ad3fc6a50349bc8d | [
"MIT"
] | null | null | null | defmodule Magnemite.Repo.Migrations.AddUniqueIndexToProfileIdInAccountOpeningRequests do
use Ecto.Migration
def change do
create unique_index(:account_opening_requests, [:profile_id])
end
end
| 25.375 | 88 | 0.827586 |
9e20827fe73802ccf9b040a585f4f1d248961567 | 882 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/memcache/lib/google_api/memcache/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Memcache.V1.
  """

  # Revision date of the discovery document this client was generated from.
  @discovery_revision "20210615"

  @doc "Returns the discovery-document revision this client was generated from."
  def discovery_revision, do: @discovery_revision
end
| 32.666667 | 74 | 0.758503 |
9e20dea07a55b073a5907c33eab48e69cf97f945 | 656 | ex | Elixir | recallr/lib/recallr_web/live/modal_component.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | null | null | null | recallr/lib/recallr_web/live/modal_component.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | 1 | 2021-03-24T14:49:29.000Z | 2021-03-24T14:49:29.000Z | recallr/lib/recallr_web/live/modal_component.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | 5 | 2021-03-21T19:45:38.000Z | 2021-03-24T14:53:07.000Z | defmodule RecallrWeb.ModalComponent do
use RecallrWeb, :live_component
@impl true
def render(assigns) do
~L"""
<div id="<%= @id %>" class="phx-modal"
phx-capture-click="close"
phx-window-keydown="close"
phx-key="escape"
phx-target="#<%= @id %>"
phx-page-loading>
<div class="phx-modal-content">
<%= live_patch raw("×"), to: @return_to, class: "phx-modal-close" %>
<%= live_component @socket, @component, @opts %>
</div>
</div>
"""
end
@impl true
def handle_event("close", _, socket) do
{:noreply, push_patch(socket, to: socket.assigns.return_to)}
end
end
| 24.296296 | 82 | 0.592988 |
9e2113449144a45dbc30b2388ba6530e0e169cc4 | 2,648 | ex | Elixir | lib/exq/enqueuer/server.ex | blueshift-labs/exq | 57973ec3e83f9a505fc83385b645bf2e658b8cdd | [
"Apache-2.0"
] | null | null | null | lib/exq/enqueuer/server.ex | blueshift-labs/exq | 57973ec3e83f9a505fc83385b645bf2e658b8cdd | [
"Apache-2.0"
] | null | null | null | lib/exq/enqueuer/server.ex | blueshift-labs/exq | 57973ec3e83f9a505fc83385b645bf2e658b8cdd | [
"Apache-2.0"
] | null | null | null | defmodule Exq.Enqueuer.Server do
@moduledoc """
The Enqueuer is responsible for enqueueing jobs into Redis. It can
either be called directly by the client, or instantiated as a standalone process.
It supports enqueuing immediate jobs, or scheduling jobs in the future.
## Initialization:
* `:name` - Name of target registered process
* `:namespace` - Redis namespace to store all data under. Defaults to "exq".
* `:queues` - Array of currently active queues (TODO: Remove, I suspect it's not needed).
* `:scheduler_poll_timeout` - How often to poll Redis for scheduled / retry jobs.
"""
require Logger
alias Exq.Support.Config
alias Exq.Redis.JobQueue
use GenServer
defmodule State do
defstruct namespace: nil
end
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts, name: server_name(opts[:name]))
end
## ===========================================================
## gen server callbacks
## ===========================================================
def init(opts) do
{:ok, %State{namespace: opts[:namespace]}}
end
def handle_cast({:enqueue, from, queue, worker, args, options}, state) do
response = JobQueue.enqueue(state.namespace, queue, worker, args, options)
GenServer.reply(from, response)
{:noreply, state}
end
def handle_cast({:enqueue_at, from, queue, time, worker, args, options}, state) do
response =
JobQueue.enqueue_at(state.namespace, queue, time, worker, args, options)
GenServer.reply(from, response)
{:noreply, state}
end
def handle_cast({:enqueue_in, from, queue, offset, worker, args, options}, state) do
response =
JobQueue.enqueue_in(state.namespace, queue, offset, worker, args, options)
GenServer.reply(from, response)
{:noreply, state}
end
def handle_call({:enqueue, queue, worker, args, options}, _from, state) do
response = JobQueue.enqueue(state.namespace, queue, worker, args, options)
{:reply, response, state}
end
def handle_call({:enqueue_at, queue, time, worker, args, options}, _from, state) do
response =
JobQueue.enqueue_at(state.namespace, queue, time, worker, args, options)
{:reply, response, state}
end
def handle_call({:enqueue_in, queue, offset, worker, args, options}, _from, state) do
response =
JobQueue.enqueue_in(state.namespace, queue, offset, worker, args, options)
{:reply, response, state}
end
def terminate(_reason, _state) do
:ok
end
# Internal Functions
def server_name(name) do
name = name || Config.get(:name)
"#{name}.Enqueuer" |> String.to_atom()
end
end
| 29.752809 | 93 | 0.659366 |
9e213019d4270655c93380ef55db074790c52b4b | 2,078 | ex | Elixir | lib/codes/codes_k86.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_k86.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_k86.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_K86 do
alias IcdCode.ICDCode
def _K860 do
%ICDCode{full_code: "K860",
category_code: "K86",
short_code: "0",
full_name: "Alcohol-induced chronic pancreatitis",
short_name: "Alcohol-induced chronic pancreatitis",
category_name: "Alcohol-induced chronic pancreatitis"
}
end
def _K861 do
%ICDCode{full_code: "K861",
category_code: "K86",
short_code: "1",
full_name: "Other chronic pancreatitis",
short_name: "Other chronic pancreatitis",
category_name: "Other chronic pancreatitis"
}
end
def _K862 do
%ICDCode{full_code: "K862",
category_code: "K86",
short_code: "2",
full_name: "Cyst of pancreas",
short_name: "Cyst of pancreas",
category_name: "Cyst of pancreas"
}
end
def _K863 do
%ICDCode{full_code: "K863",
category_code: "K86",
short_code: "3",
full_name: "Pseudocyst of pancreas",
short_name: "Pseudocyst of pancreas",
category_name: "Pseudocyst of pancreas"
}
end
def _K8681 do
%ICDCode{full_code: "K8681",
category_code: "K86",
short_code: "81",
full_name: "Exocrine pancreatic insufficiency",
short_name: "Exocrine pancreatic insufficiency",
category_name: "Exocrine pancreatic insufficiency"
}
end
def _K8689 do
%ICDCode{full_code: "K8689",
category_code: "K86",
short_code: "89",
full_name: "Other specified diseases of pancreas",
short_name: "Other specified diseases of pancreas",
category_name: "Other specified diseases of pancreas"
}
end
def _K869 do
%ICDCode{full_code: "K869",
category_code: "K86",
short_code: "9",
full_name: "Disease of pancreas, unspecified",
short_name: "Disease of pancreas, unspecified",
category_name: "Disease of pancreas, unspecified"
}
end
end
| 29.685714 | 63 | 0.602502 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.