Skip to content

feat: upgrade logger #1265

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 16 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions config/prod.exs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,17 @@ import Config
# Do not print debug messages in production
config :logger, level: :info

# Add the CloudWatch logger backend in production
config :logger, backends: [:console, {Cadet.Logger.CloudWatchLogger, :cloudwatch_logger}]

# Configure CloudWatch Logger
#
# NOTE(review): `log_stream` is interpolated when this config file is
# evaluated (config/prod.exs is compile-time config), not at node boot —
# presumably every node started from the same build shares one stream name.
# Confirm this is intended; move to runtime.exs for a per-boot stream name.
config :logger, :cloudwatch_logger,
level: :info,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id],
log_group: "cadet-logs",
log_stream: "#{node()}-#{:os.system_time(:second)}"

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
Expand Down
301 changes: 301 additions & 0 deletions lib/cadet/logger/cloudwatch_logger.ex
Original file line number Diff line number Diff line change
@@ -0,0 +1,301 @@
defmodule Cadet.Logger.CloudWatchLogger do
  @moduledoc """
  A custom Logger backend (`:gen_event` handler) that batches log messages
  and ships them to AWS CloudWatch Logs via `ExAws`.

  Events are buffered in the handler state and flushed either when the buffer
  reaches `@max_buffer_size` entries or every `@flush_interval` milliseconds,
  whichever comes first. Delivery failures are retried up to `@max_retries`
  times. The backend's own failure reports (prefixed with `@failed_message`)
  are filtered out of the buffer to avoid an error -> log -> error loop.
  """

  @behaviour :gen_event
  require Logger

  defstruct [
    # minimum Logger level to forward (nil = forward everything)
    :level,
    # compiled Logger.Formatter format
    :format,
    # metadata keys to include, or :all
    :metadata,
    # CloudWatch log group name
    :log_group,
    # CloudWatch log stream name
    :log_stream,
    # reference of the pending periodic-flush timer
    :timer_ref,
    # pending log events, most recent first
    buffer: [],
    # number of events in `buffer` (avoids O(n) length/1 per message)
    buffer_size: 0
  ]

  @max_buffer_size 1000
  @max_retries 3
  @retry_delay 200
  @flush_interval 5000
  @failed_message "Failed to send log to CloudWatch."

  # Started with explicit options (e.g. from Logger.add_backend/2): merge
  # them over the application environment.
  @impl true
  def init({__MODULE__, opts}) when is_list(opts) do
    config = configure_merge(read_env(), opts)
    {:ok, init(config, %__MODULE__{})}
  end

  # Started by name only (e.g. {__MODULE__, :cloudwatch_logger}): use the
  # application environment as-is.
  def init({__MODULE__, name}) when is_atom(name) do
    {:ok, init(read_env(), %__MODULE__{})}
  end

  # Runtime reconfiguration via Logger.configure_backend/2.
  @impl true
  def handle_call({:configure, options}, state) do
    {:ok, :ok, configure(options, state)}
  end

  def handle_call(_request, state), do: {:ok, :ok, state}

  @impl true
  def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do
    if meet_level?(level, state.level) and not meet_cloudwatch_error?(msg) do
      formatted =
        Logger.Formatter.format(state.format, level, msg, ts, take_metadata(md, state.metadata))

      event = %{
        "timestamp" => timestamp_from_logger_ts(ts),
        "message" => IO.chardata_to_string(formatted)
      }

      buffer = [event | state.buffer]
      size = state.buffer_size + 1

      if size >= @max_buffer_size do
        # Buffer is full: hand it off to a Task so logging never blocks.
        flush_buffer_async(state.log_stream, state.log_group, buffer)
        {:ok, %{state | buffer: [], buffer_size: 0}}
      else
        {:ok, %{state | buffer: buffer, buffer_size: size}}
      end
    else
      {:ok, state}
    end
  end

  # Logger.flush/0 broadcasts :flush to all backends — drain synchronously
  # so callers can rely on pending events having been sent.
  def handle_event(:flush, state) do
    {:ok, flush_now(state)}
  end

  def handle_event(_event, state), do: {:ok, state}

  # Periodic flush driven by Process.send_after/3 (see schedule_flush/1).
  @impl true
  def handle_info(:flush_buffer, state) do
    if state.timer_ref, do: Process.cancel_timer(state.timer_ref)
    state = flush_now(state)
    {:ok, %{state | timer_ref: schedule_flush(@flush_interval)}}
  end

  def handle_info(_msg, state), do: {:ok, state}

  # Best-effort final flush when the backend is removed or Logger stops.
  @impl true
  def terminate(_reason, state) do
    if state.timer_ref, do: Process.cancel_timer(state.timer_ref)
    flush_buffer_sync(state.log_stream, state.log_group, state.buffer)
    :ok
  end

  # Helpers

  defp configure(options, state) do
    config = configure_merge(read_env(), options)
    Application.put_env(:logger, __MODULE__, config)
    init(config, state)
  end

  # :warn was renamed to :warning; normalize so level comparison works on
  # both old and new Logger versions.
  defp normalize_level(lvl) when lvl in [:warn, :warning], do: :warning
  defp normalize_level(lvl), do: lvl

  defp meet_level?(_lvl, nil), do: true

  defp meet_level?(lvl, min) do
    Logger.compare_levels(normalize_level(lvl), normalize_level(min)) != :lt
  end

  # Detects this backend's own failure reports so they are never re-buffered,
  # which would otherwise create an error -> log -> error feedback loop.
  # Logger messages are chardata, so both binaries and iolists must match.
  defp meet_cloudwatch_error?(msg) when is_binary(msg) do
    String.contains?(msg, @failed_message)
  end

  defp meet_cloudwatch_error?(msg) when is_list(msg) do
    msg |> IO.chardata_to_string() |> String.contains?(@failed_message)
  end

  defp meet_cloudwatch_error?(_), do: false

  defp flush_buffer_async(_log_stream, _log_group, []), do: :ok

  defp flush_buffer_async(log_stream, log_group, buffer) do
    Task.start(fn -> send_to_cloudwatch(log_stream, log_group, buffer) end)
  end

  defp flush_buffer_sync(_log_stream, _log_group, []), do: :ok

  defp flush_buffer_sync(log_stream, log_group, buffer) do
    send_to_cloudwatch(log_stream, log_group, buffer)
  end

  # Empties the buffer synchronously (no-op when there is nothing to send).
  defp flush_now(%{buffer: []} = state), do: state

  defp flush_now(state) do
    flush_buffer_sync(state.log_stream, state.log_group, state.buffer)
    %{state | buffer: [], buffer_size: 0}
  end

  defp schedule_flush(interval) do
    Process.send_after(self(), :flush_buffer, interval)
  end

  defp send_to_cloudwatch(log_stream, log_group, buffer) do
    # Ensure the caller already has ExAws authentication configured;
    # otherwise log one error instead of failing on every retry.
    with :ok <- check_exaws_config() do
      log_stream
      |> build_log_operation(log_group, buffer)
      |> send_with_retry()
    end
  end

  defp build_log_operation(log_stream, log_group, buffer) do
    # The headers and body structure can be found in the AWS API documentation:
    # https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
    %ExAws.Operation.JSON{
      http_method: :post,
      service: :logs,
      headers: [
        {"x-amz-target", "Logs_20140328.PutLogEvents"},
        {"content-type", "application/x-amz-json-1.1"}
      ],
      data: %{
        "logGroupName" => log_group,
        "logStreamName" => log_stream,
        # Buffer is newest-first; PutLogEvents requires chronological order.
        "logEvents" => Enum.reverse(buffer)
      }
    }
  end

  # Verifies AWS credentials and region are available either via the :ex_aws
  # application env or the standard AWS environment variables.
  defp check_exaws_config do
    id = Application.get_env(:ex_aws, :access_key_id) || System.get_env("AWS_ACCESS_KEY_ID")

    secret =
      Application.get_env(:ex_aws, :secret_access_key) || System.get_env("AWS_SECRET_ACCESS_KEY")

    region = Application.get_env(:ex_aws, :region) || System.get_env("AWS_REGION")

    cond do
      id in [nil, ""] or secret in [nil, ""] ->
        Logger.error(
          "#{@failed_message} AWS credentials missing. Ensure AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are set or configured in ex_aws."
        )

        :error

      region in [nil, ""] ->
        Logger.error(
          "#{@failed_message} AWS region not configured. Ensure AWS_REGION is set or configured in ex_aws."
        )

        :error

      true ->
        :ok
    end
  end

  defp send_with_retry(operation, retries \\ @max_retries)

  defp send_with_retry(operation, retries) when retries > 0 do
    case request(operation) do
      {:ok, _response} ->
        :ok

      {:error, reason} ->
        Logger.error("#{@failed_message} #{inspect(reason)}. Retrying...")
        # Wait before retrying
        :timer.sleep(@retry_delay)
        send_with_retry(operation, retries - 1)
    end
  end

  defp send_with_retry(_, 0) do
    Logger.error("#{@failed_message} After multiple retries.")
  end

  # Builds the handler state from a keyword config, (re)arming the flush
  # timer. Cancels any previously armed timer so reconfiguration does not
  # leak timers.
  defp init(config, state) do
    if state.timer_ref, do: Process.cancel_timer(state.timer_ref)

    %{
      state
      | level: Keyword.get(config, :level),
        format: Logger.Formatter.compile(Keyword.get(config, :format)),
        metadata: config |> Keyword.get(:metadata, []) |> configure_metadata(),
        log_group: Keyword.get(config, :log_group, "cadet-logs"),
        log_stream: Keyword.get(config, :log_stream, "#{node()}-#{:os.system_time(:second)}"),
        buffer: [],
        buffer_size: 0,
        timer_ref: schedule_flush(@flush_interval)
    }
  end

  defp configure_metadata(:all), do: :all
  # Reversed here so that take_metadata/2's reducing accumulator restores
  # the configured order.
  defp configure_metadata(metadata), do: Enum.reverse(metadata)

  defp take_metadata(metadata, :all) do
    metadata
  end

  defp take_metadata(metadata, keys) do
    Enum.reduce(keys, [], fn key, acc ->
      case Keyword.fetch(metadata, key) do
        {:ok, val} -> [{key, val} | acc]
        :error -> acc
      end
    end)
  end

  # Converts a Logger backend timestamp to Unix epoch milliseconds, the
  # format PutLogEvents expects. The 4th time element is *milliseconds*
  # (0..999), not microseconds. Assumes the timestamp is UTC (:utc_log) —
  # NOTE(review): confirm :utc_log is enabled, otherwise this is off by the
  # local UTC offset.
  defp timestamp_from_logger_ts({{year, month, day}, {hour, minute, second, millisecond}}) do
    datetime = %DateTime{
      year: year,
      month: month,
      day: day,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: {millisecond * 1000, 3},
      time_zone: "Etc/UTC",
      zone_abbr: "UTC",
      utc_offset: 0,
      std_offset: 0
    }

    DateTime.to_unix(datetime, :millisecond)
  end

  # Prefer config under the module key; fall back to :cloudwatch_logger as
  # written in config/prod.exs.
  defp read_env do
    Application.get_env(:logger, __MODULE__, Application.get_env(:logger, :cloudwatch_logger, []))
  end

  # Merges `options` over `env`; on key conflicts the value from `options`
  # takes precedence.
  defp configure_merge(env, options) do
    Keyword.merge(env, options, fn _key, _v1, v2 -> v2 end)
  end

  # Indirection point so tests can inject a mock client via :ex_aws_mock.
  defp request(operation) do
    client = Application.get_env(:ex_aws, :ex_aws_mock, ExAws)
    client.request(operation)
  end
end
4 changes: 4 additions & 0 deletions mix.exs
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,10 @@ defmodule Cadet.Mixfile do
{:exvcr, "~> 0.10", only: :test},
{:mock, "~> 0.3.0", only: :test},

# Dependencies for logger unit testing
{:mox, "~> 1.2", only: :test},
{:logger_backends, "~> 1.0.0", only: :test},

# The following are indirect dependencies, but we need to override the
# versions due to conflicts
{:jsx, "~> 3.1", override: true},
Expand Down
2 changes: 2 additions & 0 deletions mix.lock
Original file line number Diff line number Diff line change
Expand Up @@ -60,13 +60,15 @@
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
"jose": {:hex, :jose, "1.11.6", "613fda82552128aa6fb804682e3a616f4bc15565a048dabd05b1ebd5827ed965", [:mix, :rebar3], [], "hexpm", "6275cb75504f9c1e60eeacb771adfeee4905a9e182103aa59b53fed651ff9738"},
"jsx": {:hex, :jsx, "3.1.0", "d12516baa0bb23a59bb35dccaf02a1bd08243fcbb9efe24f2d9d056ccff71268", [:rebar3], [], "hexpm", "0c5cc8fdc11b53cc25cf65ac6705ad39e54ecc56d1c22e4adb8f5a53fb9427f3"},
"logger_backends": {:hex, :logger_backends, "1.0.0", "09c4fad6202e08cb0fbd37f328282f16539aca380f512523ce9472b28edc6bdf", [:mix], [], "hexpm", "1faceb3e7ec3ef66a8f5746c5afd020e63996df6fd4eb8cdb789e5665ae6c9ce"},
"mail": {:hex, :mail, "0.2.3", "2c6bb5f8a5f74845fa50ecd0fb45ea16b164026f285f45104f1c4c078cd616d4", [:mix], [], "hexpm", "932b398fa9c69fdf290d7ff63175826e0f1e24414d5b0763bb00a2acfc6c6bf5"},
"meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"},
"memento": {:hex, :memento, "0.3.2", "38cfc8ff9bcb1adff7cbd0f3b78a762636b86dff764729d1c82d0464c539bdd0", [:mix], [], "hexpm", "25cf691a98a0cb70262f4a7543c04bab24648cb2041d937eb64154a8d6f8012b"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
"mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"},
"mimerl": {:hex, :mimerl, "1.4.0", "3882a5ca67fbbe7117ba8947f27643557adec38fa2307490c4c4207624cb213b", [:rebar3], [], "hexpm", "13af15f9f68c65884ecca3a3891d50a7b57d82152792f3e19d88650aa126b144"},
"mock": {:hex, :mock, "0.3.9", "10e44ad1f5962480c5c9b9fa779c6c63de9bd31997c8e04a853ec990a9d841af", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "9e1b244c4ca2551bb17bb8415eed89e40ee1308e0fbaed0a4fdfe3ec8a4adbd3"},
"mox": {:hex, :mox, "1.2.0", "a2cd96b4b80a3883e3100a221e8adc1b98e4c3a332a8fc434c39526babafd5b3", [:mix], [{:nimble_ownership, "~> 1.0", [hex: :nimble_ownership, repo: "hexpm", optional: false]}], "hexpm", "c7b92b3cc69ee24a7eeeaf944cd7be22013c52fcb580c1f33f50845ec821089a"},
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
"nimble_ownership": {:hex, :nimble_ownership, "1.0.1", "f69fae0cdd451b1614364013544e66e4f5d25f36a2056a9698b793305c5aa3a6", [:mix], [], "hexpm", "3825e461025464f519f3f3e4a1f9b68c47dc151369611629ad08b636b73bb22d"},
"oban": {:hex, :oban, "2.18.0", "092d20bfd3d70c7ecb70960f8548d300b54bb9937c7f2e56b388f3a9ed02ec68", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aace1eff6f8227ae38d4274af967d96f051c2f0a5152f2ef9809dd1f97866745"},
Expand Down
Loading
Loading