diff --git a/src/invidious.cr b/src/invidious.cr
index 40acd9df..b8b12775 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -107,7 +107,8 @@ LOCALES = {
 YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
 
 config = CONFIG
-logger = Invidious::LogHandler.new
+output = STDOUT
+loglvl = LogLevel::Debug
 
 Kemal.config.extra_options do |parser|
   parser.banner = "Usage: invidious [arguments]"
@@ -127,11 +128,14 @@ Kemal.config.extra_options do |parser|
       exit
     end
   end
-  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output|
-    FileUtils.mkdir_p(File.dirname(output))
-    logger = Invidious::LogHandler.new(File.open(output, mode: "a"))
+  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output_arg|
+    FileUtils.mkdir_p(File.dirname(output_arg))
+    output = File.open(output_arg, mode: "a")
   end
-  parser.on("-v", "--version", "Print version") do |output|
+  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{loglvl})") do |loglvl_arg|
+    loglvl = LogLevel.parse(loglvl_arg)
+  end
+  parser.on("-v", "--version", "Print version") do
     puts SOFTWARE.to_pretty_json
     exit
   end
@@ -139,6 +143,8 @@ end
 
 Kemal::CLI.new ARGV
 
+logger = Invidious::LogHandler.new(output, loglvl)
+
 # Check table integrity
 if CONFIG.check_tables
   check_enum(PG_DB, logger, "privacy", PlaylistPrivacy)
@@ -1495,7 +1501,7 @@ post "/feed/webhook/:token" do |env|
   signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
 
   if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
-    logger.puts("#{token} : Invalid signature")
+    logger.error("/feed/webhook/#{token} : Invalid signature")
     env.response.status_code = 200
     next
   end
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index dc68fb5c..7a871dba 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -335,7 +335,7 @@ def check_enum(db, logger, enum_name, struct_type = nil)
   return # TODO
 
   if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-    logger.puts("CREATE TYPE #{enum_name}")
+    logger.info("check_enum: CREATE TYPE #{enum_name}")
 
     db.using_connection do |conn|
       conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -348,7 +348,7 @@ def check_table(db, logger, table_name, struct_type = nil)
   begin
     db.exec("SELECT * FROM #{table_name} LIMIT 0")
   rescue ex
-    logger.puts("CREATE TABLE #{table_name}")
+    logger.info("check_table: CREATE TABLE #{table_name}")
 
     db.using_connection do |conn|
       conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -368,7 +368,7 @@ def check_table(db, logger, table_name, struct_type = nil)
     if name != column_array[i]?
       if !column_array[i]?
         new_column = column_types.select { |line| line.starts_with? name }[0]
-        logger.puts("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
         db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
         next
       end
@@ -386,29 +386,29 @@ def check_table(db, logger, table_name, struct_type = nil)
 
           # There's a column we didn't expect
           if !new_column
-            logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+            logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
             db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
             column_array = get_column_array(db, table_name)
             next
           end
 
-          logger.puts("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
           db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 
-          logger.puts("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+          logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
           db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
 
-          logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
           db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
-          logger.puts("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+          logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
          db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
 
           column_array = get_column_array(db, table_name)
         end
       else
-        logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
         db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
       end
     end
@@ -418,7 +418,7 @@ def check_table(db, logger, table_name, struct_type = nil)
 
   column_array.each do |column|
     if !struct_array.includes? column
-      logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
       db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
     end
   end
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 52f0a22c..4e4d7306 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,66 +1,52 @@
 require "logger"
 
 enum LogLevel
+  All
+  Trace
   Debug
   Info
   Warn
   Error
+  Fatal
+  Off
 end
 
 class Invidious::LogHandler < Kemal::BaseLogHandler
-  def initialize(@io : IO = STDOUT, @level = LogLevel::Warn)
+  def initialize(@io : IO = STDOUT, @level = LogLevel::Debug)
   end
 
   def call(context : HTTP::Server::Context)
-    time = Time.utc
-    call_next(context)
-    elapsed_text = elapsed_text(Time.utc - time)
+    elapsed_time = Time.measure { call_next(context) }
+    elapsed_text = elapsed_text(elapsed_time)
 
-    @io << time << ' ' << context.response.status_code << ' ' << context.request.method << ' ' << context.request.resource << ' ' << elapsed_text << '\n'
-
-    if @io.is_a? File
-      @io.flush
-    end
+    info("#{context.response.status_code} #{context.request.method} #{context.request.resource} #{elapsed_text}")
 
     context
   end
 
   def puts(message : String)
     @io << message << '\n'
-
-    if @io.is_a? File
-      @io.flush
-    end
+    @io.flush
   end
 
-  def write(message : String, level = @level)
+  def write(message : String)
     @io << message
-
-    if @io.is_a? File
-      @io.flush
-    end
+    @io.flush
   end
 
   def set_log_level(level : String)
-    case level.downcase
-    when "debug"
-      set_log_level(LogLevel::Debug)
-    when "info"
-      set_log_level(LogLevel::Info)
-    when "warn"
-      set_log_level(LogLevel::Warn)
-    when "error"
-      set_log_level(LogLevel::Error)
-    end
+    @level = LogLevel.parse(level)
   end
 
   def set_log_level(level : LogLevel)
     @level = level
   end
 
-  {% for level in %w(debug info warn error) %}
+  {% for level in %w(trace debug info warn error fatal) %}
     def {{level.id}}(message : String)
-      puts(message, LogLevel::{{level.id.capitalize}})
+      if LogLevel::{{level.id.capitalize}} >= @level
+        puts("#{Time.utc} [{{level.id}}] #{message}")
+      end
     end
   {% end %}
diff --git a/src/invidious/jobs/bypass_captcha_job.cr b/src/invidious/jobs/bypass_captcha_job.cr
index 6778f7c3..daba64d5 100644
--- a/src/invidious/jobs/bypass_captcha_job.cr
+++ b/src/invidious/jobs/bypass_captcha_job.cr
@@ -125,7 +125,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
           end
         end
       rescue ex
-        logger.puts("Exception: #{ex.message}")
+        logger.error("BypassCaptchaJob: #{ex.message}")
       ensure
         sleep 1.minute
         Fiber.yield
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index 75fc474d..bbf55ff3 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -14,6 +14,7 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
     backoff = 1.seconds
 
     loop do
+      logger.debug("RefreshChannelsJob: Refreshing all channels")
       db.query("SELECT id FROM channels ORDER BY updated") do |rs|
         rs.each do
           id = rs.read(String)
@@ -27,17 +28,18 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
           active_threads += 1
           spawn do
             begin
+              logger.trace("RefreshChannelsJob: Fetching channel #{id}")
               channel = fetch_channel(id, db, config.full_refresh)
 
               lim_threads = max_threads
               db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
             rescue ex
-              logger.puts("#{id} : #{ex.message}")
+              logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
               if ex.message == "Deleted or invalid channel"
                 db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
               else
                 lim_threads = 1
-                logger.puts("#{id} : backing off for #{backoff}s")
+                logger.error("RefreshChannelsJob: #{id} : backing off for #{backoff}s")
                 sleep backoff
                 if backoff < 1.days
                   backoff += backoff
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index eebdf0f3..5dd47639 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -30,14 +30,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
 
               column_array = get_column_array(db, view_name)
               ChannelVideo.type_array.each_with_index do |name, i|
                 if name != column_array[i]?
-                  logger.puts("DROP MATERIALIZED VIEW #{view_name}")
+                  logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
                   db.exec("DROP MATERIALIZED VIEW #{view_name}")
                   raise "view does not exist"
                 end
               end
 
"WHERE ((cv.ucid = ANY (u.subscriptions))" - logger.puts("Materialized view #{view_name} is out-of-date, recreating...") + logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...") db.exec("DROP MATERIALIZED VIEW #{view_name}") end @@ -49,18 +49,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob legacy_view_name = "subscriptions_#{sha256(email)[0..7]}" db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0") - logger.puts("RENAME MATERIALIZED VIEW #{legacy_view_name}") + logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}") db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}") rescue ex begin # While iterating through, we may have an email stored from a deleted account if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool) - logger.puts("CREATE #{view_name}") + logger.info("RefreshFeedsJob: CREATE #{view_name}") db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}") db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email) end rescue ex - logger.puts("REFRESH #{email} : #{ex.message}") + logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}") end end end diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr index 3d3b2218..3bb31299 100644 --- a/src/invidious/jobs/subscribe_to_feeds_job.cr +++ b/src/invidious/jobs/subscribe_to_feeds_job.cr @@ -34,10 +34,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob response = subscribe_pubsub(ucid, hmac_key, config) if response.status_code >= 400 - logger.puts("#{ucid} : #{response.body}") + logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}") end rescue ex - logger.puts("#{ucid} : #{ex.message}") + logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}") end active_channel.send(true) diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr index 9c3ef755..7225c17f 100644 --- a/src/invidious/routes/watch.cr +++ b/src/invidious/routes/watch.cr @@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute rescue ex : VideoRedirect return env.redirect env.request.resource.gsub(id, ex.video_id) rescue ex - logger.puts("#{id} : #{ex.message}") + logger.error("get_video: #{id} : #{ex.message}") return error_template(500, ex) end