2019-04-04 01:35:58 +09:00
|
|
|
require "./macros"
|
|
|
|
|
2019-04-11 06:23:37 +09:00
|
|
|
# One row of the nonces table: a single-use token with an expiry.
# DB::Serializable maps result-set columns onto the properties below.
struct Nonce
  include DB::Serializable

  # The nonce value itself.
  property nonce : String

  # Point in time after which this nonce should no longer be accepted.
  property expire : Time
end
|
|
|
|
|
|
|
|
# One row of the session-id table, tying a session token to a user.
# DB::Serializable maps result-set columns onto the properties below.
struct SessionId
  include DB::Serializable

  # Session token.
  property id : String

  # Account the session belongs to.
  property email : String

  # Issue timestamp. NOTE(review): stored as String rather than Time —
  # presumably a serialized timestamp; confirm against the code that writes it.
  property issued : String
end
|
|
|
|
|
2019-04-16 01:13:09 +09:00
|
|
|
# One row of the annotations cache: raw annotations XML keyed by video id
# (see cache_annotation below, which inserts into this table).
# DB::Serializable maps result-set columns onto the properties below.
struct Annotation
  include DB::Serializable

  # Video id the annotations belong to.
  property id : String

  # The annotations document, stored verbatim as a string.
  property annotations : String
end
|
|
|
|
|
2019-06-09 05:08:27 +09:00
|
|
|
# Reduce an HTML description to plain text.
#
# `<br>` / `<br/>` tags are first rewritten to newlines, then the remaining
# markup is parsed and collapsed to its text content, trimmed of leading and
# trailing newlines/spaces. An empty input is returned unchanged (parsing an
# empty document would fail).
def html_to_content(description_html : String)
  line_breaks = {
    "<br>":  "\n",
    "<br/>": "\n",
  }
  content = description_html.gsub(/(<br>)|(<br\/>)/, line_breaks)

  return content if content.empty?

  XML.parse_html(content).content.strip("\n ")
end
|
2018-08-10 23:44:19 +09:00
|
|
|
|
2021-12-07 10:55:43 +09:00
|
|
|
# Persist a video's legacy annotations XML in the database cache.
#
# Does nothing when annotation caching is disabled in the config, and only
# stores documents that contain at least one "legacy" annotation — anything
# whose type is not branding/card/drawer.
def cache_annotation(id, annotations)
  if !CONFIG.cache_annotations
    return
  end

  body = XML.parse(annotations)
  nodeset = body.xpath_nodes(%q(/document/annotations/annotation))

  # Fix: previously `return if nodeset == 0`, which compares the NodeSet
  # itself to an Int and is never true, making the early-out dead code.
  # Use #empty? to bail out when the document has no annotation nodes.
  return if nodeset.empty?

  has_legacy_annotations = false
  nodeset.each do |node|
    if !{"branding", "card", "drawer"}.includes? node["type"]?
      has_legacy_annotations = true
      break
    end
  end

  Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end
|
2019-05-19 09:14:58 +09:00
|
|
|
|
2020-06-16 07:10:30 +09:00
|
|
|
# Stream new-video notifications to a client as server-sent events (SSE).
#
# Registers a per-client channel with the notification dispatcher, spawns
# fibers that (1) optionally emit fake debug events, (2) replay stored
# notifications newer than the `since` query param, and (3) forward live
# Postgres NOTIFY events — then blocks in a keepalive loop until the client
# disconnects. The channel is deregistered via `{false, connection}` on exit.
def create_notification_stream(env, topics, connection_channel)
  # Buffered channel receiving Postgres NOTIFY payloads for this client;
  # sending {true, connection} registers it with the dispatcher fiber.
  connection = Channel(PQ::Notification).new(8)
  connection_channel.send({true, connection})

  locale = env.get("preferences").as(Preferences).locale

  # Optional `since` query param: Unix timestamp to replay stored
  # notifications from. `id` numbers the SSE events sent to the client.
  since = env.params.query["since"]?.try &.to_i?
  id = 0

  # Debug topic: fabricate one random notification per minute from TEST_IDS.
  if topics.includes? "debug"
    spawn do
      begin
        loop do
          # Pick one of days/hours/minutes/seconds at random and offset it
          # by 5..34 to fake a recent publish time.
          time_span = [0, 0, 0, 0]
          time_span[rand(4)] = rand(30) + 5
          published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
          video_id = TEST_IDS[rand(TEST_IDS.size)]

          video = get_video(video_id)
          video.published = published
          response = JSON.parse(video.to_json(locale, nil))

          # SSE frame: "id:" line, "data:" line, blank-line terminator.
          env.response.puts "id: #{id}"
          env.response.puts "data: #{response.to_json}"
          env.response.puts
          env.response.flush

          id += 1

          sleep 1.minute
          Fiber.yield
        end
      rescue ex
        # Best-effort: any failure silently terminates the debug fiber.
      end
    end
  end

  # Replay stored notifications published after the client's `since` mark.
  spawn do
    begin
      if since
        since_unix = Time.unix(since.not_nil!)

        topics.try &.each do |topic|
          case topic
          when .match(/UC[A-Za-z0-9_-]{22}/)
            # Channel topic (UC… id): emit each stored video as an SSE event.
            # NOTE(review): "select_notfications" [sic] matches the DB
            # helper's actual spelling — do not "fix" it here alone.
            Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
              response = JSON.parse(video.to_json(locale))

              env.response.puts "id: #{id}"
              env.response.puts "data: #{response.to_json}"
              env.response.puts
              env.response.flush

              id += 1
            end
          else
            # TODO
          end
        end
      end
    end
  end

  # Live path: forward NOTIFY payloads for subscribed topics as they arrive.
  spawn do
    begin
      loop do
        event = connection.receive

        notification = JSON.parse(event.payload)
        topic = notification["topic"].as_s
        video_id = notification["videoId"].as_s
        published = notification["published"].as_i64

        # Skip events for topics this client did not subscribe to.
        if !topics.try &.includes? topic
          next
        end

        video = get_video(video_id)
        video.published = Time.unix(published)
        response = JSON.parse(video.to_json(locale, nil))

        env.response.puts "id: #{id}"
        env.response.puts "data: #{response.to_json}"
        env.response.puts
        env.response.flush

        id += 1
      end
    rescue ex
    ensure
      # Always deregister this client's channel from the dispatcher.
      connection_channel.send({false, connection})
    end
  end

  begin
    # Send heartbeat
    # Blocks the request fiber; when the write fails (client gone) the
    # rescue/ensure below deregisters the connection and the handler returns.
    loop do
      env.response.puts ":keepalive #{Time.utc.to_unix}"
      env.response.puts
      env.response.flush
      sleep (20 + rand(11)).seconds
    end
  rescue ex
  ensure
    connection_channel.send({false, connection})
  end
end
|
2019-07-11 21:27:42 +09:00
|
|
|
|
2020-06-16 07:33:23 +09:00
|
|
|
# Locate and parse the `ytInitialData` JSON object embedded in a page body.
# Falls back to an empty hash when the marker cannot be found.
def extract_initial_data(body) : Hash(String, JSON::Any)
  match = body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx)
  raw_json = match.try &.["info"] || "{}"

  JSON.parse(raw_json).as_h
end
|
2019-07-19 08:51:10 +09:00
|
|
|
|
|
|
|
# Copy an upstream HTTP response body through to the client, re-applying
# the upstream's Content-Encoding (gzip/deflate) so that the headers we
# forwarded stay truthful; unencoded bodies are streamed verbatim.
def proxy_file(response, env)
  if response.headers.includes_word?("Content-Encoding", "gzip")
    Compress::Gzip::Writer.open(env.response) { |writer| IO.copy(response.body_io, writer) }
  elsif response.headers.includes_word?("Content-Encoding", "deflate")
    Compress::Deflate::Writer.open(env.response) { |writer| IO.copy(response.body_io, writer) }
  else
    IO.copy(response.body_io, env.response)
  end
end
|
2023-11-04 22:52:30 +09:00
|
|
|
|
|
|
|
# Fetch the playback requests tracker from the statistics endpoint.
#
# Creates a new tracker when unavailable.
def get_playback_statistic
  stats = Invidious::Jobs::StatisticsRefreshJob::STATISTICS
  tracker = stats["playback"]

  # Lazily (re)initialize the tracker when the stored one is empty.
  if tracker && tracker.as(Hash).empty?
    tracker = {
      "totalRequests"      => 0_i64,
      "successfulRequests" => 0_i64,
      "ratio"              => 0_f64,
    }
    stats["playback"] = tracker
  end

  tracker.as(Hash(String, Int64 | Float64))
end
|