invidious-mod/src/invidious/helpers/helpers.cr


require "./macros"
struct Nonce
  include DB::Serializable

  property nonce : String
  property expire : Time
end

struct SessionId
  include DB::Serializable

  property id : String
  property email : String
  property issued : String
end

struct Annotation
  include DB::Serializable

  property id : String
  property annotations : String
end
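# Converts a description's HTML into plain text: <br> tags become newlines,
# then the remaining markup is stripped via XML.parse_html.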
def html_to_content(description_html : String)
  description = description_html.gsub(/(<br>)|(<br\/>)/, {
    "<br>":  "\n",
    "<br/>": "\n",
  })

  if !description.empty?
    description = XML.parse_html(description).content.strip("\n ")
  end

  return description
end
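# Stores the annotations XML for a video when it contains at least one legacy
# annotation (any type other than "branding", "card" or "drawer").
# No-op when annotation caching is disabled in the config.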
def cache_annotation(id, annotations)
  if !CONFIG.cache_annotations
    return
  end

  body = XML.parse(annotations)
  nodeset = body.xpath_nodes(%q(/document/annotations/annotation))

  # Nothing to cache for an empty document
  return if nodeset.empty?

  has_legacy_annotations = false
  nodeset.each do |node|
    if !{"branding", "card", "drawer"}.includes? node["type"]?
      has_legacy_annotations = true
      break
    end
  end

  Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end
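# Serves a Server-Sent Events stream of new-video notifications for the given
# topics: the "debug" topic emits fake events, channel topics (UC...) are
# backfilled from the database when `since` is given, and live events arrive
# over `connection_channel`. A keepalive comment is sent every 20-30 seconds.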
def create_notification_stream(env, topics, connection_channel)
  # Announce this stream's channel on connection_channel ({true, connection});
  # the ensure blocks below send {false, connection} when the stream ends.
  connection = Channel(PQ::Notification).new(8)
  connection_channel.send({true, connection})

  locale = env.get("preferences").as(Preferences).locale

  since = env.params.query["since"]?.try &.to_i?
  id = 0

  if topics.includes? "debug"
    # Debug topic: emit a fake notification for a random TEST_IDS video every minute
    spawn do
      begin
        loop do
          time_span = [0, 0, 0, 0]
          time_span[rand(4)] = rand(30) + 5
          published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
          video_id = TEST_IDS[rand(TEST_IDS.size)]

          video = get_video(video_id)
          video.published = published

          response = JSON.parse(video.to_json(locale, nil))

          env.response.puts "id: #{id}"
          env.response.puts "data: #{response.to_json}"
          env.response.puts
          env.response.flush

          id += 1
          sleep 1.minute

          Fiber.yield
        end
      rescue ex
      end
    end
  end
  # Backfill: when the client supplies ?since=, replay matching channel (UC...)
  # videos recorded in the database since that timestamp.
  spawn do
    begin
      if since
        since_unix = Time.unix(since.not_nil!)

        topics.try &.each do |topic|
          case topic
          when .match(/UC[A-Za-z0-9_-]{22}/)
            Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
              response = JSON.parse(video.to_json(locale))

              env.response.puts "id: #{id}"
              env.response.puts "data: #{response.to_json}"
              env.response.puts
              env.response.flush

              id += 1
            end
          else
            # TODO
          end
        end
      end
    end
  end
  # Forward live notifications received on `connection`, skipping topics the
  # client did not subscribe to.
  spawn do
    begin
      loop do
        event = connection.receive

        notification = JSON.parse(event.payload)
        topic = notification["topic"].as_s
        video_id = notification["videoId"].as_s
        published = notification["published"].as_i64

        if !topics.try &.includes? topic
          next
        end

        video = get_video(video_id)
        video.published = Time.unix(published)

        response = JSON.parse(video.to_json(locale, nil))

        env.response.puts "id: #{id}"
        env.response.puts "data: #{response.to_json}"
        env.response.puts
        env.response.flush

        id += 1
      end
    rescue ex
    ensure
      connection_channel.send({false, connection})
    end
  end
  begin
    # Send heartbeat
    loop do
      env.response.puts ":keepalive #{Time.utc.to_unix}"
      env.response.puts
      env.response.flush
      sleep (20 + rand(11)).seconds
    end
  rescue ex
  ensure
    connection_channel.send({false, connection})
  end
end
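# Extracts the `ytInitialData` JSON object embedded in a YouTube HTML page.
# Returns an empty Hash when no match is found.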
def extract_initial_data(body) : Hash(String, JSON::Any)
  return JSON.parse(body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx).try &.["info"] || "{}").as_h
end
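# Streams a proxied HTTP response body to the client, re-compressing with gzip
# or deflate when the upstream response advertised that Content-Encoding.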
def proxy_file(response, env)
  if response.headers.includes_word?("Content-Encoding", "gzip")
    Compress::Gzip::Writer.open(env.response) do |deflate|
      IO.copy response.body_io, deflate
    end
  elsif response.headers.includes_word?("Content-Encoding", "deflate")
    Compress::Deflate::Writer.open(env.response) do |deflate|
      IO.copy response.body_io, deflate
    end
  else
    IO.copy response.body_io, env.response
  end
end
# Fetches the playback requests tracker stored by the statistics refresh job.
#
# (Re)initialises the tracker when the stored hash is empty.
def get_playback_statistic
  if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
    tracker = {
      "totalRequests"      => 0_i64,
      "successfulRequests" => 0_i64,
      "ratio"              => 0_f64,
    }

    Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
  end
  return tracker.as(Hash(String, Int64 | Float64))
end