# invidious-mod/src/invidious/helpers/helpers.cr

require "./macros"
# A single-use anti-CSRF token with its expiry time, mapped from the
# database via DB::Serializable.
struct Nonce
  include DB::Serializable

  property nonce : String
  property expire : Time
end
# A user login session row mapped from the database.
# NOTE(review): `issued` is a String rather than Time — presumably a
# textual timestamp; confirm against the table schema before changing.
struct SessionId
  include DB::Serializable

  property id : String
  property email : String
  property issued : String
end
# A cached video-annotations record: the raw annotations XML payload
# keyed by video id.
struct Annotation
  include DB::Serializable

  property id : String
  property annotations : String
end
# Builds the URL-encoded form body for Google's GlifWebSignIn login flow.
#
# `f_req` is the serialized `f.req` payload for the current login step.
# Returns the encoded request body as a String.
def login_req(f_req)
  params = {
    # Unfortunately there's not much information available on `bgRequest`;
    # part of Google's BotGuard. Generally this is much longer
    # (>1250 characters), see also
    # https://github.com/ytdl-org/youtube-dl/commit/baf67a604d912722b0fe03a40e9dc5349a2208cb .
    # For now this can be empty.
    "bgRequest"       => %|["identifier",""]|,
    "pstMsg"          => "1",
    "checkConnection" => "youtube",
    "checkedDomains"  => "youtube",
    "hl"              => "en",
    "deviceinfo"      => %|[null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]|,
    "f.req"           => f_req,
    "flowName"        => "GlifWebSignIn",
    "flowEntry"       => "ServiceLogin",
    # Fields observed in real requests but not required:
    # "cookiesDisabled" => "false",
    # "gmscoreversion" => "undefined",
    # "continue" => "https://accounts.google.com/ManageAccount",
    # "azt" => "",
    # "bgHash" => "",
  }

  HTTP::Params.encode(params)
end
# Converts a snippet of description HTML to plain text: `<br>`/`<br/>`
# become newlines, all other markup is stripped by the HTML parser, and
# leading/trailing newlines and spaces are trimmed.
def html_to_content(description_html : String)
  text = description_html.gsub(/(<br>)|(<br\/>)/, {
    "<br>":  "\n",
    "<br/>": "\n",
  })

  # Nothing to parse — return the empty string unchanged.
  return text if text.empty?

  XML.parse_html(text).content.strip("\n ")
end
# Stores a video's annotations XML in the database, but only when caching
# is enabled in the config and the document contains at least one "legacy"
# annotation (any type other than branding/card/drawer, which YouTube
# still serves itself).
def cache_annotation(id, annotations)
  return if !CONFIG.cache_annotations

  body = XML.parse(annotations)
  nodeset = body.xpath_nodes(%q(/document/annotations/annotation))

  # BUGFIX: this previously read `return if nodeset == 0`, comparing the
  # XML::NodeSet itself to an Int — never true, so the guard was dead code.
  return if nodeset.size == 0

  has_legacy_annotations = nodeset.any? do |node|
    !{"branding", "card", "drawer"}.includes? node["type"]?
  end

  Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end
# Writes a Server-Sent-Events notification stream to `env.response`.
#
# Three spawned fibers cooperate:
#   * a "debug" fiber (only when the "debug" topic was requested) that
#     emits a random test video with a randomized publish time roughly
#     once a minute,
#   * a backfill fiber that, when a `since` query parameter is present,
#     replays stored channel videos for channel-id-shaped topics,
#   * a listener fiber that forwards live PQ notifications whose topic
#     is in `topics`.
# The calling fiber itself loops, sending `:keepalive` SSE comments every
# 20-30 seconds. Registration with the notification job goes through
# `connection_channel` ({true, connection} to attach, {false, connection}
# to detach on teardown).
def create_notification_stream(env, topics, connection_channel)
  connection = Channel(PQ::Notification).new(8)
  connection_channel.send({true, connection})

  locale = env.get("preferences").as(Preferences).locale

  # Optional unix timestamp: only replay videos published after this.
  since = env.params.query["since"]?.try &.to_i?
  id = 0 # monotonically increasing SSE event id, shared by all fibers

  if topics.includes? "debug"
    spawn do
      begin
        loop do
          # Fabricate a publish time 5-34 units in the past on one random
          # axis (days/hours/minutes/seconds).
          time_span = [0, 0, 0, 0]
          time_span[rand(4)] = rand(30) + 5
          published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
          video_id = TEST_IDS[rand(TEST_IDS.size)]

          video = get_video(video_id)
          video.published = published
          response = JSON.parse(video.to_json(locale, nil))

          # Optional `fields` filter; on a bad filter expression the event
          # payload becomes an error object instead.
          if fields_text = env.params.query["fields"]?
            begin
              JSONFilter.filter(response, fields_text)
            rescue ex
              env.response.status_code = 400
              response = {"error" => ex.message}
            end
          end

          env.response.puts "id: #{id}"
          env.response.puts "data: #{response.to_json}"
          env.response.puts
          env.response.flush

          id += 1
          sleep 1.minute
          Fiber.yield
        end
      rescue ex
        # Client went away (or serialization failed) — stop silently.
      end
    end
  end

  # Backfill: replay stored notifications newer than `since`.
  spawn do
    begin
      if since
        since_unix = Time.unix(since.not_nil!)

        topics.try &.each do |topic|
          case topic
          when .match(/UC[A-Za-z0-9_-]{22}/)
            # NOTE(review): "notfications" spelling presumably matches the
            # method name in the ChannelVideos DB module — do not fix here
            # in isolation.
            Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
              response = JSON.parse(video.to_json(locale))

              if fields_text = env.params.query["fields"]?
                begin
                  JSONFilter.filter(response, fields_text)
                rescue ex
                  env.response.status_code = 400
                  response = {"error" => ex.message}
                end
              end

              env.response.puts "id: #{id}"
              env.response.puts "data: #{response.to_json}"
              env.response.puts
              env.response.flush

              id += 1
            end
          else
            # TODO
          end
        end
      end
    end
  end

  # Live events: forward PQ notifications for subscribed topics.
  spawn do
    begin
      loop do
        event = connection.receive

        notification = JSON.parse(event.payload)
        topic = notification["topic"].as_s
        video_id = notification["videoId"].as_s
        published = notification["published"].as_i64

        if !topics.try &.includes? topic
          next
        end

        video = get_video(video_id)
        video.published = Time.unix(published)
        response = JSON.parse(video.to_json(locale, nil))

        if fields_text = env.params.query["fields"]?
          begin
            JSONFilter.filter(response, fields_text)
          rescue ex
            env.response.status_code = 400
            response = {"error" => ex.message}
          end
        end

        env.response.puts "id: #{id}"
        env.response.puts "data: #{response.to_json}"
        env.response.puts
        env.response.flush

        id += 1
      end
    rescue ex
    ensure
      # Detach from the notification job when the listener dies.
      connection_channel.send({false, connection})
    end
  end

  begin
    # Send heartbeat comments so proxies/clients keep the stream open;
    # jittered 20-30s interval. Ends (and detaches) when the client
    # disconnects and a write raises.
    loop do
      env.response.puts ":keepalive #{Time.utc.to_unix}"
      env.response.puts
      env.response.flush
      sleep (20 + rand(11)).seconds
    end
  rescue ex
  ensure
    connection_channel.send({false, connection})
  end
end
# Pulls the `ytInitialData` JSON object out of a YouTube HTML page body.
# Returns an empty hash when no `ytInitialData` assignment is found.
def extract_initial_data(body) : Hash(String, JSON::Any)
  match = body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx)
  json = match.try &.["info"] || "{}"
  JSON.parse(json).as_h
end
# Streams a proxied upstream response body into `env.response`,
# re-applying the upstream Content-Encoding (gzip or deflate) when one
# was declared; otherwise the bytes are copied through unchanged.
def proxy_file(response, env)
  if response.headers.includes_word?("Content-Encoding", "gzip")
    Compress::Gzip::Writer.open(env.response) do |writer|
      IO.copy(response.body_io, writer)
    end
  elsif response.headers.includes_word?("Content-Encoding", "deflate")
    Compress::Deflate::Writer.open(env.response) do |writer|
      IO.copy(response.body_io, writer)
    end
  else
    IO.copy(response.body_io, env.response)
  end
end