Move import logic to its own module

This commit is contained in:
Samantaz Fox 2022-02-04 06:01:07 +01:00
commit 2bbd424fce
No known key found for this signature in database
GPG key ID: F42821059186176E
3 changed files with 202 additions and 135 deletions

View file

@@ -25,9 +25,9 @@ def csv_sample
CSV
end
Spectator.describe "Invidious::User::Imports" do
Spectator.describe Invidious::User::Import do
it "imports CSV" do
subscriptions = parse_subscription_export_csv(csv_sample)
subscriptions = Invidious::User::Import.parse_subscription_export_csv(csv_sample)
expect(subscriptions).to be_an(Array(String))
expect(subscriptions.size).to eq(13)
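For reference, the renamed helper is exercised like the snippet below. The input is a hypothetical two-row sample (the real csv_sample heredoc is truncated in this hunk); the Channel Id, Channel Url, Channel Title column order follows YouTube Takeout subscription exports and is an assumption here, as is the exact return value.

sample = <<-CSV
Channel Id,Channel Url,Channel Title
UCXuqSBlHAE6Xw-yeJA0Tunw,http://www.youtube.com/channel/UCXuqSBlHAE6Xw-yeJA0Tunw,Example Channel
CSV

# Parse the export and collect the channel identifiers
subscriptions = Invidious::User::Import.parse_subscription_export_csv(sample)
subscriptions # => expected: ["UCXuqSBlHAE6Xw-yeJA0Tunw"], an Array(String) of channel IDs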

View file

@@ -321,149 +321,27 @@ module Invidious::Routes::PreferencesRoute
# TODO: Unify into single import based on content-type
case part.name
when "import_invidious"
body = JSON.parse(body)
if body["subscriptions"]?
user.subscriptions += body["subscriptions"].as_a.map(&.as_s)
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
end
if body["watch_history"]?
user.watched += body["watch_history"].as_a.map(&.as_s)
user.watched.uniq!
Invidious::Database::Users.update_watch_history(user)
end
if body["preferences"]?
user.preferences = Preferences.from_json(body["preferences"].to_json)
Invidious::Database::Users.update_preferences(user)
end
if playlists = body["playlists"]?.try &.as_a?
playlists.each do |item|
title = item["title"]?.try &.as_s?.try &.delete("<>")
description = item["description"]?.try &.as_s?.try &.delete("\r")
privacy = item["privacy"]?.try &.as_s?.try { |privacy| PlaylistPrivacy.parse? privacy }
next if !title
next if !description
next if !privacy
playlist = create_playlist(title, privacy, user)
Invidious::Database::Playlists.update_description(playlist.id, description)
videos = item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
raise InfoException.new("Playlist cannot have more than 500 videos") if idx > 500
video_id = video_id.try &.as_s?
next if !video_id
begin
video = get_video(video_id)
rescue ex
next
end
playlist_video = PlaylistVideo.new({
title: video.title,
id: video.id,
author: video.author,
ucid: video.ucid,
length_seconds: video.length_seconds,
published: video.published,
plid: playlist.id,
live_now: video.live_now,
index: Random::Secure.rand(0_i64..Int64::MAX),
})
Invidious::Database::PlaylistVideos.insert(playlist_video)
Invidious::Database::Playlists.update_video_added(playlist.id, playlist_video.index)
end
end
end
Invidious::User::Import.from_invidious(user, body)
when "import_youtube"
filename = part.filename || ""
extension = filename.split(".").last
success = Invidious::User::Import.from_youtube(user, body, filename, type)
if extension == "xml" || type == "application/xml" || type == "text/xml"
subscriptions = XML.parse(body)
user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
end
elsif extension == "json" || type == "application/json"
subscriptions = JSON.parse(body)
user.subscriptions += subscriptions.as_a.compact_map do |entry|
entry["snippet"]["resourceId"]["channelId"].as_s
end
elsif extension == "csv" || type == "text/csv"
subscriptions = parse_subscription_export_csv(body)
user.subscriptions += subscriptions
else
if !success
haltf(env, status_code: 415,
response: error_template(415, "Invalid subscription file uploaded")
)
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
when "import_freetube"
user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
md["channel_id"]
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
Invidious::User::Import.from_freetube(user, body)
when "import_newpipe_subscriptions"
body = JSON.parse(body)
user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
next match["channel"]
elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
response = YT_POOL.client &.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
html = XML.parse_html(response.body)
ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
next ucid if ucid
end
nil
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
Invidious::User::Import.from_newpipe_subs(user, body)
when "import_newpipe"
Compress::Zip::Reader.open(IO::Memory.new(body)) do |file|
file.each_entry do |entry|
if entry.filename == "newpipe.db"
tempfile = File.tempfile(".db")
File.write(tempfile.path, entry.io.gets_to_end)
db = DB.open("sqlite3://" + tempfile.path)
success = Invidious::User::Import.from_newpipe(user, body)
user.watched += db.query_all("SELECT url FROM streams", as: String).map(&.lchop("https://www.youtube.com/watch?v="))
user.watched.uniq!
Invidious::Database::Users.update_watch_history(user)
user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map(&.lchop("https://www.youtube.com/channel/"))
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
db.close
tempfile.delete
end
end
if !success
haltf(env, status_code: 415,
response: error_template(415, "Uploaded file is too large")
)
end
else nil # Ignore
end
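
With the removed branches filtered out, the handler above reduces to the dispatch sketched below. This is a consolidated view of the call sites added in this hunk, not a verbatim copy of the new file; env, user, body, part and type are the route-local values provided by the surrounding multipart form handler.

case part.name
when "import_invidious"
  Invidious::User::Import.from_invidious(user, body)
when "import_youtube"
  filename = part.filename || ""
  success = Invidious::User::Import.from_youtube(user, body, filename, type)

  if !success
    haltf(env, status_code: 415,
      response: error_template(415, "Invalid subscription file uploaded")
    )
  end
when "import_freetube"
  Invidious::User::Import.from_freetube(user, body)
when "import_newpipe_subscriptions"
  Invidious::User::Import.from_newpipe_subs(user, body)
when "import_newpipe"
  success = Invidious::User::Import.from_newpipe(user, body)

  if !success
    haltf(env, status_code: 415,
      response: error_template(415, "Uploaded file is too large")
    )
  end
else nil # Ignore
end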

View file

@@ -29,5 +29,194 @@ struct Invidious::User
return subscriptions
end
end
# -------------------
# Invidious
# -------------------
# Import from another invidious account
def from_invidious(user : User, body : String)
data = JSON.parse(body)
if data["subscriptions"]?
user.subscriptions += data["subscriptions"].as_a.map(&.as_s)
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
end
if data["watch_history"]?
user.watched += data["watch_history"].as_a.map(&.as_s)
user.watched.uniq!
Invidious::Database::Users.update_watch_history(user)
end
if data["preferences"]?
user.preferences = Preferences.from_json(data["preferences"].to_json)
Invidious::Database::Users.update_preferences(user)
end
if playlists = data["playlists"]?.try &.as_a?
playlists.each do |item|
title = item["title"]?.try &.as_s?.try &.delete("<>")
description = item["description"]?.try &.as_s?.try &.delete("\r")
privacy = item["privacy"]?.try &.as_s?.try { |privacy| PlaylistPrivacy.parse? privacy }
next if !title
next if !description
next if !privacy
playlist = create_playlist(title, privacy, user)
Invidious::Database::Playlists.update_description(playlist.id, description)
videos = item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
raise InfoException.new("Playlist cannot have more than 500 videos") if idx > 500
video_id = video_id.try &.as_s?
next if !video_id
begin
video = get_video(video_id)
rescue ex
next
end
playlist_video = PlaylistVideo.new({
title: video.title,
id: video.id,
author: video.author,
ucid: video.ucid,
length_seconds: video.length_seconds,
published: video.published,
plid: playlist.id,
live_now: video.live_now,
index: Random::Secure.rand(0_i64..Int64::MAX),
})
Invidious::Database::PlaylistVideos.insert(playlist_video)
Invidious::Database::Playlists.update_video_added(playlist.id, playlist_video.index)
end
end
end
end
# -------------------
# Youtube
# -------------------
# Import subscribed channels from Youtube
# Returns success status
def from_youtube(user : User, body : String, filename : String, type : String) : Bool
extension = filename.split(".").last
if extension == "xml" || type == "application/xml" || type == "text/xml"
subscriptions = XML.parse(body)
user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
end
elsif extension == "json" || type == "application/json"
subscriptions = JSON.parse(body)
user.subscriptions += subscriptions.as_a.compact_map do |entry|
entry["snippet"]["resourceId"]["channelId"].as_s
end
elsif extension == "csv" || type == "text/csv"
subscriptions = parse_subscription_export_csv(body)
user.subscriptions += subscriptions
else
return false
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
return true
end
# -------------------
# Freetube
# -------------------
def from_freetube(user : User, body : String)
matches = body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/)
user.subscriptions += matches.map(&.["channel_id"])
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
end
# -------------------
# Newpipe
# -------------------
def from_newpipe_subs(user : User, body : String)
data = JSON.parse(body)
user.subscriptions += data["subscriptions"].as_a.compact_map do |channel|
if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
next match["channel"]
elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
# Resolve URL using the API
resolved_url = YoutubeAPI.resolve_url("https://www.youtube.com/user/#{match["user"]}")
ucid = resolved_url.dig?("endpoint", "browseEndpoint", "browseId")
next ucid.as_s if ucid
end
nil
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
end
def from_newpipe(user : User, body : String) : Bool
io = IO::Memory.new(body)
Compress::Zip::File.open(io) do |file|
file.entries.each do |entry|
entry.open do |file_io|
# Ensure max size of 4MB
io_sized = IO::Sized.new(file_io, 0x400000)
next if entry.filename != "newpipe.db"
tempfile = File.tempfile(".db")
begin
File.write(tempfile.path, io_sized.gets_to_end)
rescue
return false
end
db = DB.open("sqlite3://" + tempfile.path)
user.watched += db.query_all("SELECT url FROM streams", as: String)
.map(&.lchop("https://www.youtube.com/watch?v="))
user.watched.uniq!
Invidious::Database::Users.update_watch_history(user)
user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String)
.map(&.lchop("https://www.youtube.com/channel/"))
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions)
Invidious::Database::Users.update_subscriptions(user)
db.close
tempfile.delete
end
end
end
# Success!
return true
end
end # module
end
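
Because the hunk above is rendered without indentation, the nesting of the new file is easy to misread. The skeleton below is an orientation sketch, not a verbatim copy: the extend self line is an assumption (it does not appear in this hunk, but the methods are plain defs and are invoked as Invidious::User::Import.from_* from the route and as Invidious::User::Import.parse_subscription_export_csv from the spec), and the method bodies are reduced to placeholders.

struct Invidious::User
  module Import
    extend self # assumed, see the note above

    def parse_subscription_export_csv(csv_content : String)
      # ... returns an Array(String) of channel IDs (see the spec hunk above)
    end

    def from_invidious(user : User, body : String)
      # ... imports subscriptions, watch history, preferences and playlists
    end

    def from_youtube(user : User, body : String, filename : String, type : String) : Bool
      # placeholder; the real method returns false for unrecognized file types
      false
    end

    def from_freetube(user : User, body : String)
      # ... scans the exported file for "channelId" entries
    end

    def from_newpipe_subs(user : User, body : String)
      # ... resolves NewPipe channel URLs to channel IDs
    end

    def from_newpipe(user : User, body : String) : Bool
      # placeholder; the real method returns false when the archive cannot be written out
      true
    end
  end # module
end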