# invidious/src/invidious/search.cr

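# A single video entry in a search result set. `db_mapping` is a helper macro
# (defined elsewhere in Invidious) that generates the properties and
# (de)serialization for the listed fields.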
struct SearchVideo
  db_mapping({
    title: String,
    id: String,
    author: String,
    ucid: String,
    published: Time,
    views: Int64,
    description: String,
    description_html: String,
    length_seconds: Int32,
    live_now: Bool,
    paid: Bool,
    premium: Bool,
    premiere_timestamp: Time?,
  })
end
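
# Minimal video entry embedded in a SearchPlaylist result.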
struct SearchPlaylistVideo
  db_mapping({
    title: String,
    id: String,
    length_seconds: Int32,
  })
end
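
# Playlist entry in a search result set, including a short preview of its videos.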
struct SearchPlaylist
  db_mapping({
    title: String,
    id: String,
    author: String,
    ucid: String,
    video_count: Int32,
    videos: Array(SearchPlaylistVideo),
    thumbnail_id: String?,
  })
end
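
# Channel entry in a search result set.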
struct SearchChannel
  db_mapping({
    author: String,
    ucid: String,
    author_thumbnail: String,
    subscriber_count: Int32,
    video_count: Int32,
    description: String,
    description_html: String,
  })
end

alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist
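
# Searches within a single channel. The channel identifier is resolved to a UCID
# via the canonical <link> on its /channel/ page (falling back to /c/), then the
# results are fetched through the continuation URL built by
# produce_channel_search_url. Returns the result node count and the extracted items.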
def channel_search(query, page, channel)
  client = make_client(YT_URL)

  response = client.get("/channel/#{channel}?disable_polymer=1&hl=en&gl=US")
  document = XML.parse_html(response.body)
  canonical = document.xpath_node(%q(//link[@rel="canonical"]))

  if !canonical
    response = client.get("/c/#{channel}?disable_polymer=1&hl=en&gl=US")
    document = XML.parse_html(response.body)
    canonical = document.xpath_node(%q(//link[@rel="canonical"]))
  end

  if !canonical
    return 0, [] of SearchItem
  end

  ucid = canonical["href"].split("/")[-1]

  url = produce_channel_search_url(ucid, query, page)
  response = client.get(url)
  json = JSON.parse(response.body)

  if json["content_html"]? && !json["content_html"].as_s.empty?
    document = XML.parse_html(json["content_html"].as_s)
    nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

    count = nodeset.size
    items = extract_items(nodeset)
  else
    count = 0
    items = [] of SearchItem
  end

  return count, items
end
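
# Site-wide search against /results. `search_params` is the encoded filter token
# passed as the `sp` query parameter (see produce_search_params). Returns a tuple
# of {result count, items}.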
def search(query, page = 1, search_params = produce_search_params(content_type: "all"), proxies = nil, region = nil)
  client = make_client(YT_URL, proxies, region)

  if query.empty?
    return {0, [] of SearchItem}
  end

  html = client.get("/results?q=#{URI.escape(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body

  if html.empty?
    return {0, [] of SearchItem}
  end

  html = XML.parse_html(html)
  nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
  items = extract_items(nodeset)

  return {nodeset.size, items}
end
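
# Builds the search filter token passed to YouTube as the `sp` parameter. The
# byte strings below appear to be a hand-rolled, protobuf-style encoding of the
# sort order, upload date, content type, duration and feature filters; the
# result is base64url-encoded and URL-escaped.
#
# Example (hypothetical values):
#   produce_search_params(sort: "upload_date", content_type: "video")
#   # => URL-safe token suitable for the `sp` query parameter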
def produce_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
                          duration : String = "", features : Array(String) = [] of String)
  head = "\x08"

  head += case sort
          when "relevance"
            "\x00"
          when "rating"
            "\x01"
          when "upload_date", "date"
            "\x02"
          when "view_count", "views"
            "\x03"
          else
            raise "No sort #{sort}"
          end

  body = ""

  body += case date
          when "hour"
            "\x08\x01"
          when "today"
            "\x08\x02"
          when "week"
            "\x08\x03"
          when "month"
            "\x08\x04"
          when "year"
            "\x08\x05"
          else
            ""
          end

  body += case content_type
          when "video"
            "\x10\x01"
          when "channel"
            "\x10\x02"
          when "playlist"
            "\x10\x03"
          when "movie"
            "\x10\x04"
          when "show"
            "\x10\x05"
          when "all"
            ""
          else
            "\x10\x01"
          end

  body += case duration
          when "short"
            "\x18\x01"
          when "long"
            "\x18\x02"
          else
            ""
          end

  features.each do |feature|
    body += case feature
            when "hd"
              "\x20\x01"
            when "subtitles"
              "\x28\x01"
            when "creative_commons", "cc"
              "\x30\x01"
            when "3d"
              "\x38\x01"
            when "live", "livestream"
              "\x40\x01"
            when "purchased"
              "\x48\x01"
            when "4k"
              "\x70\x01"
            when "360"
              "\x78\x01"
            when "location"
              "\xb8\x01\x01"
            when "hdr"
              "\xc8\x01\x01"
            else
              raise "Unknown feature #{feature}"
            end
  end

  if !body.empty?
    token = head + "\x12" + body.size.unsafe_chr + body
  else
    token = head
  end

  token = Base64.urlsafe_encode(token)
  token = URI.escape(token)

  return token
end
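
# Builds the /browse_ajax continuation URL used by channel_search. The byte
# sequences written below look like hand-assembled protobuf field tags wrapping
# the UCID, a base64-encoded "params" blob (search flags plus the page number),
# and the query string.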
def produce_channel_search_url(ucid, query, page)
  page = "#{page}"

  meta = IO::Memory.new
  meta.write(Bytes[0x12, 0x06])
  meta.print("search")

  meta.write(Bytes[0x30, 0x02])
  meta.write(Bytes[0x38, 0x01])
  meta.write(Bytes[0x60, 0x01])
  meta.write(Bytes[0x6a, 0x00])
  meta.write(Bytes[0xb8, 0x01, 0x00])

  meta.write(Bytes[0x7a, page.size])
  meta.print(page)

  meta.rewind
  meta = Base64.urlsafe_encode(meta.to_slice)
  meta = URI.escape(meta)

  continuation = IO::Memory.new
  continuation.write(Bytes[0x12, ucid.size])
  continuation.print(ucid)

  continuation.write(Bytes[0x1a, meta.size])
  continuation.print(meta)

  continuation.write(Bytes[0x5a, query.size])
  continuation.print(query)

  continuation.rewind
  continuation = continuation.gets_to_end

  wrapper = IO::Memory.new
  wrapper.write(Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02, continuation.size])
  wrapper.print(continuation)
  wrapper.rewind

  wrapper = Base64.urlsafe_encode(wrapper.to_slice)
  wrapper = URI.escape(wrapper)

  url = "/browse_ajax?continuation=#{wrapper}&gl=US&hl=en"

  return url
end