Add proper redirect links and add support for timestamps

commit 42405a34f1
parent 4e19194e6d
@@ -934,9 +934,7 @@ get "/api/v1/trending" do |env|
   videos = JSON.build do |json|
     json.array do
       trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"])).each do |node|
-        length_seconds = node.xpath_node(%q(.//span[@class="video-time"])).not_nil!.content
-        minutes, seconds = length_seconds.split(":")
-        length_seconds = minutes.to_i * 60 + seconds.to_i
+        length_seconds = decode_length_seconds(node.xpath_node(%q(.//span[@class="video-time"])).not_nil!.content)
 
         video = node.xpath_node(%q(.//h3/a)).not_nil!
         title = video.content
@@ -966,7 +964,7 @@ get "/api/v1/trending" do |env|
         end
 
         published = published.content.split(" ")[-3..-1].join(" ")
-        published = decode_date(published)
+        published = decode_date(published).epoch
 
         json.object do
           json.field "title", title
@@ -993,7 +991,7 @@ get "/api/v1/trending" do |env|
           json.field "viewCount", view_count
           json.field "author", author
           json.field "authorUrl", author_url
-          json.field "published", published.epoch
+          json.field "published", published
           json.field "description", description
           json.field "descriptionHtml", descriptionHtml
         end
@@ -1216,12 +1214,12 @@ get "/api/v1/channels/:ucid/videos" do |env|
 
   videos = JSON.build do |json|
     json.array do
-      document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")])).each do |item|
-        anchor = item.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
+      document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")])).each do |node|
+        anchor = node.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
         title = anchor.content.strip
         video_id = anchor["href"].lchop("/watch?v=")
 
-        published = item.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[1]))
+        published = node.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[1]))
         if !published
           next
         end
@@ -1229,9 +1227,9 @@ get "/api/v1/channels/:ucid/videos" do |env|
         if published.ends_with? "watching"
           next
         end
-        published = decode_date(published)
+        published = decode_date(published).epoch
 
-        view_count = item.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[2])).not_nil!
+        view_count = node.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[2])).not_nil!
         view_count = view_count.content.rchop(" views")
         if view_count = "No"
           view_count = 0
@@ -1239,7 +1237,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
           view_count = view_count.delete(",").to_i
         end
 
-        descriptionHtml = item.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
+        descriptionHtml = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
         if !descriptionHtml
           description = ""
           descriptionHtml = ""
@@ -1250,10 +1248,7 @@ get "/api/v1/channels/:ucid/videos" do |env|
           description = XML.parse_html(description).content.strip("\n ")
         end
 
-        length_seconds = item.xpath_node(%q(.//span[@class="video-time"]/span)).not_nil!.content
-        length_seconds = length_seconds.split(":").map { |a| a.to_i }
-        length_seconds = [0] * (3 - length_seconds.size) + length_seconds
-        length_seconds = Time::Span.new(length_seconds[0], length_seconds[1], length_seconds[2])
+        length_seconds = decode_length_seconds(node.xpath_node(%q(.//span[@class="video-time"])).not_nil!.content)
 
         json.object do
           json.field "title", title
@@ -1281,8 +1276,8 @@ get "/api/v1/channels/:ucid/videos" do |env|
           json.field "descriptionHtml", descriptionHtml
 
           json.field "viewCount", view_count
-          json.field "published", published.epoch
-          json.field "lengthSeconds", length_seconds.total_seconds.to_i
+          json.field "published", published
+          json.field "lengthSeconds", length_seconds
         end
       end
     end
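With published decoded to a Unix epoch up front and lengthSeconds computed by the new decode_length_seconds helper, each item in the JSON array carries plain integers for both fields. A sketch of one item as emitted after this change, with made-up values for illustration:

    {
      "title": "Example upload",
      "descriptionHtml": "…",
      "viewCount": 1024,
      "published": 1531180800,
      "lengthSeconds": 260
    }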
@@ -1308,8 +1303,8 @@ get "/api/v1/search" do |env|
 
   results = JSON.build do |json|
     json.array do
-      html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
-        anchor = item.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
+      html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |node|
+        anchor = node.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
         if anchor["href"].starts_with? "https://www.googleadservices.com"
           next
         end
@@ -1317,11 +1312,11 @@ get "/api/v1/search" do |env|
         title = anchor.content.strip
         video_id = anchor["href"].lchop("/watch?v=")
 
-        anchor = item.xpath_node(%q(.//div[contains(@class, "yt-lockup-byline")]/a)).not_nil!
+        anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-byline")]/a)).not_nil!
         author = anchor.content
         author_url = anchor["href"]
 
-        published = item.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[1]))
+        published = node.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[1]))
         if !published
           next
         end
@@ -1329,9 +1324,9 @@ get "/api/v1/search" do |env|
         if published.ends_with? "watching"
           next
         end
-        published = decode_date(published)
+        published = decode_date(published).epoch
 
-        view_count = item.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[2])).not_nil!
+        view_count = node.xpath_node(%q(.//div[contains(@class,"yt-lockup-meta")]/ul/li[2])).not_nil!
         view_count = view_count.content.rchop(" views")
         if view_count = "No"
           view_count = 0
@@ -1339,7 +1334,7 @@ get "/api/v1/search" do |env|
          view_count = view_count.delete(",").to_i
         end
 
-        descriptionHtml = item.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
+        descriptionHtml = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
         if !descriptionHtml
           description = ""
           descriptionHtml = ""
@@ -1350,10 +1345,7 @@ get "/api/v1/search" do |env|
           description = XML.parse_html(description).content.strip("\n ")
         end
 
-        length_seconds = item.xpath_node(%q(.//span[@class="video-time"])).not_nil!.content
-        length_seconds = length_seconds.split(":").map { |a| a.to_i }
-        length_seconds = [0] * (3 - length_seconds.size) + length_seconds
-        length_seconds = Time::Span.new(length_seconds[0], length_seconds[1], length_seconds[2])
+        length_seconds = decode_length_seconds(node.xpath_node(%q(.//span[@class="video-time"])).not_nil!.content)
 
         json.object do
           json.field "title", title
@@ -1384,8 +1376,8 @@ get "/api/v1/search" do |env|
           json.field "descriptionHtml", descriptionHtml
 
           json.field "viewCount", view_count
-          json.field "published", published.epoch
-          json.field "lengthSeconds", length_seconds.total_seconds.to_i
+          json.field "published", published
+          json.field "lengthSeconds", length_seconds
         end
       end
     end
@@ -2134,12 +2126,12 @@ get "/feed/channel/:ucid" do |env|
         xml.element("uri") { xml.text "#{scheme}#{host}/channel/#{ucid}" }
       end
 
-      document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")])).each do |item|
-        anchor = item.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
+      document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")])).each do |node|
+        anchor = node.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a)).not_nil!
         title = anchor.content.strip
         video_id = anchor["href"].lchop("/watch?v=")
 
-        view_count = item.xpath_node(%q(.//div[@class="yt-lockup-meta"]/ul/li[2])).not_nil!
+        view_count = node.xpath_node(%q(.//div[@class="yt-lockup-meta"]/ul/li[2])).not_nil!
         view_count = view_count.content.rchop(" views")
         if view_count = "No"
           view_count = 0
@@ -2147,7 +2139,7 @@ get "/feed/channel/:ucid" do |env|
           view_count = view_count.delete(",").to_i
         end
 
-        descriptionHtml = item.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
+        descriptionHtml = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
         if !descriptionHtml
           description = ""
           descriptionHtml = ""
@@ -2158,7 +2150,7 @@ get "/feed/channel/:ucid" do |env|
           description = XML.parse_html(description).content.strip("\n ")
         end
 
-        published = item.xpath_node(%q(.//div[@class="yt-lockup-meta"]/ul/li[1]))
+        published = node.xpath_node(%q(.//div[@class="yt-lockup-meta"]/ul/li[1]))
         if !published
           next
         end
@@ -2202,12 +2194,12 @@ get "/feed/private" do |env|
   token = env.params.query["token"]?
 
   if !token
-    halt env, status_code: 401
+    halt env, status_code: 403
   end
 
-  user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token, as: User)
+  user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token.strip, as: User)
   if !user
-    halt env, status_code: 401
+    halt env, status_code: 403
   end
 
   max_results = env.params.query["max_results"]?.try &.to_i?
@@ -2715,10 +2707,10 @@ get "/channel/:ucid" do |env|
   author = document.xpath_node(%q(//div[@class="pl-video-owner"]/a)).not_nil!.content
 
   videos = [] of ChannelVideo
-  document.xpath_nodes(%q(//a[contains(@class,"pl-video-title-link")])).each do |item|
-    href = URI.parse(item["href"])
+  document.xpath_nodes(%q(//a[contains(@class,"pl-video-title-link")])).each do |node|
+    href = URI.parse(node["href"])
     id = HTTP::Params.parse(href.query.not_nil!)["v"]
-    title = item.content
+    title = node.content
 
     videos << ChannelVideo.new(id, title, Time.now, Time.now, ucid, author)
   end
@@ -2726,14 +2718,6 @@ get "/channel/:ucid" do |env|
   templated "channel"
 end
 
-get "/redirect" do |env|
-  if env.params.query["q"]?
-    env.redirect env.params.query["q"]
-  else
-    env.redirect "/"
-  end
-end
-
 get "/api/manifest/dash/id/:id" do |env|
   env.response.headers.add("Access-Control-Allow-Origin", "*")
   env.response.content_type = "application/dash+xml"
@@ -663,19 +663,27 @@ def arg_array(array, start = 1)
 end
 
 def add_alt_links(html)
-  alt_links = [] of {Int32, String}
+  alt_links = [] of {String, String}
 
-  # This is painful but is likely the only way to accomplish this in Crystal,
+  # This is painful but likely the only way to accomplish this in Crystal,
   # as Crystigiri and others are not able to insert XML Nodes into a document.
   # The goal here is to use as little regex as possible
   html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
     anchor = XML.parse_html(match[0])
     anchor = anchor.xpath_node("//a").not_nil!
-    url = URI.parse(HTML.unescape(anchor["href"]))
+    url = URI.parse(anchor["href"])
 
     if ["www.youtube.com", "m.youtube.com"].includes?(url.host)
+      if url.path == "/redirect"
+        params = HTTP::Params.parse(url.query.not_nil!)
+        alt_url = params["q"]?
+        alt_url ||= "/"
+      else
+        alt_url = url.full_path
+      end
+
       alt_link = <<-END_HTML
-      <a href="#{url.full_path}">
+      <a href="#{alt_url}">
       <i class="icon ion-ios-link" aria-hidden="true"></i>
       </a>
       END_HTML
@@ -685,16 +693,23 @@ def add_alt_links(html)
       <i class="icon ion-ios-link" aria-hidden="true"></i>
       </a>
       END_HTML
+    elsif url.to_s == "#"
+      length_seconds = decode_length_seconds(anchor.content)
+      alt_anchor = <<-END_HTML
+      <a href="javascript:void(0)" onclick="player.currentTime(#{length_seconds})">#{anchor.content}</a>
+      END_HTML
+
+      html = html.sub(anchor.to_s, alt_anchor)
+      next
     else
       alt_link = ""
     end
 
-    alt_links << {match.end.not_nil!, alt_link}
+    alt_links << {anchor.to_s, alt_link}
   end
 
-  alt_links.reverse!
-  alt_links.each do |position, alt_link|
-    html = html.insert(position, alt_link)
+  alt_links.each do |original, alternate|
+    html = html.sub(original, original + alternate)
   end
 
   return html
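To make the intent concrete, here is a hypothetical before/after (made-up input, not from the source). A description link wrapped in YouTube's /redirect interstitial now gets an alt link pointing straight at the unwrapped target, appended after the original anchor:

    <a href="https://www.youtube.com/redirect?q=https%3A%2F%2Fexample.com%2F">example.com</a>
    gains, roughly,
    <a href="https://example.com/"><i class="icon ion-ios-link" aria-hidden="true"></i></a>

and a bare "#" timestamp anchor is rewritten in place to seek the player instead:

    <a href="#">1:23</a>
    becomes
    <a href="javascript:void(0)" onclick="player.currentTime(83)">1:23</a>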
@@ -706,7 +721,7 @@ def fill_links(html, scheme, host)
   html.xpath_nodes("//a").each do |match|
     url = URI.parse(match["href"])
     # Reddit links don't have host
-    if !url.host && !match["href"].starts_with?("javascript")
+    if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
       url.scheme = scheme
       url.host = host
       match["href"] = url
@@ -915,6 +930,15 @@ def create_user(sid, email, password)
   return user
 end
 
+def decode_length_seconds(string)
+  length_seconds = string.split(":").map { |a| a.to_i }
+  length_seconds = [0] * (3 - length_seconds.size) + length_seconds
+  length_seconds = Time::Span.new(length_seconds[0], length_seconds[1], length_seconds[2])
+  length_seconds = length_seconds.total_seconds.to_i
+
+  return length_seconds
+end
+
 def decode_time(string)
   time = string.try &.to_f?
 
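A quick sanity check of the new helper (illustrative inputs; the results follow directly from the definition above, which pads the time string to hours:minutes:seconds before converting):

    decode_length_seconds("4:20")    # => 260
    decode_length_seconds("1:02:03") # => 3723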