Encapsulate helper constants and functions inside its own module

This commit is contained in:
Fijxu 2026-02-19 21:29:29 -03:00
parent fda8d1b528
commit 5df96989a3
No known key found for this signature in database
GPG Key ID: 32C1DDF333EDA6A4
14 changed files with 159 additions and 155 deletions

View File

@@ -127,7 +127,7 @@ def extract_channel_community(items, *, ucid, locale, format, thin_mode, is_sing
reply_count = short_text_to_number(post.dig?("actionButtons", "commentActionButtonsRenderer", "replyButton", "buttonRenderer", "text", "simpleText").try &.as_s || "0") reply_count = short_text_to_number(post.dig?("actionButtons", "commentActionButtonsRenderer", "replyButton", "buttonRenderer", "text", "simpleText").try &.as_s || "0")
json.field "content", html_to_content(content_html) json.field "content", Helpers.html_to_content(content_html)
json.field "contentHtml", content_html json.field "contentHtml", content_html
json.field "published", published.to_unix json.field "published", published.to_unix

View File

@@ -254,7 +254,7 @@ module Invidious::Comments
end end
content_html = html_content || "" content_html = html_content || ""
json.field "content", html_to_content(content_html) json.field "content", Helpers.html_to_content(content_html)
json.field "contentHtml", content_html json.field "contentHtml", content_html
if published_text != nil if published_text != nil

View File

@@ -1,7 +1,5 @@
require "./macros" require "./macros"
TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
struct Nonce struct Nonce
include DB::Serializable include DB::Serializable
@@ -24,60 +22,124 @@ struct Annotation
property annotations : String property annotations : String
end end
def html_to_content(description_html : String) module Helpers
description = description_html.gsub(/(<br>)|(<br\/>)/, { extend self
"<br>": "\n",
"<br/>": "\n",
})
if !description.empty? private TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
description = XML.parse_html(description).content.strip("\n ")
end
return description def html_to_content(description_html : String)
end description = description_html.gsub(/(<br>)|(<br\/>)/, {
"<br>": "\n",
"<br/>": "\n",
})
def cache_annotation(id, annotations) if !description.empty?
if !CONFIG.cache_annotations description = XML.parse_html(description).content.strip("\n ")
return
end
body = XML.parse(annotations)
nodeset = body.xpath_nodes(%q(/document/annotations/annotation))
return if nodeset == 0
has_legacy_annotations = false
nodeset.each do |node|
if !{"branding", "card", "drawer"}.includes? node["type"]?
has_legacy_annotations = true
break
end end
return description
end end
Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations def cache_annotation(id, annotations)
end if !CONFIG.cache_annotations
return
end
def create_notification_stream(env, topics, connection_channel) body = XML.parse(annotations)
connection = Channel(PQ::Notification).new(8) nodeset = body.xpath_nodes(%q(/document/annotations/annotation))
connection_channel.send({true, connection})
locale = env.get("preferences").as(Preferences).locale return if nodeset == 0
since = env.params.query["since"]?.try &.to_i? has_legacy_annotations = false
id = 0 nodeset.each do |node|
if !{"branding", "card", "drawer"}.includes? node["type"]?
has_legacy_annotations = true
break
end
end
Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end
def create_notification_stream(env, topics, connection_channel)
connection = Channel(PQ::Notification).new(8)
connection_channel.send({true, connection})
locale = env.get("preferences").as(Preferences).locale
since = env.params.query["since"]?.try &.to_i?
id = 0
if topics.includes? "debug"
spawn do
begin
loop do
time_span = [0, 0, 0, 0]
time_span[rand(4)] = rand(30) + 5
published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
video_id = TEST_IDS[rand(TEST_IDS.size)]
video = get_video(video_id)
video.published = published
response = JSON.parse(video.to_json(locale, nil))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
env.response.flush
id += 1
sleep 1.minute
Fiber.yield
end
rescue ex
end
end
end
spawn do
begin
if since
since_unix = Time.unix(since.not_nil!)
topics.try &.each do |topic|
case topic
when .match(/UC[A-Za-z0-9_-]{22}/)
Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
response = JSON.parse(video.to_json(locale))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
env.response.flush
id += 1
end
else
# TODO
end
end
end
end
end
if topics.includes? "debug"
spawn do spawn do
begin begin
loop do loop do
time_span = [0, 0, 0, 0] event = connection.receive
time_span[rand(4)] = rand(30) + 5
published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3]) notification = JSON.parse(event.payload)
video_id = TEST_IDS[rand(TEST_IDS.size)] topic = notification["topic"].as_s
video_id = notification["videoId"].as_s
published = notification["published"].as_i64
if !topics.try &.includes? topic
next
end
video = get_video(video_id) video = get_video(video_id)
video.published = published video.published = Time.unix(published)
response = JSON.parse(video.to_json(locale, nil)) response = JSON.parse(video.to_json(locale, nil))
env.response.puts "id: #{id}" env.response.puts "id: #{id}"
@@ -86,65 +148,20 @@ def create_notification_stream(env, topics, connection_channel)
env.response.flush env.response.flush
id += 1 id += 1
sleep 1.minute
Fiber.yield
end end
rescue ex rescue ex
ensure
connection_channel.send({false, connection})
end end
end end
end
spawn do
begin
if since
since_unix = Time.unix(since.not_nil!)
topics.try &.each do |topic|
case topic
when .match(/UC[A-Za-z0-9_-]{22}/)
Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
response = JSON.parse(video.to_json(locale))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
env.response.flush
id += 1
end
else
# TODO
end
end
end
end
end
spawn do
begin begin
# Send heartbeat
loop do loop do
event = connection.receive env.response.puts ":keepalive #{Time.utc.to_unix}"
notification = JSON.parse(event.payload)
topic = notification["topic"].as_s
video_id = notification["videoId"].as_s
published = notification["published"].as_i64
if !topics.try &.includes? topic
next
end
video = get_video(video_id)
video.published = Time.unix(published)
response = JSON.parse(video.to_json(locale, nil))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts env.response.puts
env.response.flush env.response.flush
sleep (20 + rand(11)).seconds
id += 1
end end
rescue ex rescue ex
ensure ensure
@@ -152,51 +169,38 @@ def create_notification_stream(env, topics, connection_channel)
end end
end end
begin def extract_initial_data(body) : Hash(String, JSON::Any)
# Send heartbeat return JSON.parse(body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx).try &.["info"] || "{}").as_h
loop do
env.response.puts ":keepalive #{Time.utc.to_unix}"
env.response.puts
env.response.flush
sleep (20 + rand(11)).seconds
end
rescue ex
ensure
connection_channel.send({false, connection})
end
end
def extract_initial_data(body) : Hash(String, JSON::Any)
return JSON.parse(body.match(/(window\["ytInitialData"\]|var\s*ytInitialData)\s*=\s*(?<info>{.*?});<\/script>/mx).try &.["info"] || "{}").as_h
end
def proxy_file(response, env)
if response.headers.includes_word?("Content-Encoding", "gzip")
Compress::Gzip::Writer.open(env.response) do |deflate|
IO.copy response.body_io, deflate
end
elsif response.headers.includes_word?("Content-Encoding", "deflate")
Compress::Deflate::Writer.open(env.response) do |deflate|
IO.copy response.body_io, deflate
end
else
IO.copy response.body_io, env.response
end
end
# Fetch the playback requests tracker from the statistics endpoint.
#
# Creates a new tracker when unavailable.
def get_playback_statistic
if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
tracker = {
"totalRequests" => 0_i64,
"successfulRequests" => 0_i64,
"ratio" => 0_f64,
}
Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
end end
return tracker.as(Hash(String, Int64 | Float64)) def proxy_file(response, env)
if response.headers.includes_word?("Content-Encoding", "gzip")
Compress::Gzip::Writer.open(env.response) do |deflate|
IO.copy response.body_io, deflate
end
elsif response.headers.includes_word?("Content-Encoding", "deflate")
Compress::Deflate::Writer.open(env.response) do |deflate|
IO.copy response.body_io, deflate
end
else
IO.copy response.body_io, env.response
end
end
# Fetch the playback requests tracker from the statistics endpoint.
#
# Creates a new tracker when unavailable.
def get_playback_statistic
if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
tracker = {
"totalRequests" => 0_i64,
"successfulRequests" => 0_i64,
"ratio" => 0_f64,
}
Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
end
return tracker.as(Hash(String, Int64 | Float64))
end
end end

View File

@@ -53,7 +53,7 @@ struct SearchVideo
xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg") xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
end end
xml.element("p", style: "word-break:break-word;white-space:pre-wrap") { xml.text html_to_content(self.description_html) } xml.element("p", style: "word-break:break-word;white-space:pre-wrap") { xml.text Helpers.html_to_content(self.description_html) }
end end
end end
@@ -63,7 +63,7 @@ struct SearchVideo
xml.element("media:title") { xml.text self.title } xml.element("media:title") { xml.text self.title }
xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg", xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
width: "320", height: "180") width: "320", height: "180")
xml.element("media:description") { xml.text html_to_content(self.description_html) } xml.element("media:description") { xml.text Helpers.html_to_content(self.description_html) }
end end
xml.element("media:community") do xml.element("media:community") do
@@ -111,7 +111,7 @@ struct SearchVideo
Invidious::JSONify::APIv1.thumbnails(json, self.id) Invidious::JSONify::APIv1.thumbnails(json, self.id)
end end
json.field "description", html_to_content(self.description_html) json.field "description", Helpers.html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html json.field "descriptionHtml", self.description_html
json.field "viewCount", self.views json.field "viewCount", self.views
@@ -255,7 +255,7 @@ struct SearchChannel
json.field "videoCount", self.video_count json.field "videoCount", self.video_count
json.field "channelHandle", self.channel_handle json.field "channelHandle", self.channel_handle
json.field "description", html_to_content(self.description_html) json.field "description", Helpers.html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html json.field "descriptionHtml", self.description_html
end end
end end

View File

@@ -27,7 +27,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
video_id = "CvFH_6DNRCY" if rdid.starts_with? "OLAK5uy_" video_id = "CvFH_6DNRCY" if rdid.starts_with? "OLAK5uy_"
response = YT_POOL.client &.get("/watch?v=#{video_id}&list=#{rdid}&gl=US&hl=en", headers) response = YT_POOL.client &.get("/watch?v=#{video_id}&list=#{rdid}&gl=US&hl=en", headers)
initial_data = extract_initial_data(response.body) initial_data = Helpers.extract_initial_data(response.body)
if !initial_data["contents"]["twoColumnWatchNextResults"]["playlist"]? if !initial_data["contents"]["twoColumnWatchNextResults"]["playlist"]?
raise InfoException.new("Could not create mix.") raise InfoException.new("Could not create mix.")

View File

@@ -199,7 +199,7 @@ struct InvidiousPlaylist
json.field "authorUrl", nil json.field "authorUrl", nil
json.field "authorThumbnails", [] of String json.field "authorThumbnails", [] of String
json.field "description", html_to_content(self.description_html) json.field "description", Helpers.html_to_content(self.description_html)
json.field "descriptionHtml", self.description_html json.field "descriptionHtml", self.description_html
json.field "videoCount", self.video_count json.field "videoCount", self.video_count

View File

@@ -8,7 +8,7 @@ module Invidious::Routes::API::V1::Authenticated
# topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000) # topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
# topics ||= [] of String # topics ||= [] of String
# create_notification_stream(env, topics, connection_channel) # Helpers.create_notification_stream(env, topics, connection_channel)
# end # end
def self.get_preferences(env) def self.get_preferences(env)
@@ -485,6 +485,6 @@ module Invidious::Routes::API::V1::Authenticated
topics = raw_topics.try &.split(",").uniq.first(1000) topics = raw_topics.try &.split(",").uniq.first(1000)
topics ||= [] of String topics ||= [] of String
create_notification_stream(env, topics, CONNECTION_CHANNEL) Helpers.create_notification_stream(env, topics, CONNECTION_CHANNEL)
end end
end end

View File

@@ -97,7 +97,7 @@ module Invidious::Routes::API::V1::Channels
json.field "autoGenerated", channel.auto_generated json.field "autoGenerated", channel.auto_generated
json.field "ageGated", channel.is_age_gated json.field "ageGated", channel.is_age_gated
json.field "isFamilyFriendly", channel.is_family_friendly json.field "isFamilyFriendly", channel.is_family_friendly
json.field "description", html_to_content(channel.description_html) json.field "description", Helpers.html_to_content(channel.description_html)
json.field "descriptionHtml", channel.description_html json.field "descriptionHtml", channel.description_html
json.field "allowedRegions", channel.allowed_regions json.field "allowedRegions", channel.allowed_regions

View File

@@ -300,7 +300,7 @@ module Invidious::Routes::API::V1::Videos
annotations = response.body annotations = response.body
cache_annotation(id, annotations) Helpers.cache_annotation(id, annotations)
end end
else # "youtube" else # "youtube"
response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}") response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}")

View File

@@ -96,7 +96,7 @@ module Invidious::Routes::Images
break break
end end
proxy_file(response, env) Helpers.proxy_file(response, env)
end end
rescue ex rescue ex
end end
@@ -148,6 +148,6 @@ module Invidious::Routes::Images
return env.response.headers.delete("Transfer-Encoding") return env.response.headers.delete("Transfer-Encoding")
end end
return proxy_file(response, env) return Helpers.proxy_file(response, env)
end end
end end

View File

@@ -83,7 +83,7 @@ module Invidious::Routes::VideoPlayback
# Remove the Range header added previously. # Remove the Range header added previously.
headers.delete("Range") if range_header.nil? headers.delete("Range") if range_header.nil?
playback_statistics = get_playback_statistic() playback_statistics = Helpers.get_playback_statistic()
playback_statistics["totalRequests"] += 1 playback_statistics["totalRequests"] += 1
if response.status_code >= 400 if response.status_code >= 400
@@ -195,7 +195,7 @@ module Invidious::Routes::VideoPlayback
end end
end end
proxy_file(resp, env) Helpers.proxy_file(resp, env)
end end
rescue ex rescue ex
if ex.message != "Error reading socket: Connection reset by peer" if ex.message != "Error reading socket: Connection reset by peer"

View File

@@ -21,7 +21,7 @@ module Invidious::Search
if response.status_code == 404 if response.status_code == 404
response = YT_POOL.client &.get("/user/#{query.channel}") response = YT_POOL.client &.get("/user/#{query.channel}")
response = YT_POOL.client &.get("/c/#{query.channel}") if response.status_code == 404 response = YT_POOL.client &.get("/c/#{query.channel}") if response.status_code == 404
initial_data = extract_initial_data(response.body) initial_data = Helpers.extract_initial_data(response.body)
ucid = initial_data.dig?("header", "c4TabbedHeaderRenderer", "channelId").try(&.as_s?) ucid = initial_data.dig?("header", "c4TabbedHeaderRenderer", "channelId").try(&.as_s?)
raise ChannelSearchException.new(query.channel) if !ucid raise ChannelSearchException.new(query.channel) if !ucid
else else

View File

@@ -15,7 +15,7 @@ struct Invidious::User
playlists.each do |playlist| playlists.each do |playlist|
json.object do json.object do
json.field "title", playlist.title json.field "title", playlist.title
json.field "description", html_to_content(playlist.description_html) json.field "description", Helpers.html_to_content(playlist.description_html)
json.field "privacy", playlist.privacy.to_s json.field "privacy", playlist.privacy.to_s
json.field "videos" do json.field "videos" do
json.array do json.array do

View File

@@ -84,7 +84,7 @@ def extract_video_info(video_id : String)
# Although technically not a call to /videoplayback the fact that YouTube is returning the # Although technically not a call to /videoplayback the fact that YouTube is returning the
# wrong video means that we should count it as a failure. # wrong video means that we should count it as a failure.
get_playback_statistic()["totalRequests"] += 1 Helpers.get_playback_statistic()["totalRequests"] += 1
return { return {
"version" => JSON::Any.new(Video::SCHEMA_VERSION.to_i64), "version" => JSON::Any.new(Video::SCHEMA_VERSION.to_i64),