# invidious/src/invidious.cr

# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2019 Omar Roth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
require "digest/md5"
require "file_utils"
require "kemal"
require "markdown"
require "openssl/hmac"
require "option_parser"
require "pg"
require "sqlite3"
require "xml"
require "yaml"
require "zip"
require "./invidious/helpers/*"
require "./invidious/*"
CONFIG = Config.from_yaml(File.read("config/config.yml"))
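# HMAC_KEY is used to sign CSRF tokens and CAPTCHA challenges; a random key is generated when none is configured.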
HMAC_KEY = CONFIG.hmac_key || Random::Secure.hex(32)
PG_URL = URI.new(
scheme: "postgres",
user: CONFIG.db.user,
password: CONFIG.db.password,
host: CONFIG.db.host,
port: CONFIG.db.port,
path: CONFIG.db.dbname,
)
PG_DB = DB.open PG_URL
ARCHIVE_URL = URI.parse("https://archive.org")
LOGIN_URL = URI.parse("https://accounts.google.com")
PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
REDDIT_URL = URI.parse("https://www.reddit.com")
TEXTCAPTCHA_URL = URI.parse("http://textcaptcha.com")
YT_URL = URI.parse("https://www.youtube.com")
CHARS_SAFE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
CURRENT_BRANCH = {{ "#{`git branch | sed -n '/\* /s///p'`.strip}" }}
CURRENT_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
CURRENT_VERSION = {{ "#{`git describe --tags --abbrev=0`.strip}" }}
# This is used to determine the `?v=` on the end of file URLs (for cache busting). We
# only need to expire modified assets, so we can use this to find the last commit that changes
# any assets
ASSET_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit -- assets`.strip}" }}
SOFTWARE = {
"name" => "invidious",
"version" => "#{CURRENT_VERSION}-#{CURRENT_COMMIT}",
"branch" => "#{CURRENT_BRANCH}",
}
LOCALES = {
"ar" => load_locale("ar"),
"de" => load_locale("de"),
"el" => load_locale("el"),
"en-US" => load_locale("en-US"),
"eo" => load_locale("eo"),
"es" => load_locale("es"),
"eu" => load_locale("eu"),
"fr" => load_locale("fr"),
"it" => load_locale("it"),
"nb_NO" => load_locale("nb_NO"),
"nl" => load_locale("nl"),
"pl" => load_locale("pl"),
"ru" => load_locale("ru"),
"uk" => load_locale("uk"),
}
config = CONFIG
logger = Invidious::LogHandler.new
Kemal.config.extra_options do |parser|
parser.banner = "Usage: invidious [arguments]"
parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
begin
config.channel_threads = number.to_i
rescue ex
puts "THREADS must be an integer"
exit
end
end
parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
begin
config.feed_threads = number.to_i
rescue ex
puts "THREADS must be an integer"
exit
end
end
parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output|
FileUtils.mkdir_p(File.dirname(output))
logger = Invidious::LogHandler.new(File.open(output, mode: "a"))
end
parser.on("-v", "--version", "Print version") do |output|
puts SOFTWARE.to_pretty_json
exit
end
end
Kemal::CLI.new ARGV
# Check table integrity
if CONFIG.check_tables
analyze_table(PG_DB, logger, "channel_videos", ChannelVideo)
analyze_table(PG_DB, logger, "nonces", Nonce)
analyze_table(PG_DB, logger, "session_ids", SessionId)
analyze_table(PG_DB, logger, "users", User)
analyze_table(PG_DB, logger, "videos", Video)
if CONFIG.cache_annotations
analyze_table(PG_DB, logger, "annotations", Annotation)
end
end
# Start jobs
refresh_channels(PG_DB, logger, config.channel_threads, config.full_refresh)
refresh_feeds(PG_DB, logger, config.feed_threads)
subscribe_to_feeds(PG_DB, logger, HMAC_KEY, config)
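# Instance statistics (user counts, last channel refresh), refreshed every minute in a background fiber when statistics_enabled is set.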
statistics = {
"error" => "Statistics are not available.",
}
if config.statistics_enabled
spawn do
loop do
statistics = {
"version" => "2.0",
"software" => SOFTWARE,
"openRegistrations" => config.registration_enabled,
"usage" => {
"users" => {
"total" => PG_DB.query_one("SELECT count(*) FROM users", as: Int64),
"activeHalfyear" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64),
"activeMonth" => PG_DB.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64),
},
},
"metadata" => {
"updatedAt" => Time.now.to_unix,
"lastChannelRefreshedAt" => PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0,
},
}
sleep 1.minute
end
end
end
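# Cached top and popular video lists, refreshed every minute in background fibers (top videos only when top_enabled is set).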
top_videos = [] of Video
if config.top_enabled
spawn do
pull_top_videos(config, PG_DB) do |videos|
top_videos = videos
sleep 1.minute
end
end
end
popular_videos = [] of ChannelVideo
spawn do
pull_popular_videos(PG_DB) do |videos|
popular_videos = videos
sleep 1.minute
end
end
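# decrypt_function holds the operations used to unscramble signature-protected stream URLs;
# a background fiber keeps it current via update_decrypt_function.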
decrypt_function = [] of {name: String, value: Int32}
spawn do
update_decrypt_function do |function|
decrypt_function = function
end
end
proxies = PROXY_LIST
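# Runs before every route: sets security headers, restores the session and CSRF token for
# logged-in users, and applies per-request preference overrides (dark_mode, thin_mode, hl).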
before_all do |env|
host_url = make_host_url(config, Kemal.config)
env.response.headers["X-XSS-Protection"] = "1; mode=block"
env.response.headers["X-Content-Type-Options"] = "nosniff"
env.response.headers["Content-Security-Policy"] = "default-src blob: data: 'self' #{host_url} 'unsafe-inline' 'unsafe-eval'; media-src blob: 'self' #{host_url} https://*.googlevideo.com:443"
env.response.headers["Referrer-Policy"] = "same-origin"
if (Kemal.config.ssl || config.https_only) && config.hsts
env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
end
begin
preferences = Preferences.from_json(env.request.cookies["PREFS"]?.try &.value || "{}")
rescue
preferences = Preferences.from_json("{}")
end
if env.request.cookies.has_key? "SID"
sid = env.request.cookies["SID"].value
if sid.starts_with? "v1:"
raise "Cannot use token as SID"
end
# Invidious users only have SID
if !env.request.cookies.has_key? "SSID"
if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
csrf_token = generate_response(sid, {":signout", ":watch_ajax", ":subscription_ajax", ":token_ajax", ":authorize_token"}, HMAC_KEY, PG_DB, 1.week)
preferences = user.preferences
env.set "sid", sid
env.set "csrf_token", csrf_token
env.set "user", user
end
else
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
begin
user, sid = get_user(sid, headers, PG_DB, false)
csrf_token = generate_response(sid, {":signout", ":watch_ajax", ":subscription_ajax", ":token_ajax", ":authorize_token"}, HMAC_KEY, PG_DB, 1.week)
preferences = user.preferences
env.set "sid", sid
env.set "csrf_token", csrf_token
env.set "user", user
rescue ex
end
end
end
dark_mode = env.params.query["dark_mode"]? || preferences.dark_mode.to_s
dark_mode = dark_mode == "true"
thin_mode = env.params.query["thin_mode"]? || preferences.thin_mode.to_s
thin_mode = thin_mode == "true"
locale = env.params.query["hl"]? || preferences.locale
preferences.dark_mode = dark_mode
preferences.thin_mode = thin_mode
preferences.locale = locale
env.set "preferences", preferences
current_page = env.request.path
if env.request.query
query = HTTP::Params.parse(env.request.query.not_nil!)
if query["referer"]?
query["referer"] = get_referer(env, "/")
end
current_page += "?#{query}"
end
env.set "current_page", URI.escape(current_page)
end
get "/" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
if user.preferences.redirect_feed
next env.redirect "/feed/subscriptions"
end
end
case config.default_home
when "Popular"
templated "popular"
when "Top"
templated "top"
when "Trending"
env.redirect "/feed/trending"
when "Subscriptions"
if user
env.redirect "/feed/subscriptions"
else
templated "popular"
end
end
end
get "/privacy" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
templated "privacy"
end
get "/licenses" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
rendered "licenses"
end
# Videos
get "/watch" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
if env.params.query.to_s.includes?("%20") || env.params.query.to_s.includes?("+")
url = "/watch?" + env.params.query.to_s.gsub("%20", "").delete("+")
next env.redirect url
end
if env.params.query["v"]?
id = env.params.query["v"]
if env.params.query["v"].empty?
error_message = "Invalid parameters."
next templated "error"
end
if id.size > 11
url = "/watch?v=#{id[0, 11]}"
env.params.query.delete_all("v")
if env.params.query.size > 0
url += "&#{env.params.query}"
end
next env.redirect url
end
else
next env.redirect "/"
end
plid = env.params.query["list"]?
nojs = env.params.query["nojs"]?
nojs ||= "0"
nojs = nojs == "1"
preferences = env.get("preferences").as(Preferences)
user = env.get?("user").try &.as(User)
if user
subscriptions = user.subscriptions
watched = user.watched
end
subscriptions ||= [] of String
params = process_video_params(env.params.query, preferences)
env.params.query.delete_all("listen")
begin
video = get_video(id, PG_DB, proxies, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/watch?v=#{ex.message}"
rescue ex
error_message = ex.message
logger.write("#{id} : #{ex.message}\n")
next templated "error"
end
if preferences.annotations_subscribed &&
subscriptions.includes?(video.ucid) &&
(env.params.query["iv_load_policy"]? || "1") == "1"
params.annotations = true
end
env.params.query.delete_all("iv_load_policy")
if watched && !watched.includes? id
PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.as(User).email)
end
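# With nojs=1, comments are rendered server-side from the preferred source, falling back to
# the secondary source if the first one fails.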
if nojs
if preferences
source = preferences.comments[0]
if source.empty?
source = preferences.comments[1]
end
if source == "youtube"
begin
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
rescue ex
if preferences.comments[1] == "reddit"
comments, reddit_thread = fetch_reddit_comments(id)
comment_html = template_reddit_comments(comments, locale)
comment_html = fill_links(comment_html, "https", "www.reddit.com")
comment_html = replace_links(comment_html)
end
end
elsif source == "reddit"
begin
comments, reddit_thread = fetch_reddit_comments(id)
comment_html = template_reddit_comments(comments, locale)
comment_html = fill_links(comment_html, "https", "www.reddit.com")
comment_html = replace_links(comment_html)
rescue ex
if preferences.comments[1] == "youtube"
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
end
end
else
comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
comment_html ||= ""
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
# Older videos may not have audio sources available.
# We redirect here so they're not unplayable
if params.listen && audio_streams.empty?
next env.redirect "/watch?#{env.params.query}&listen=0"
end
captions = video.captions
preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name.simpleText) ||
params.preferred_captions.includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name.simpleText) ||
params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = "16:9"
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(config, Kemal.config)
host_params = env.request.query_params
host_params.delete_all("v")
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
end
end
next env.redirect url
end
rvs = [] of Hash(String, String)
video.info["rvs"]?.try &.split(",").each do |rv|
rvs << HTTP::Params.parse(rv).to_h
end
rating = video.info["avg_rating"].to_f64
engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
playability_status = video.player_response["playabilityStatus"]?
if playability_status && playability_status["status"] == "LIVE_STREAM_OFFLINE"
reason = playability_status["reason"]?.try &.as_s
end
reason ||= ""
templated "watch"
end
get "/embed/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
id = env.params.url["id"]
plid = env.params.query["list"]?
if md = env.params.query["playlist"]?
.try &.match(/[a-zA-Z0-9_-]{11}(,[a-zA-Z0-9_-]{11})*/)
video_series = md[0].split(",")
env.params.query.delete("playlist")
end
preferences = env.get("preferences").as(Preferences)
if id.includes?("%20") || id.includes?("+") || env.params.query.to_s.includes?("%20") || env.params.query.to_s.includes?("+")
id = env.params.url["id"].gsub("%20", "").delete("+")
url = "/embed/#{id}"
if env.params.query.size > 0
url += "?#{env.params.query.to_s.gsub("%20", "").delete("+")}"
end
next env.redirect url
end
# YouTube embed supports `videoseries` with either `list=PLID`
# or `playlist=VIDEO_ID,VIDEO_ID`
if id == "videoseries"
url = ""
if plid
begin
videos = fetch_playlist_videos(plid, 1, 1, locale: locale)
rescue ex
error_message = ex.message
next templated "error"
end
url = "/embed/#{videos[0].id}"
elsif video_series
url = "/embed/#{video_series.shift}"
env.params.query["playlist"] = video_series.join(",")
else
next env.redirect "/"
end
if env.params.query.size > 0
url += "?#{env.params.query}"
end
next env.redirect url
elsif id.size > 11
url = "/embed/#{id[0, 11]}"
if env.params.query.size > 0
url += "?#{env.params.query}"
end
next env.redirect url
end
params = process_video_params(env.params.query, preferences)
user = env.get?("user").try &.as(User)
if user
subscriptions = user.subscriptions
watched = user.watched
end
subscriptions ||= [] of String
begin
video = get_video(id, PG_DB, proxies, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/embed/#{ex.message}"
rescue ex
error_message = ex.message
next templated "error"
end
if preferences.annotations_subscribed &&
subscriptions.includes?(video.ucid) &&
(env.params.query["iv_load_policy"]? || "1") == "1"
params.annotations = true
end
if watched && !watched.includes? id
PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.as(User).email)
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
captions = video.captions
preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name.simpleText) ||
params.preferred_captions.includes?(caption.languageCode.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name.simpleText) ||
params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
aspect_ratio = nil
video.description = fill_links(video.description, "https", "www.youtube.com")
video.description = replace_links(video.description)
description = video.short_description
host_url = make_host_url(config, Kemal.config)
host_params = env.request.query_params
host_params.delete_all("v")
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
url = fmt_stream[0]["url"]
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
end
2018-07-22 09:09:43 -07:00
end
next env.redirect url
end
rendered "embed"
end
# Playlists
get "/playlist" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
plid = env.params.query["list"]?
if !plid
next env.redirect "/"
end
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if plid.starts_with? "RD"
next env.redirect "/mix?list=#{plid}"
end
begin
playlist = fetch_playlist(plid, locale)
rescue ex
error_message = ex.message
next templated "error"
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count, locale: locale)
rescue ex
videos = [] of PlaylistVideo
end
templated "playlist"
end
get "/mix" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
rdid = env.params.query["list"]?
if !rdid
next env.redirect "/"
end
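# Mix IDs start with "RD"; the remainder is used as the default continuation.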
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")
begin
mix = fetch_mix(rdid, continuation, locale: locale)
rescue ex
error_message = ex.message
next templated "error"
end
templated "mix"
end
# Search
get "/opensearch.xml" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/opensearchdescription+xml"
host = make_host_url(config, Kemal.config)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("OpenSearchDescription", xmlns: "http://a9.com/-/spec/opensearch/1.1/") do
xml.element("ShortName") { xml.text "Invidious" }
xml.element("LongName") { xml.text "Invidious Search" }
xml.element("Description") { xml.text "Search for videos, channels, and playlists on Invidious" }
xml.element("InputEncoding") { xml.text "UTF-8" }
xml.element("Image", width: 48, height: 48, type: "image/x-icon") { xml.text "#{host}/favicon.ico" }
xml.element("Url", type: "text/html", method: "get", template: "#{host}/search?q={searchTerms}")
end
end
end
get "/results" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
query = env.params.query["search_query"]?
query ||= env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if query
env.redirect "/search?q=#{URI.escape(query)}&page=#{page}"
else
env.redirect "/"
end
end
get "/search" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
query = env.params.query["search_query"]?
query ||= env.params.query["q"]?
query ||= ""
if query.empty?
next env.redirect "/"
end
page = env.params.query["page"]?.try &.to_i?
page ||= 1
user = env.get? "user"
if user
user = user.as(User)
view_name = "subscriptions_#{sha256(user.email)}"
end
channel = nil
content_type = "all"
date = ""
duration = ""
features = [] of String
sort = "relevance"
subscriptions = nil
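# Pull `key:value` operators (channel:, type:, date:, duration:, feature:, sort:, subscriptions:)
# out of the query; unrecognized operators are left in the plain-text search.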
operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
operators.each do |operator|
key, value = operator.downcase.split(":")
case key
when "channel", "user"
channel = operator.split(":")[-1]
when "content_type", "type"
content_type = value
when "date"
date = value
when "duration"
duration = value
when "feature", "features"
features = value.split(",")
when "sort"
sort = value
when "subscriptions"
subscriptions = value == "true"
else
operators.delete(operator)
end
end
search_query = (query.split(" ") - operators).join(" ")
if channel
count, videos = channel_search(search_query, page, channel)
elsif subscriptions
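# Full-text search over the user's subscription feed, backed by the per-user materialized view.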
if view_name
videos = PG_DB.query_all("SELECT id,title,published,updated,ucid,author,length_seconds FROM (
SELECT *,
to_tsvector(#{view_name}.title) ||
to_tsvector(#{view_name}.author)
as document
FROM #{view_name}
) v_search WHERE v_search.document @@ plainto_tsquery($1) LIMIT 20 OFFSET $2;", search_query, (page - 1) * 20, as: ChannelVideo)
count = videos.size
else
videos = [] of ChannelVideo
count = 0
end
else
begin
search_params = produce_search_params(sort: sort, date: date, content_type: content_type,
duration: duration, features: features)
rescue ex
error_message = ex.message
next templated "error"
end
count, videos = search(search_query, page, search_params, proxies, region).as(Tuple)
end
templated "search"
end
# Users
get "/login" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
next env.redirect "/feed/subscriptions"
end
if !config.login_enabled
error_message = "Login has been disabled by the administrator."
next templated "error"
end
referer = get_referer(env, "/feed/subscriptions")
email = nil
password = nil
captcha = nil
account_type = env.params.query["type"]?
account_type ||= "invidious"
captcha_type = env.params.query["captcha"]?
captcha_type ||= "image"
tfa = env.params.query["tfa"]?
tfa ||= false
templated "login"
end
post "/login" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env, "/feed/subscriptions")
if !config.login_enabled
error_message = "Login has been disabled by the administrator."
next templated "error"
end
email = env.params.body["email"]?.try &.downcase
password = env.params.body["password"]?
account_type = env.params.query["type"]?
account_type ||= "invidious"
case account_type
when "google"
tfa_code = env.params.body["tfa"]?.try &.lchop("G-")
# See https://github.com/ytdl-org/youtube-dl/blob/2019.04.07/youtube_dl/extractor/youtube.py#L82
begin
client = make_client(LOGIN_URL)
headers = HTTP::Headers.new
headers["Content-Type"] = "application/x-www-form-urlencoded;charset=utf-8"
headers["Google-Accounts-XSRF"] = "1"
login_page = client.get("/ServiceLogin")
headers = login_page.cookies.add_request_headers(headers)
login_page = XML.parse_html(login_page.body)
inputs = {} of String => String
login_page.xpath_nodes(%q(//input[@type="submit"])).each do |node|
name = node["id"]? || node["name"]?
name ||= ""
value = node["value"]?
value ||= ""
if name != "" && value != ""
inputs[name] = value
end
end
login_page.xpath_nodes(%q(//input[@type="hidden"])).each do |node|
name = node["id"]? || node["name"]?
name ||= ""
value = node["value"]?
value ||= ""
if name != "" && value != ""
inputs[name] = value
end
end
lookup_req = {
email, nil, [] of String, nil, "US", nil, nil, 2, false, true,
{nil, nil,
{2, 1, nil, 1, "https://accounts.google.com/ServiceLogin?passive=1209600&continue=https%3A%2F%2Faccounts.google.com%2FManageAccount&followup=https%3A%2F%2Faccounts.google.com%2FManageAccount", nil, [] of String, 4, [] of String},
1,
{nil, nil, [] of String},
nil, nil, nil, true,
}, email,
}.to_json
lookup_results = client.post("/_/signin/sl/lookup", headers, login_req(inputs, lookup_req))
headers = lookup_results.cookies.add_request_headers(headers)
lookup_results = lookup_results.body
lookup_results = lookup_results[5..-1]
lookup_results = JSON.parse(lookup_results)
user_hash = lookup_results[0][2]
challenge_req = {
user_hash, nil, 1, nil,
{1, nil, nil, nil,
{password, nil, true},
},
{nil, nil,
{2, 1, nil, 1, "https://accounts.google.com/ServiceLogin?passive=1209600&continue=https%3A%2F%2Faccounts.google.com%2FManageAccount&followup=https%3A%2F%2Faccounts.google.com%2FManageAccount", nil, [] of String, 4, [] of String},
1,
{nil, nil, [] of String},
nil, nil, nil, true},
}.to_json
challenge_results = client.post("/_/signin/sl/challenge", headers, login_req(inputs, challenge_req))
headers = challenge_results.cookies.add_request_headers(headers)
challenge_results = challenge_results.body
challenge_results = challenge_results[5..-1]
challenge_results = JSON.parse(challenge_results)
headers["Cookie"] = URI.unescape(headers["Cookie"])
if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
error_message = translate(locale, "Incorrect password")
next templated "error"
end
if challenge_results[0][-1][0].as_a?
# Prefer Authenticator app and SMS over unsupported protocols
if challenge_results[0][-1][0][0][8] != 6 && challenge_results[0][-1][0][0][8] != 9
tfa = challenge_results[0][-1][0].as_a.select { |auth_type| auth_type[8] == 6 || auth_type[8] == 9 }[0]
select_challenge = {2, nil, nil, nil, {tfa[8]}}.to_json
tl = challenge_results[1][2]
tfa = client.post("/_/signin/selectchallenge?TL=#{tl}", headers, login_req(inputs, select_challenge)).body
tfa = tfa[5..-1]
tfa = JSON.parse(tfa)[0][-1]
else
tfa = challenge_results[0][-1][0][0]
end
if tfa[2] == "TWO_STEP_VERIFICATION"
if tfa[5] == "QUOTA_EXCEEDED"
error_message = translate(locale, "Quota exceeded, try again in a few hours")
next templated "error"
end
if !tfa_code
account_type = "google"
captcha_type = "image"
tfa = true
captcha = nil
next templated "login"
end
tl = challenge_results[1][2]
request_type = tfa[8]
case request_type
when 6
# Authenticator app
tfa_req = %(["#{user_hash}",null,2,null,[6,null,null,null,null,["#{tfa_code}",false]]])
when 9
# Voice or text message
tfa_req = %(["#{user_hash}",null,2,null,[9,null,null,null,null,null,null,null,[null,"#{tfa_code}",false,2]]])
else
error_message = translate(locale, "Unable to log in, make sure two-factor authentication (Authenticator or SMS) is turned on.")
next templated "error"
end
challenge_results = client.post("/_/signin/challenge?hl=en&TL=#{tl}", headers, login_req(inputs, tfa_req))
headers = challenge_results.cookies.add_request_headers(headers)
challenge_results = challenge_results.body
challenge_results = challenge_results[5..-1]
challenge_results = JSON.parse(challenge_results)
if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
error_message = translate(locale, "Invalid TFA code")
next templated "error"
end
end
end
login_res = challenge_results[0][13][2].to_s
login = client.get(login_res, headers)
headers = login.cookies.add_request_headers(headers)
login = client.get(login.headers["Location"], headers)
headers = login.cookies.add_request_headers(headers)
cookies = HTTP::Cookies.from_headers(headers)
sid = cookies["SID"].value
user, sid = get_user(sid, headers, PG_DB)
# We are now logged in
host = URI.parse(env.request.headers["Host"]).host
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
cookies.each do |cookie|
cookie.secure = secure
if cookie.extension
cookie.extension = cookie.extension.not_nil!.gsub(".youtube.com", host)
cookie.extension = cookie.extension.not_nil!.gsub("Secure; ", "")
end
env.response.cookies << cookie
end
if env.request.cookies["PREFS"]?
preferences = env.get("preferences").as(Preferences)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
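# Preferences are now stored server-side, so expire the client's PREFS cookie.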
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.new(1990, 1, 1)
env.response.cookies << cookie
end
env.redirect referer
rescue ex
error_message = translate(locale, "Login failed. This may be because two-factor authentication is not turned on for your account.")
next templated "error"
end
when "invidious"
if !email
error_message = translate(locale, "User ID is a required field")
next templated "error"
end
if !password
error_message = translate(locale, "Password is a required field")
next templated "error"
end
user = PG_DB.query_one?("SELECT * FROM users WHERE email = $1", email, as: User)
if user
if !user.password
error_message = translate(locale, "Please sign in using 'Log in with Google'")
next templated "error"
end
if Crypto::Bcrypt::Password.new(user.password.not_nil!) == password
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.now)
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.now + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.now + 2.years,
secure: secure, http_only: true)
end
else
error_message = translate(locale, "Wrong username or password")
next templated "error"
end
# Since this user has already registered, we don't want to overwrite their preferences
if env.request.cookies["PREFS"]?
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.new(1990, 1, 1)
env.response.cookies << cookie
end
else
if !config.registration_enabled
error_message = "Registration has been disabled by the administrator."
next templated "error"
end
if config.captcha_enabled
captcha_type = env.params.body["captcha_type"]?
answer = env.params.body["answer"]?
change_type = env.params.body["change_type"]?
if !captcha_type || change_type
if change_type
captcha_type = change_type
end
captcha_type ||= "image"
account_type = "invidious"
tfa = false
if captcha_type == "image"
captcha = generate_captcha(HMAC_KEY, PG_DB)
else
captcha = generate_text_captcha(HMAC_KEY, PG_DB)
end
next templated "login"
end
tokens = env.params.body.select { |k, v| k.match(/^token\[\d+\]$/) }.map { |k, v| v }
answer ||= ""
captcha_type ||= "image"
case captcha_type
when "image"
answer = answer.lstrip('0')
answer = OpenSSL::HMAC.hexdigest(:sha256, HMAC_KEY, answer)
begin
validate_request(tokens[0], answer, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
when "text"
answer = Digest::MD5.hexdigest(answer.downcase.strip)
found_valid_captcha = false
error_message = translate(locale, "Erroneous CAPTCHA")
tokens.each_with_index do |token, i|
begin
validate_request(token, answer, env.request, HMAC_KEY, PG_DB, locale)
found_valid_captcha = true
rescue ex
error_message = ex.message
end
end
if !found_valid_captcha
next templated "error"
end
end
end
if password.empty?
error_message = translate(locale, "Password cannot be empty")
next templated "error"
end
# See https://security.stackexchange.com/a/39851
if password.size > 55
error_message = translate(locale, "Password cannot be longer than 55 characters")
next templated "error"
end
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
user, sid = create_user(sid, email, password)
user_array = user.to_a
user_array[4] = user_array[4].to_json
args = arg_array(user_array)
PG_DB.exec("INSERT INTO users VALUES (#{args})", user_array)
PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.now)
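# Each user gets a materialized view over channel_videos limited to their subscriptions,
# used for the subscription feed and for searching within subscriptions.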
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
SELECT * FROM channel_videos WHERE \
ucid = ANY ((SELECT subscriptions FROM users WHERE email = E'#{user.email.gsub("'", "\\'")}')::text[]) \
ORDER BY published DESC;")
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.now + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.now + 2.years,
secure: secure, http_only: true)
end
if env.request.cookies["PREFS"]?
preferences = env.get("preferences").as(Preferences)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.new(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
else
env.redirect referer
end
end
post "/signout" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", sid)
env.request.cookies.each do |cookie|
cookie.expires = Time.new(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
end
get "/preferences" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env)
preferences = env.get("preferences").as(Preferences)
templated "preferences"
end
post "/preferences" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env)
video_loop = env.params.body["video_loop"]?.try &.as(String)
video_loop ||= "off"
video_loop = video_loop == "on"
annotations = env.params.body["annotations"]?.try &.as(String)
annotations ||= "off"
annotations = annotations == "on"
annotations_subscribed = env.params.body["annotations_subscribed"]?.try &.as(String)
annotations_subscribed ||= "off"
annotations_subscribed = annotations_subscribed == "on"
autoplay = env.params.body["autoplay"]?.try &.as(String)
autoplay ||= "off"
autoplay = autoplay == "on"
continue = env.params.body["continue"]?.try &.as(String)
continue ||= "off"
continue = continue == "on"
continue_autoplay = env.params.body["continue_autoplay"]?.try &.as(String)
continue_autoplay ||= "off"
continue_autoplay = continue_autoplay == "on"
listen = env.params.body["listen"]?.try &.as(String)
listen ||= "off"
listen = listen == "on"
local = env.params.body["local"]?.try &.as(String)
local ||= "off"
local = local == "on"
speed = env.params.body["speed"]?.try &.as(String).to_f?
speed ||= CONFIG.default_user_preferences.speed
quality = env.params.body["quality"]?.try &.as(String)
quality ||= CONFIG.default_user_preferences.quality
volume = env.params.body["volume"]?.try &.as(String).to_i?
volume ||= CONFIG.default_user_preferences.volume
comments = [] of String
2.times do |i|
comments << (env.params.body["comments[#{i}]"]?.try &.as(String) || CONFIG.default_user_preferences.comments[i])
end
captions = [] of String
3.times do |i|
captions << (env.params.body["captions[#{i}]"]?.try &.as(String) || CONFIG.default_user_preferences.captions[i])
end
related_videos = env.params.body["related_videos"]?.try &.as(String)
related_videos ||= "off"
related_videos = related_videos == "on"
redirect_feed = env.params.body["redirect_feed"]?.try &.as(String)
redirect_feed ||= "off"
redirect_feed = redirect_feed == "on"
locale = env.params.body["locale"]?.try &.as(String)
locale ||= CONFIG.default_user_preferences.locale
dark_mode = env.params.body["dark_mode"]?.try &.as(String)
dark_mode ||= "off"
dark_mode = dark_mode == "on"
thin_mode = env.params.body["thin_mode"]?.try &.as(String)
thin_mode ||= "off"
thin_mode = thin_mode == "on"
max_results = env.params.body["max_results"]?.try &.as(String).to_i?
max_results ||= CONFIG.default_user_preferences.max_results
sort = env.params.body["sort"]?.try &.as(String)
sort ||= CONFIG.default_user_preferences.sort
latest_only = env.params.body["latest_only"]?.try &.as(String)
latest_only ||= "off"
latest_only = latest_only == "on"
unseen_only = env.params.body["unseen_only"]?.try &.as(String)
unseen_only ||= "off"
unseen_only = unseen_only == "on"
notifications_only = env.params.body["notifications_only"]?.try &.as(String)
notifications_only ||= "off"
notifications_only = notifications_only == "on"
preferences = {
"video_loop" => video_loop,
"annotations" => annotations,
"annotations_subscribed" => annotations_subscribed,
"autoplay" => autoplay,
"continue" => continue,
"continue_autoplay" => continue_autoplay,
"listen" => listen,
"local" => local,
"speed" => speed,
"quality" => quality,
"volume" => volume,
"comments" => comments,
"captions" => captions,
"related_videos" => related_videos,
"redirect_feed" => redirect_feed,
"locale" => locale,
"dark_mode" => dark_mode,
"thin_mode" => thin_mode,
"max_results" => max_results,
"sort" => sort,
"latest_only" => latest_only,
"unseen_only" => unseen_only,
"notifications_only" => notifications_only,
}.to_json
if user = env.get? "user"
user = user.as(User)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences, user.email)
if config.admins.includes? user.email
config.default_home = env.params.body["default_home"]?.try &.as(String) || config.default_home
feed_menu = [] of String
4.times do |index|
option = env.params.body["feed_menu[#{index}]"]?.try &.as(String) || ""
if !option.empty?
feed_menu << option
end
end
config.feed_menu = feed_menu
top_enabled = env.params.body["top_enabled"]?.try &.as(String)
top_enabled ||= "off"
config.top_enabled = top_enabled == "on"
captcha_enabled = env.params.body["captcha_enabled"]?.try &.as(String)
captcha_enabled ||= "off"
config.captcha_enabled = captcha_enabled == "on"
login_enabled = env.params.body["login_enabled"]?.try &.as(String)
login_enabled ||= "off"
config.login_enabled = login_enabled == "on"
registration_enabled = env.params.body["registration_enabled"]?.try &.as(String)
registration_enabled ||= "off"
config.registration_enabled = registration_enabled == "on"
statistics_enabled = env.params.body["statistics_enabled"]?.try &.as(String)
statistics_enabled ||= "off"
config.statistics_enabled = statistics_enabled == "on"
File.write("config/config.yml", config.to_yaml)
end
else
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.now + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.now + 2.years,
secure: secure, http_only: true)
end
end
env.redirect referer
end
get "/toggle_theme" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
referer = get_referer(env)
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if user = env.get? "user"
user = user.as(User)
preferences = user.preferences
preferences.dark_mode = !preferences.dark_mode
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
else
preferences = env.get("preferences").as(Preferences)
preferences.dark_mode = !preferences.dark_mode
preferences = preferences.to_json
if Kemal.config.ssl || config.https_only
secure = true
else
secure = false
end
if config.domain
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.now + 2.years,
secure: secure, http_only: true)
else
env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.now + 2.years,
secure: secure, http_only: true)
end
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
post "/watch_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/feed/subscriptions")
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
id = env.params.query["id"]?
if !id
env.response.status_code = 400
next
end
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
env.response.status_code = 400
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_mark_watched"]?
action = "action_mark_watched"
elsif env.params.query["action_mark_unwatched"]?
action = "action_mark_unwatched"
else
next env.redirect referer
end
case action
when "action_mark_watched"
if !user.watched.includes? id
PG_DB.exec("UPDATE users SET watched = watched || $1 WHERE email = $2", [id], user.email)
end
when "action_mark_unwatched"
PG_DB.exec("UPDATE users SET watched = array_remove(watched, $1) WHERE email = $2", id, user.email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
# /modify_notifications
# will "ding" all subscriptions.
# /modify_notifications?receive_all_updates=false&receive_no_updates=false
# will "unding" all subscriptions.
get "/modify_notifications" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/")
redirect = env.params.query["redirect"]?
redirect ||= "false"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
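# Users without a password signed in through Google; forward the notification settings to
# YouTube's subscription_ajax endpoint for each of their subscriptions.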
if !user.password
channel_req = {} of String => String
channel_req["receive_all_updates"] = env.params.query["receive_all_updates"]? || "true"
channel_req["receive_no_updates"] = env.params.query["receive_no_updates"]? || ""
channel_req["receive_post_updates"] = env.params.query["receive_post_updates"]? || "true"
channel_req.reject! { |k, v| v != "true" && v != "false" }
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
client = make_client(YT_URL)
html = client.get("/subscription_manager?disable_polymer=1", headers)
cookies = HTTP::Cookies.from_headers(headers)
html.cookies.each do |cookie|
if {"VISITOR_INFO1_LIVE", "YSC", "SIDCC"}.includes? cookie.name
if cookies[cookie.name]?
cookies[cookie.name] = cookie
else
cookies << cookie
end
end
end
headers = cookies.add_request_headers(headers)
match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
if match
session_token = match["session_token"]
else
next env.redirect referer
end
headers["content-type"] = "application/x-www-form-urlencoded"
channel_req["session_token"] = session_token
subs = XML.parse_html(html.body)
subs.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
channel_id = channel.content.lstrip("/channel/").not_nil!
channel_req["channel_id"] = channel_id
client.post("/subscription_ajax?action_update_subscription_preferences=1", headers, form: channel_req)
end
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
post "/subscription_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/")
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
env.response.status_code = 400
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_create_subscription_to_channel"]?
action = "action_create_subscription_to_channel"
elsif env.params.query["action_remove_subscriptions"]?
action = "action_remove_subscriptions"
else
next env.redirect referer
end
channel_id = env.params.query["c"]?
channel_id ||= ""
if !user.password
# Sync subscriptions with YouTube
subscribe_ajax(channel_id, action, env.request.headers)
end
email = user.email
case action
when .starts_with? "action_create"
if !user.subscriptions.includes? channel_id
get_channel(channel_id, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", channel_id, email)
end
when .starts_with? "action_remove"
PG_DB.exec("UPDATE users SET subscriptions = array_remove(subscriptions,$1) WHERE email = $2", channel_id, email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
get "/subscription_manager" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if !user
next env.redirect referer
end
user = user.as(User)
if !user.password
# Refresh account
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
user, sid = get_user(sid, headers, PG_DB)
end
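# action_takeout=1 requests an export of the account (JSON or OPML) instead of
# rendering the manager page.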
action_takeout = env.params.query["action_takeout"]?.try &.to_i?
action_takeout ||= 0
action_takeout = action_takeout == 1
format = env.params.query["format"]?
format ||= "rss"
if user.subscriptions.empty?
values = "'{}'"
else
values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
end
subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
subscriptions.sort_by! { |channel| channel.author.downcase }
if action_takeout
host_url = make_host_url(config, Kemal.config)
if format == "json"
env.response.content_type = "application/json"
env.response.headers["content-disposition"] = "attachment"
next {
"subscriptions" => user.subscriptions,
"watch_history" => user.watched,
"preferences" => user.preferences,
}.to_json
else
env.response.content_type = "application/xml"
env.response.headers["content-disposition"] = "attachment"
export = XML.build do |xml|
xml.element("opml", version: "1.1") do
xml.element("body") do
if format == "newpipe"
title = "YouTube Subscriptions"
else
title = "Invidious Subscriptions"
end
xml.element("outline", text: title, title: title) do
subscriptions.each do |channel|
if format == "newpipe"
xmlUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
else
xmlUrl = "#{host_url}/feed/channel/#{channel.id}"
end
xml.element("outline", text: channel.author, title: channel.author,
"type": "rss", xmlUrl: xmlUrl)
end
end
end
end
end
next export.gsub(%(<?xml version="1.0"?>\n), "")
end
end
templated "subscription_manager"
end
get "/data_control" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
if user
user = user.as(User)
templated "data_control"
else
env.redirect referer
end
end
post "/data_control" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
if user
user = user.as(User)
spawn do
# Since import can take a while, if we're not done after 20 seconds
# push out content to prevent timeout.
# Note that Chrome will try to render before the content has finished loading,
# which is why we include a loading icon. Firefox and its derivatives will not see this page,
# instead redirecting immediately once the connection has closed.
# https://stackoverflow.com/q/2091239 is helpful but not directly applicable here.
sleep 20.seconds
env.response.puts %(<meta http-equiv="refresh" content="0; url=#{referer}">)
env.response.puts %(<link rel="stylesheet" href="/css/ionicons.min.css?v=#{ASSET_COMMIT}">)
env.response.puts %(<link rel="stylesheet" href="/css/default.css?v=#{ASSET_COMMIT}">)
if env.get("preferences").as(Preferences).dark_mode
env.response.puts %(<link rel="stylesheet" href="/css/darktheme.css?v=#{ASSET_COMMIT}">)
else
env.response.puts %(<link rel="stylesheet" href="/css/lighttheme.css?v=#{ASSET_COMMIT}">)
end
env.response.puts %(<h3><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>)
env.response.flush
loop do
env.response.puts %(<!-- keepalive #{Time.now.to_unix} -->)
env.response.flush
sleep (20 + rand(11)).seconds
end
end
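# Each part of the multipart upload is dispatched on its field name; every
# supported import format updates the user's row directly.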
HTTP::FormData.parse(env.request) do |part|
body = part.body.gets_to_end
if body.empty?
next
end
case part.name
when "import_invidious"
body = JSON.parse(body)
if body["subscriptions"]?
user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
end
if body["watch_history"]?
user.watched += body["watch_history"].as_a.map { |a| a.as_s }
user.watched.uniq!
PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
end
if body["preferences"]?
user.preferences = Preferences.from_json(body["preferences"].to_json)
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", user.preferences.to_json, user.email)
end
when "import_youtube"
subscriptions = XML.parse(body)
user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_freetube"
user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
md["channel_id"]
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_newpipe_subscriptions"
body = JSON.parse(body)
user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
next match["channel"]
elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
client = make_client(YT_URL)
response = client.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
if canonical
ucid = canonical["href"].split("/")[-1]
next ucid
end
end
nil
end
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
when "import_newpipe"
Zip::Reader.open(IO::Memory.new(body)) do |file|
file.each_entry do |entry|
if entry.filename == "newpipe.db"
tempfile = File.tempfile(".db")
File.write(tempfile.path, entry.io.gets_to_end)
db = DB.open("sqlite3://" + tempfile.path)
user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
user.watched.uniq!
PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
user.subscriptions.uniq!
user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
db.close
tempfile.delete
end
end
end
end
end
end
env.redirect referer
end
get "/change_password" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY, PG_DB)
templated "change_password"
else
env.redirect referer
end
end
post "/change_password" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
# We don't store passwords for Google accounts
if !user.password
error_message = "Cannot change password for Google accounts"
next templated "error"
end
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
password = env.params.body["password"]?
if !password
error_message = translate(locale, "Password is a required field")
next templated "error"
end
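# The form submits the new password in several new_password[N] fields;
# all of them must be present and identical.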
new_passwords = env.params.body.select { |k, v| k.match(/^new_password\[\d+\]$/) }.map { |k, v| v }
if new_passwords.size <= 1 || new_passwords.uniq.size != 1
error_message = translate(locale, "New passwords must match")
next templated "error"
end
new_password = new_passwords.uniq[0]
if new_password.empty?
error_message = translate(locale, "Password cannot be empty")
next templated "error"
end
if new_password.size > 55
error_message = translate(locale, "Password cannot be longer than 55 characters")
next templated "error"
end
if Crypto::Bcrypt::Password.new(user.password.not_nil!) != password
error_message = translate(locale, "Incorrect password")
next templated "error"
end
new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
PG_DB.exec("UPDATE users SET password = $1 WHERE email = $2", new_password.to_s, user.email)
end
env.redirect referer
end
get "/delete_account" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY, PG_DB)
templated "delete_account"
else
env.redirect referer
end
end
post "/delete_account" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
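# Each account has a per-user materialized view of its subscription feed;
# drop it along with the user and session rows.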
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("DELETE FROM users * WHERE email = $1", user.email)
PG_DB.exec("DELETE FROM session_ids * WHERE email = $1", user.email)
PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")
env.request.cookies.each do |cookie|
cookie.expires = Time.new(1990, 1, 1)
env.response.cookies << cookie
end
end
env.redirect referer
end
get "/clear_watch_history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY, PG_DB)
templated "clear_watch_history"
else
env.redirect referer
end
end
post "/clear_watch_history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)
end
env.redirect referer
end
get "/authorize_token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB)
scopes = env.params.query["scopes"]?.try &.split(",")
scopes ||= [] of String
callback_url = env.params.query["callback_url"]?
if callback_url
callback_url = URI.parse(callback_url)
end
expire = env.params.query["expire"]?.try &.to_i?
templated "authorize_token"
else
env.redirect referer
end
end
post "/authorize_token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = env.get("user").as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
error_message = ex.message
env.response.status_code = 400
next templated "error"
end
scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
callback_url = env.params.body["callbackUrl"]?
expire = env.params.body["expire"]?.try &.to_i?
access_token = generate_token(user.email, scopes, expire, HMAC_KEY, PG_DB)
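# If a callback URL was supplied, append the token as a `token` query parameter
# and redirect; otherwise render the token on the page.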
if callback_url
access_token = URI.escape(access_token)
url = URI.parse(callback_url)
if url.query
query = HTTP::Params.parse(url.query.not_nil!)
else
query = HTTP::Params.new
end
query["token"] = access_token
url.query = query.to_s
env.redirect url.to_s
else
csrf_token = ""
env.set "access_token", access_token
templated "authorize_token"
end
end
end
get "/token_manager" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env, "/subscription_manager")
if !user
next env.redirect referer
end
user = user.as(User)
tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1 ORDER BY issued DESC", user.email, as: {session: String, issued: Time})
templated "token_manager"
end
post "/token_ajax" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
redirect = env.params.query["redirect"]?
redirect ||= "true"
redirect = redirect == "true"
if !user
if redirect
next env.redirect referer
else
error_message = {"error" => "No such user"}.to_json
env.response.status_code = 403
next error_message
end
end
user = user.as(User)
sid = sid.as(String)
token = env.params.body["csrf_token"]?
begin
validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
rescue ex
if redirect
error_message = ex.message
next templated "error"
else
error_message = {"error" => ex.message}.to_json
env.response.status_code = 400
next error_message
end
end
if env.params.query["action_revoke_token"]?
action = "action_revoke_token"
else
next env.redirect referer
end
session = env.params.query["session"]?
session ||= ""
case action
when .starts_with? "action_revoke_token"
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1 AND email = $2", session, user.email)
end
if redirect
env.redirect referer
else
env.response.content_type = "application/json"
"{}"
end
end
# Feeds
get "/feed/top" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
if config.top_enabled
templated "top"
else
env.redirect "/"
end
end
get "/feed/popular" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
templated "popular"
end
get "/feed/trending" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
2018-11-20 09:18:12 -08:00
trending_type = env.params.query["type"]?
trending_type ||= "Default"
region = env.params.query["region"]?
region ||= "US"
begin
trending, plid = fetch_trending(trending_type, proxies, region, locale)
rescue ex
error_message = "#{ex.message}"
next templated "error"
end
templated "trending"
end
get "/feed/subscriptions" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
sid = env.get? "sid"
referer = get_referer(env)
if user
user = user.as(User)
sid = sid.as(String)
preferences = user.preferences
token = user.token
if preferences.unseen_only
env.set "show_watched", true
end
# Refresh account
headers = HTTP::Headers.new
headers["Cookie"] = env.request.headers["Cookie"]
if !user.password
user, sid = get_user(sid, headers, PG_DB)
end
max_results = preferences.max_results
max_results ||= env.params.query["max_results"]?.try &.to_i?
max_results ||= 40
page = env.params.query["page"]?.try &.to_i?
page ||= 1
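# A negative max_results means "no limit": fetch everything and trim the list
# after sorting.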
if max_results < 0
limit = nil
offset = (page - 1) * 1
else
limit = max_results
offset = (page - 1) * max_results
end
notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
as: Array(String))
view_name = "subscriptions_#{sha256(user.email)}"
if preferences.notifications_only && !notifications.empty?
# Only show notifications
args = arg_array(notifications)
notifications = PG_DB.query_all("SELECT * FROM channel_videos WHERE id IN (#{args})
ORDER BY published DESC", notifications, as: ChannelVideo)
videos = [] of ChannelVideo
notifications.sort_by! { |video| video.published }.reverse!
case preferences.sort
when "alphabetically"
notifications.sort_by! { |video| video.title }
when "alphabetically - reverse"
notifications.sort_by! { |video| video.title }.reverse!
when "channel name"
notifications.sort_by! { |video| video.author }
when "channel name - reverse"
notifications.sort_by! { |video| video.author }.reverse!
end
else
if preferences.latest_only
if preferences.unseen_only
# Show latest video from a channel that a user hasn't watched
# "unseen_only" isn't really correct here, more accurate would be "unwatched_only"
if user.watched.empty?
values = "'{}'"
else
values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
end
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} WHERE \
NOT id = ANY (#{values}) \
ORDER BY ucid, published DESC", as: ChannelVideo)
else
# Show latest video from each channel
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} \
ORDER BY ucid, published DESC", as: ChannelVideo)
end
videos.sort_by! { |video| video.published }.reverse!
else
if preferences.unseen_only
# Only show unwatched
if user.watched.empty?
values = "'{}'"
else
values = "VALUES #{user.watched.map { |id| %(('#{id}')) }.join(",")}"
end
videos = PG_DB.query_all("SELECT * FROM #{view_name} WHERE \
NOT id = ANY (#{values}) \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
else
# Sort subscriptions as normal
videos = PG_DB.query_all("SELECT * FROM #{view_name} \
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
end
end
case preferences.sort
when "published - reverse"
videos.sort_by! { |video| video.published }
when "alphabetically"
videos.sort_by! { |video| video.title }
when "alphabetically - reverse"
videos.sort_by! { |video| video.title }.reverse!
when "channel name"
videos.sort_by! { |video| video.author }
when "channel name - reverse"
videos.sort_by! { |video| video.author }.reverse!
end
notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
as: Array(String))
notifications = videos.select { |v| notifications.includes? v.id }
videos = videos - notifications
end
if !limit
videos = videos[0..max_results]
end
# Clear user's notifications and set updated to the current time.
# "updated" here is used for delivering new notifications, so if
# we know a user has looked at their feed e.g. in the past 10 minutes,
# they've already seen a video posted 20 minutes ago, and don't need
# to be notified.
PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.now,
user.email)
user.notifications = [] of String
env.set "user", user
templated "subscriptions"
else
env.redirect referer
end
end
get "/feed/history" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
referer = get_referer(env)
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if user
user = user.as(User)
limit = user.preferences.max_results
if user.watched[(page - 1) * limit]?
watched = user.watched.reverse[(page - 1) * limit, limit]
else
watched = [] of String
end
templated "history"
else
env.redirect referer
end
end
get "/feed/channel/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
ucid = env.params.url["ucid"]
begin
author, ucid, auto_generated = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next error_message
end
client = make_client(YT_URL)
rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
rss = XML.parse_html(rss)
videos = [] of SearchVideo
rss.xpath_nodes("//feed/entry").each do |entry|
video_id = entry.xpath_node("videoid").not_nil!.content
title = entry.xpath_node("title").not_nil!.content
published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
author = entry.xpath_node("author/name").not_nil!.content
ucid = entry.xpath_node("channelid").not_nil!.content
description = entry.xpath_node("group/description").not_nil!.content
views = entry.xpath_node("group/community/statistics").not_nil!.["views"].to_i64
videos << SearchVideo.new(
title: title,
id: video_id,
author: author,
ucid: ucid,
published: published,
views: views,
description: description,
description_html: "",
length_seconds: 0,
live_now: false,
paid: false,
premium: false,
premiere_timestamp: nil
)
end
host_url = make_host_url(config, Kemal.config)
path = env.request.path
feed = XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
"xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
"xml:lang": "en-US") do
xml.element("link", rel: "self", href: "#{host_url}#{path}")
xml.element("id") { xml.text "yt:channel:#{ucid}" }
xml.element("yt:channelId") { xml.text ucid }
xml.element("title") { xml.text author }
xml.element("link", rel: "alternate", href: "#{host_url}/channel/#{ucid}")
xml.element("author") do
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
videos.each do |video|
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{video.id}" }
xml.element("yt:videoId") { xml.text video.id }
xml.element("yt:channelId") { xml.text video.ucid }
xml.element("title") { xml.text video.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{video.id}")
xml.element("author") do
if auto_generated
xml.element("name") { xml.text video.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{video.ucid}" }
else
xml.element("name") { xml.text author }
xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
end
end
xml.element("content", type: "xhtml") do
xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
xml.element("a", href: "#{host_url}/watch?v=#{video.id}") do
xml.element("img", src: "#{host_url}/vi/#{video.id}/mqdefault.jpg")
end
end
end
xml.element("published") { xml.text video.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("media:group") do
xml.element("media:title") { xml.text video.title }
xml.element("media:thumbnail", url: "#{host_url}/vi/#{video.id}/mqdefault.jpg",
width: "320", height: "180")
xml.element("media:description") { xml.text video.description }
end
xml.element("media:community") do
xml.element("media:statistics", views: video.views)
end
end
end
end
end
feed
end
get "/feed/private" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
token = env.params.query["token"]?
if !token
env.response.status_code = 403
next
2018-08-04 13:30:44 -07:00
end
user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token.strip, as: User)
if !user
2019-03-23 08:24:30 -07:00
env.response.status_code = 403
next
2018-08-04 13:30:44 -07:00
end
2018-08-04 13:30:44 -07:00
max_results = env.params.query["max_results"]?.try &.to_i?
max_results ||= 40
page = env.params.query["page"]?.try &.to_i?
page ||= 1
if max_results < 0
limit = nil
offset = (page - 1) * 1
else
limit = max_results
offset = (page - 1) * max_results
end
latest_only = env.params.query["latest_only"]?.try &.to_i?
latest_only ||= 0
latest_only = latest_only == 1
sort = env.params.query["sort"]?
sort ||= "published"
view_name = "subscriptions_#{sha256(user.email)}"
if latest_only
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} \
ORDER BY ucid, published DESC", as: ChannelVideo)
videos.sort_by! { |video| video.published }.reverse!
else
videos = PG_DB.query_all("SELECT * FROM #{view_name} \
2019-03-08 08:34:52 -08:00
ORDER BY published DESC LIMIT $1 OFFSET $2", limit, offset, as: ChannelVideo)
end
case sort
when "reverse_published"
videos.sort_by! { |video| video.published }
when "alphabetically"
videos.sort_by! { |video| video.title }
when "reverse_alphabetically"
videos.sort_by! { |video| video.title }.reverse!
when "channel_name"
videos.sort_by! { |video| video.author }
when "reverse_channel_name"
videos.sort_by! { |video| video.author }.reverse!
end
if !limit
videos = videos[0..max_results]
end
host_url = make_host_url(config, Kemal.config)
path = env.request.path
query = env.request.query.not_nil!
feed = XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
"xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
"xml:lang": "en-US") do
xml.element("link", "type": "text/html", rel: "alternate", href: "#{host_url}/feed/subscriptions")
xml.element("link", "type": "application/atom+xml", rel: "self", href: "#{host_url}#{path}?#{query}")
xml.element("title") { xml.text translate(locale, "Invidious Private Feed for `x`", user.email) }
videos.each do |video|
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{video.id}" }
xml.element("yt:videoId") { xml.text video.id }
xml.element("yt:channelId") { xml.text video.ucid }
xml.element("title") { xml.text video.title }
xml.element("link", rel: "alternate", href: "#{host_url}/watch?v=#{video.id}")
xml.element("author") do
xml.element("name") { xml.text video.author }
xml.element("uri") { xml.text "#{host_url}/channel/#{video.ucid}" }
end
xml.element("content", type: "xhtml") do
xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
xml.element("a", href: "#{host_url}/watch?v=#{video.id}") do
xml.element("img", src: "#{host_url}/vi/#{video.id}/mqdefault.jpg")
end
end
end
xml.element("published") { xml.text video.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("updated") { xml.text video.updated.to_s("%Y-%m-%dT%H:%M:%S%:z") }
xml.element("media:group") do
xml.element("media:title") { xml.text video.title }
xml.element("media:thumbnail", url: "#{host_url}/vi/#{video.id}/mqdefault.jpg",
width: "320", height: "180")
end
end
end
end
end
feed
end
get "/feed/playlist/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/atom+xml"
plid = env.params.url["plid"]
host_url = make_host_url(config, Kemal.config)
path = env.request.path
client = make_client(YT_URL)
response = client.get("/feeds/videos.xml?playlist_id=#{plid}")
document = XML.parse(response.body)
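# Rewrite every url/href attribute in the upstream feed so it points back at
# this instance.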
document.xpath_nodes(%q(//*[@href]|//*[@url])).each do |node|
node.attributes.each do |attribute|
case attribute.name
when "url"
node["url"] = "#{host_url}#{URI.parse(node["url"]).full_path}"
when "href"
node["href"] = "#{host_url}#{URI.parse(node["href"]).full_path}"
end
end
end
document = document.to_xml(options: XML::SaveOptions::NO_DECL)
document.scan(/<uri>(?<url>[^<]+)<\/uri>/).each do |match|
content = "#{host_url}#{URI.parse(match["url"]).full_path}"
document = document.gsub(match[0], "<uri>#{content}</uri>")
end
document
end
get "/feeds/videos.xml" do |env|
if ucid = env.params.query["channel_id"]?
env.redirect "/feed/channel/#{ucid}"
elsif user = env.params.query["user"]?
env.redirect "/feed/channel/#{user}"
elsif plid = env.params.query["playlist_id"]?
env.redirect "/feed/playlist/#{plid}"
end
end
# Support push notifications via PubSubHubbub
get "/feed/webhook/:token" do |env|
verify_token = env.params.url["token"]
mode = env.params.query["hub.mode"]
topic = env.params.query["hub.topic"]
challenge = env.params.query["hub.challenge"]
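# Verification tokens generated by this instance are either
#   v1:<unix time>:<nonce>:<HMAC-SHA1 signature>
# or, for older subscriptions, <unix time>:<signature>.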
if verify_token.starts_with? "v1"
_, time, nonce, signature = verify_token.split(":")
data = "#{time}:#{nonce}"
else
time, signature = verify_token.split(":")
data = "#{time}"
end
# The hub will sometimes check if we're still subscribed after delivery errors,
# so we reply with a 200 as long as the request hasn't expired
if Time.now.to_unix - time.to_i > 432000
env.response.status_code = 400
next
end
if OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, data) != signature
env.response.status_code = 400
next
end
ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]
PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.now, ucid)
env.response.status_code = 200
next challenge
end
post "/feed/webhook/:token" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
token = env.params.url["token"]
body = env.request.body.not_nil!.gets_to_end
signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
logger.write("#{token} : Invalid signature\n")
env.response.status_code = 200
next
end
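# Process the pushed feed in the background so the hub gets its 200 right away.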
spawn do
rss = XML.parse_html(body)
rss.xpath_nodes("//feed/entry").each do |entry|
id = entry.xpath_node("videoid").not_nil!.content
author = entry.xpath_node("author/name").not_nil!.content
published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
video = get_video(id, PG_DB, proxies, force_refresh: true)
# Deliver notifications to `/api/v1/auth/notifications`
payload = {
"topic" => video.ucid,
"videoId" => video.id,
"published" => published.to_unix,
}.to_json
PG_DB.exec("NOTIFY notifications, E'#{payload}'")
video = ChannelVideo.new(
id: id,
title: video.title,
published: published,
updated: updated,
ucid: video.ucid,
author: author,
length_seconds: video.length_seconds,
live_now: video.live_now,
premiere_timestamp: video.premiere_timestamp,
)
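# Queue a notification for every subscriber who hasn't refreshed their feed
# since this video was published.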
PG_DB.exec("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications)", video.id, video.published, video.ucid)
video_array = video.to_a
args = arg_array(video_array)
PG_DB.exec("INSERT INTO channel_videos VALUES (#{args}) \
ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
updated = $4, ucid = $5, author = $6, length_seconds = $7, \
live_now = $8, premiere_timestamp = $9", video_array)
end
end
env.response.status_code = 200
next
end
# Channels
{"/channel/:ucid/live", "/user/:user/live", "/c/:user/live"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
client = make_client(YT_URL)
# Appears to be a bug in routing, having several routes configured
# as `/a/:a`, `/b/:a`, `/c/:a` results in 404
value = env.request.resource.split("/")[2]
body = ""
{"channel", "user", "c"}.each do |type|
response = client.get("/#{type}/#{value}/live?disable_polymer=1")
if response.status_code == 200
body = response.body
end
end
video_id = body.match(/'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})"/).try &.["id"]?
if video_id
params = [] of String
env.params.query.each do |k, v|
params << "#{k}=#{v}"
end
params = params.join("&")
url = "/watch?v=#{video_id}"
if !params.empty?
url += "&#{params}"
end
env.redirect url
else
env.redirect "/channel/#{value}"
end
end
end
# YouTube appears to let users set a "brand" URL that
# is different from their username, so we convert that here
get "/c/:user" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
client = make_client(YT_URL)
user = env.params.url["user"]
response = client.get("/c/#{user}")
document = XML.parse_html(response.body)
anchor = document.xpath_node(%q(//a[contains(@class,"branded-page-header-title-link")]))
if !anchor
next env.redirect "/"
end
env.redirect anchor["href"]
end
# Legacy endpoint for /user/:username
get "/profile" do |env|
user = env.params.query["user"]?
if !user
env.redirect "/"
else
env.redirect "/user/#{user}"
end
end
2018-08-04 13:30:44 -07:00
get "/user/:user" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}"
2018-03-24 20:38:35 -07:00
end
2018-09-05 21:12:11 -07:00
get "/user/:user/videos" do |env|
user = env.params.url["user"]
env.redirect "/channel/#{user}/videos"
end
2018-08-04 13:30:44 -07:00
get "/channel/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
subscriptions = user.subscriptions
end
subscriptions ||= [] of String
ucid = env.params.url["ucid"]
2018-08-04 13:30:44 -07:00
page = env.params.query["page"]?.try &.to_i?
page ||= 1
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
begin
author, ucid, auto_generated, sub_count = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
next templated "error"
2018-08-04 13:30:44 -07:00
end
if !auto_generated
env.set "search", "channel:#{ucid} "
end
if auto_generated
sort_options = {"last", "oldest", "newest"}
sort_by ||= "last"
items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
items.uniq! do |item|
if item.responds_to?(:title)
item.title
elsif item.responds_to?(:author)
item.author
end
end
items.select! { |item| item.responds_to?(:thumbnail_id) && item.thumbnail_id }
items = items.map { |item| item.as(SearchPlaylist) }
items.each { |item| item.author = "" }
else
sort_options = {"newest", "oldest", "popular"}
sort_by ||= "newest"
items, count = get_60_videos(ucid, page, auto_generated, sort_by)
items.select! { |item| !item.paid }
end
2018-08-04 13:30:44 -07:00
templated "channel"
end
get "/channel/:ucid/videos" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
2018-08-04 13:30:44 -07:00
ucid = env.params.url["ucid"]
params = env.request.query
if !params || params.empty?
params = ""
else
params = "?#{params}"
end
env.redirect "/channel/#{ucid}#{params}"
end
get "/channel/:ucid/playlists" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
user = env.get? "user"
if user
user = user.as(User)
subscriptions = user.subscriptions
end
subscriptions ||= [] of String
ucid = env.params.url["ucid"]
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "last"
begin
author, ucid, auto_generated, sub_count = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
next templated "error"
end
if auto_generated
next env.redirect "/channel/#{ucid}"
end
items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
items.select! { |item| item.is_a?(SearchPlaylist) && !item.videos.empty? }
items = items.map { |item| item.as(SearchPlaylist) }
items.each { |item| item.author = "" }
templated "playlists"
end
# API Endpoints
get "/api/v1/stats" do |env|
env.response.content_type = "application/json"
if !config.statistics_enabled
error_message = {"error" => "Statistics are not enabled."}.to_json
env.response.status_code = 400
next error_message
2019-03-01 17:25:16 -08:00
end
if statistics["error"]?
env.response.status_code = 500
next statistics.to_json
end
statistics.to_json
end
# YouTube provides "storyboards", which are sprites containing x * y
# preview thumbnails for individual scenes in a video.
# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
get "/api/v1/storyboards/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, proxies, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/storyboards/#{ex.message}"
rescue ex
env.response.status_code = 500
next
end
storyboards = video.storyboards
width = env.params.query["width"]?
height = env.params.query["height"]?
if !width && !height
response = JSON.build do |json|
json.object do
json.field "storyboards" do
generate_storyboards(json, id, storyboards, config, Kemal.config)
end
end
end
next response
end
env.response.content_type = "text/vtt"
storyboard = storyboards.select { |storyboard| width == "#{storyboard[:width]}" || height == "#{storyboard[:height]}" }
if storyboard.empty?
env.response.status_code = 404
next
else
storyboard = storyboard[0]
end
webvtt = <<-END_VTT
WEBVTT
END_VTT
start_time = 0.milliseconds
end_time = storyboard[:interval].milliseconds
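# Each cell of every sprite sheet becomes one WebVTT cue whose URL carries an
# #xywh media fragment, e.g. (illustrative values only):
#   00:00:00.000 --> 00:00:10.000
#   https://example.com/storyboard.jpg#xywh=0,0,160,90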
storyboard[:storyboard_count].times do |i|
host_url = make_host_url(config, Kemal.config)
url = storyboard[:url].gsub("$M", i).gsub("https://i9.ytimg.com", host_url)
storyboard[:storyboard_height].times do |j|
storyboard[:storyboard_width].times do |k|
webvtt += <<-END_CUE
#{start_time}.000 --> #{end_time}.000
#{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width]},#{storyboard[:height]}
END_CUE
start_time += storyboard[:interval].milliseconds
end_time += storyboard[:interval].milliseconds
end
end
end
webvtt
end
2018-08-04 13:30:44 -07:00
get "/api/v1/captions/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
# See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
# It is possible to use `/api/timedtext?type=list&v=#{id}` and
# `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
# but this does not provide links for auto-generated captions.
#
# In future this should be investigated as an alternative, since it does not require
# getting video info.
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, proxies, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/captions/#{ex.message}"
rescue ex
env.response.status_code = 500
next
end
captions = video.captions
label = env.params.query["label"]?
lang = env.params.query["lang"]?
tlang = env.params.query["tlang"]?
if !label && !lang
response = JSON.build do |json|
json.object do
json.field "captions" do
json.array do
captions.each do |caption|
json.object do
json.field "label", caption.name.simpleText
json.field "languageCode", caption.languageCode
json.field "url", "/api/v1/captions/#{id}?label=#{URI.escape(caption.name.simpleText)}"
end
end
end
end
end
end
next response
end
env.response.content_type = "text/vtt; charset=UTF-8"
caption = captions.select { |caption| caption.name.simpleText == label }
if lang
caption = captions.select { |caption| caption.languageCode == lang }
end
if caption.empty?
env.response.status_code = 404
next
else
caption = caption[0]
end
url = caption.baseUrl + "&tlang=#{tlang}"
# Auto-generated captions often have cues that aren't aligned properly with the video,
# as well as some other markup that makes it cumbersome, so we try to fix that here
if caption.name.simpleText.includes? "auto-generated"
caption_xml = client.get(url).body
caption_xml = XML.parse(caption_xml)
webvtt = <<-END_VTT
WEBVTT
Kind: captions
Language: #{tlang || caption.languageCode}
END_VTT
caption_nodes = caption_xml.xpath_nodes("//transcript/text")
caption_nodes.each_with_index do |node, i|
start_time = node["start"].to_f.seconds
duration = node["dur"]?.try &.to_f.seconds
duration ||= start_time
if caption_nodes.size > i + 1
end_time = caption_nodes[i + 1]["start"].to_f.seconds
else
end_time = start_time + duration
end
start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"
text = HTML.unescape(node.content)
text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
text = text.gsub(/<\/font>/, "")
if md = text.match(/(?<name>.*) : (?<text>.*)/)
text = "<v #{md["name"]}>#{md["text"]}</v>"
end
webvtt += <<-END_CUE
#{start_time} --> #{end_time}
#{text}
END_CUE
end
else
url += "&format=vtt"
webvtt = client.get(url).body
end
if title = env.params.query["title"]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}"
end
webvtt
end
get "/api/v1/comments/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
env.response.content_type = "application/json"
id = env.params.url["id"]
source = env.params.query["source"]?
source ||= "youtube"
thin_mode = env.params.query["thin_mode"]?
thin_mode = thin_mode == "true"
format = env.params.query["format"]?
format ||= "json"
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort_by"]?.try &.downcase
if source == "youtube"
sort_by ||= "top"
begin
comments = fetch_youtube_comments(id, PG_DB, continuation, proxies, format, locale, thin_mode, region, sort_by: sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
next comments
elsif source == "reddit"
sort_by ||= "confidence"
begin
comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
content_html = template_reddit_comments(comments, locale)
content_html = fill_links(content_html, "https", "www.reddit.com")
content_html = replace_links(content_html)
rescue ex
comments = nil
reddit_thread = nil
content_html = ""
end
if !reddit_thread || !comments
env.response.status_code = 404
next
end
if format == "json"
reddit_thread = JSON.parse(reddit_thread.to_json).as_h
reddit_thread["comments"] = JSON.parse(comments.to_json)
next reddit_thread.to_json
else
response = {
2018-09-06 08:19:28 -07:00
"title" => reddit_thread.title,
"permalink" => reddit_thread.permalink,
"contentHtml" => content_html,
}
next response.to_json
end
end
end
get "/api/v1/insights/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
id = env.params.url["id"]
env.response.content_type = "application/json"
error_message = {"error" => "YouTube has removed publicly-available analytics."}.to_json
env.response.status_code = 410
next error_message
client = make_client(YT_URL)
headers = HTTP::Headers.new
html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1")
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
body = html.body
session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
post_req = {
"session_token" => session_token,
}
post_req = HTTP::Params.encode(post_req)
response = client.post("/insight_ajax?action_get_statistics_and_data=1&v=#{id}", headers, post_req).body
response = XML.parse(response)
html_content = XML.parse_html(response.xpath_node(%q(//html_content)).not_nil!.content)
graph_data = response.xpath_node(%q(//graph_data))
if !graph_data
error = html_content.xpath_node(%q(//p)).not_nil!.content
next {"error" => error}.to_json
end
graph_data = JSON.parse(graph_data.content)
view_count = 0_i64
time_watched = 0_i64
subscriptions_driven = 0
shares = 0
stats_nodes = html_content.xpath_nodes(%q(//table/tr/td))
stats_nodes.each do |node|
key = node.xpath_node(%q(.//span))
value = node.xpath_node(%q(.//div))
if !key || !value
next
end
key = key.content
value = value.content
case key
when "Views"
view_count = value.delete(", ").to_i64
when "Time watched"
time_watched = value
when "Subscriptions driven"
subscriptions_driven = value.delete(", ").to_i
when "Shares"
shares = value.delete(", ").to_i
end
end
avg_view_duration_seconds = html_content.xpath_node(%q(//div[@id="stats-chart-tab-watch-time"]/span/span[2])).not_nil!.content
avg_view_duration_seconds = decode_length_seconds(avg_view_duration_seconds)
response = {
"viewCount" => view_count,
"timeWatchedText" => time_watched,
"subscriptionsDriven" => subscriptions_driven,
"shares" => shares,
"avgViewDurationSeconds" => avg_view_duration_seconds,
"graphData" => graph_data,
}
next response.to_json
end
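# Annotation XML for a video. source=archive (the default) pulls from the
# Internet Archive's youtubeannotations_* dumps, optionally caching the result
# in Postgres; source=youtube proxies YouTube's /annotations_invideo endpoint.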
get "/api/v1/annotations/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "text/xml"
id = env.params.url["id"]
source = env.params.query["source"]?
source ||= "archive"
if !id.match(/[a-zA-Z0-9_-]{11}/)
env.response.status_code = 400
next
end
annotations = ""
case source
when "archive"
if CONFIG.cache_annotations && (cached_annotation = PG_DB.query_one?("SELECT * FROM annotations WHERE id = $1", id, as: Annotation))
annotations = cached_annotation.annotations
else
index = CHARS_SAFE.index(id[0]).not_nil!.to_s.rjust(2, '0')
# The Internet Archive doesn't handle leading hyphens in IDs, so videos whose
# ID starts with "-" are stored under https://archive.org/details/youtubeannotations_64
if index == "62"
index = "64"
id = id.sub(/^-/, 'A')
end
file = URI.escape("#{id[0, 3]}/#{id}.xml")
client = make_client(ARCHIVE_URL)
location = client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
if !location.headers["Location"]?
env.response.status_code = location.status_code
end
response = make_client(URI.parse(location.headers["Location"])).get(location.headers["Location"])
if response.body.empty?
env.response.status_code = 404
next
end
if response.status_code != 200
env.response.status_code = response.status_code
next
end
annotations = response.body
cache_annotation(PG_DB, id, annotations)
end
when "youtube"
client = make_client(YT_URL)
response = client.get("/annotations_invideo?video_id=#{id}")
if response.status_code != 200
env.response.status_code = response.status_code
next
end
annotations = response.body
end
annotations
end
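# Full video metadata as JSON; VideoRedirect exceptions are followed so that
# moved videos redirect to their canonical ID.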
get "/api/v1/videos/:id" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
id = env.params.url["id"]
region = env.params.query["region"]?
begin
video = get_video(id, PG_DB, proxies, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/videos/#{ex.message}"
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
video.to_json(locale, config, Kemal.config, decrypt_function)
end
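# Trending feed; the optional `type` and `region` query parameters are passed
# through to fetch_trending.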
get "/api/v1/trending" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
region = env.params.query["region"]?
trending_type = env.params.query["type"]?
begin
trending, plid = fetch_trending(trending_type, proxies, region, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
videos = JSON.build do |json|
json.array do
trending.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "lengthSeconds", video.length_seconds
json.field "viewCount", video.views
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "liveNow", video.live_now
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
end
videos
end
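# Serializes the locally-tracked popular_videos list.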
get "/api/v1/popular" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
JSON.build do |json|
json.array do
popular_videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
end
end
end
end
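# Top videos; returns an error if the administrator has disabled this feature
# via top_enabled.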
get "/api/v1/top" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
if !config.top_enabled
error_message = {"error" => "Administrator has disabled this endpoint."}.to_json
env.response.status_code = 400
next error_message
end
videos = JSON.build do |json|
json.array do
top_videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "lengthSeconds", video.info["length_seconds"].to_i
json.field "viewCount", video.views
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
description = video.description.gsub("<br>", "\n")
description = description.gsub("<br/>", "\n")
description = XML.parse_html(description)
json.field "description", description.content
json.field "descriptionHtml", video.description
end
end
end
end
videos
end
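# Channel metadata (banner, thumbnails, stats, related channels) plus the
# latest videos, scraped from the channel's /about page.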
get "/api/v1/channels/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "newest"
begin
author, ucid, auto_generated = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
page = 1
if auto_generated
videos = [] of SearchVideo
count = 0
else
begin
videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
end
client = make_client(YT_URL)
channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
channel_html = XML.parse_html(channel_html)
banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
author = channel_html.xpath_node(%q(//a[contains(@class, "branded-page-header-title-link")])).not_nil!.content
author_url = channel_html.xpath_node(%q(//a[@class="channel-header-profile-image-container spf-link"])).not_nil!["href"]
author_thumbnail = channel_html.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
description_html = channel_html.xpath_node(%q(//div[contains(@class,"about-description")]))
description_html, description = html_to_content(description_html)
paid = channel_html.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
is_family_friendly = channel_html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
allowed_regions = channel_html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
related_channels = channel_html.xpath_nodes(%q(//div[contains(@class, "branded-page-related-channels")]/ul/li))
related_channels = related_channels.map do |node|
related_id = node["data-external-id"]?
related_id ||= ""
anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
related_title = anchor.try &.["title"]
related_title ||= ""
related_author_url = anchor.try &.["href"]
related_author_url ||= ""
related_author_thumbnail = node.xpath_node(%q(.//img)).try &.["data-thumb"]
related_author_thumbnail ||= ""
{
id: related_id,
author: related_title,
author_url: related_author_url,
author_thumbnail: related_author_thumbnail,
}
end
total_views = 0_i64
sub_count = 0_i64
joined = Time.unix(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
when .includes? "views"
total_views = item.content.gsub(/\D/, "").to_i64
when .includes? "subscribers"
sub_count = item.content.delete("subscribers").gsub(/\D/, "").to_i64
when .includes? "Joined"
joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
end
end
channel_info = JSON.build do |json|
json.object do
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", author_url
json.field "authorBanners" do
json.array do
qualities = {
{width: 2560, height: 424},
{width: 2120, height: 351},
{width: 1060, height: 175},
}
qualities.each do |quality|
json.object do
json.field "url", banner.gsub("=w1060", "=w#{quality[:width]}")
json.field "width", quality[:width]
json.field "height", quality[:height]
end
end
json.object do
json.field "url", banner.rchop("=w1060-fcrop64=1,00005a57ffffa5a8-nd-c0xffffffff-rj-k-no")
json.field "width", 512
json.field "height", 288
end
end
end
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", author_thumbnail.gsub("/s100-", "/s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCount", sub_count
json.field "totalViews", total_views
json.field "joined", joined.to_unix
json.field "paid", paid
json.field "autoGenerated", auto_generated
json.field "isFamilyFriendly", is_family_friendly
json.field "description", description
json.field "descriptionHtml", description_html
json.field "allowedRegions", allowed_regions
json.field "latestVideos" do
json.array do
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
json.field "lengthSeconds", video.length_seconds
json.field "liveNow", video.live_now
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
end
json.field "relatedChannels" do
json.array do
related_channels.each do |related_channel|
json.object do
json.field "author", related_channel[:author]
json.field "authorId", related_channel[:id]
json.field "authorUrl", related_channel[:author_url]
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", related_channel[:author_thumbnail].gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
end
end
end
end
end
end
channel_info
end
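# Channel uploads, 60 per page; both path orders are accepted.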
{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
sort_by = env.params.query["sort"]?.try &.downcase
sort_by ||= env.params.query["sort_by"]?.try &.downcase
sort_by ||= "newest"
begin
author, ucid, auto_generated = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
begin
videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
result = JSON.build do |json|
json.array do
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
if auto_generated
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
else
json.field "author", author
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
end
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
json.field "lengthSeconds", video.length_seconds
json.field "liveNow", video.live_now
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
end
result
end
end
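# Most recent uploads for a channel, via get_latest_videos.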
{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
begin
videos = get_latest_videos(ucid)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
JSON.build do |json|
json.array do
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "authorId", ucid
json.field "authorUrl", "/channel/#{ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "description", video.description
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
json.field "published", video.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
json.field "lengthSeconds", video.length_seconds
json.field "liveNow", video.live_now
json.field "paid", video.paid
json.field "premium", video.premium
end
end
end
end
end
end
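# A channel's playlists, with continuation-based paging and optional sorting.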
{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
get route do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
continuation = env.params.query["continuation"]?
sort_by = env.params.query["sort"]?.try &.downcase
sort_by ||= env.params.query["sort_by"]?.try &.downcase
sort_by ||= "last"
begin
author, ucid, auto_generated = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
response = JSON.build do |json|
json.object do
json.field "playlists" do
json.array do
items.each do |item|
json.object do
if item.is_a?(SearchPlaylist)
json.field "title", item.title
json.field "playlistId", item.id
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "videoCount", item.video_count
json.field "videos" do
json.array do
item.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "lengthSeconds", video.length_seconds
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
end
end
end
end
end
end
end
end
json.field "continuation", continuation
end
end
response
end
end
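# Search within a single channel's uploads.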
get "/api/v1/channels/search/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
ucid = env.params.url["ucid"]
query = env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
count, search_results = channel_search(query, page, ucid)
response = JSON.build do |json|
json.array do
search_results.each do |item|
json.object do
case item
when SearchVideo
json.field "type", "video"
json.field "title", item.title
json.field "videoId", item.id
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, item.id, config, Kemal.config)
end
json.field "description", item.description
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
json.field "published", item.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(item.published, locale))
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
json.field "paid", item.paid
json.field "premium", item.premium
when SearchPlaylist
json.field "type", "playlist"
json.field "title", item.title
json.field "playlistId", item.id
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "videoCount", item.video_count
json.field "videos" do
json.array do
item.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "lengthSeconds", video.length_seconds
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
end
end
end
when SearchChannel
json.field "type", "channel"
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", item.author_thumbnail.gsub("=s176-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCount", item.subscriber_count
json.field "videoCount", item.video_count
json.field "description", item.description
json.field "descriptionHtml", item.description_html
end
end
end
end
end
response
end
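# Site-wide search. Filter parameters (sort_by, date, duration, features, type)
# are encoded into YouTube search_params; invalid combinations return 400.
# Example: GET /api/v1/search?q=crystal&page=1&type=video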
get "/api/v1/search" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
region = env.params.query["region"]?
env.response.content_type = "application/json"
query = env.params.query["q"]?
query ||= ""
page = env.params.query["page"]?.try &.to_i?
page ||= 1
sort_by = env.params.query["sort_by"]?.try &.downcase
sort_by ||= "relevance"
date = env.params.query["date"]?.try &.downcase
date ||= ""
duration = env.params.query["duration"]?.try &.downcase
duration ||= ""
features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
features ||= [] of String
content_type = env.params.query["type"]?.try &.downcase
content_type ||= "video"
begin
search_params = produce_search_params(sort_by, date, content_type, duration, features)
rescue ex
env.response.status_code = 400
next JSON.build do |json|
json.object do
json.field "error", ex.message
end
end
end
count, search_results = search(query, page, search_params, proxies, region).as(Tuple)
response = JSON.build do |json|
json.array do
search_results.each do |item|
json.object do
case item
when SearchVideo
json.field "type", "video"
json.field "title", item.title
json.field "videoId", item.id
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, item.id, config, Kemal.config)
end
json.field "description", item.description
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
json.field "published", item.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(item.published, locale))
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
json.field "paid", item.paid
json.field "premium", item.premium
when SearchPlaylist
json.field "type", "playlist"
json.field "title", item.title
json.field "playlistId", item.id
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "videoCount", item.video_count
json.field "videos" do
json.array do
item.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "lengthSeconds", video.length_seconds
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
end
end
end
when SearchChannel
json.field "type", "channel"
json.field "author", item.author
json.field "authorId", item.ucid
json.field "authorUrl", "/channel/#{item.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", item.author_thumbnail.gsub("=s176-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
json.field "subCount", item.subscriber_count
json.field "videoCount", item.video_count
json.field "description", item.description
json.field "descriptionHtml", item.description_html
end
end
end
end
end
response
end
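# Playlist metadata and videos. Mix IDs (starting with "RD") are redirected to
# /api/v1/mixes; format=html returns pre-rendered playlist HTML plus the next video ID.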
get "/api/v1/playlists/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
plid = env.params.url["plid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
format = env.params.query["format"]?
format ||= "json"
continuation = env.params.query["continuation"]?
if plid.starts_with? "RD"
next env.redirect "/api/v1/mixes/#{plid}"
end
begin
playlist = fetch_playlist(plid, locale)
rescue ex
error_message = {"error" => "Playlist is empty"}.to_json
env.response.status_code = 500
next error_message
end
begin
videos = fetch_playlist_videos(plid, page, playlist.video_count, continuation, locale)
rescue ex
videos = [] of PlaylistVideo
end
response = JSON.build do |json|
json.object do
json.field "title", playlist.title
json.field "playlistId", playlist.id
json.field "author", playlist.author
json.field "authorId", playlist.ucid
json.field "authorUrl", "/channel/#{playlist.ucid}"
json.field "authorThumbnails" do
json.array do
qualities = {32, 48, 76, 100, 176, 512}
qualities.each do |quality|
json.object do
json.field "url", playlist.author_thumbnail.gsub("=s100-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
end
end
end
2018-08-15 08:22:36 -07:00
json.field "description", playlist.description
json.field "descriptionHtml", playlist.description_html
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
json.field "updated", playlist.updated.to_unix
json.field "videos" do
json.array do
videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
generate_thumbnails(json, video.id, config, Kemal.config)
end
json.field "index", video.index
json.field "lengthSeconds", video.length_seconds
end
end
end
end
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_playlist(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response
end
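# YouTube mixes (auto-generated "RD" playlists), resolved starting from the
# given continuation video.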
get "/api/v1/mixes/:rdid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
env.response.content_type = "application/json"
rdid = env.params.url["rdid"]
continuation = env.params.query["continuation"]?
continuation ||= rdid.lchop("RD")[0, 11]
format = env.params.query["format"]?
format ||= "json"
begin
mix = fetch_mix(rdid, continuation, locale: locale)
if !rdid.ends_with? continuation
mix = fetch_mix(rdid, mix.videos[1].id)
index = mix.videos.index(mix.videos.select { |video| video.id == continuation }[0]?)
end
index ||= 0
mix.videos = mix.videos[index..-1]
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
response = JSON.build do |json|
json.object do
json.field "title", mix.title
json.field "mixId", mix.id
json.field "videos" do
json.array do
mix.videos.each do |video|
json.object do
json.field "title", video.title
json.field "videoId", video.id
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
json.field "videoThumbnails" do
json.array do
generate_thumbnails(json, video.id, config, Kemal.config)
end
end
json.field "index", video.index
json.field "lengthSeconds", video.length_seconds
end
end
end
end
end
end
if format == "html"
response = JSON.parse(response)
playlist_html = template_mix(response)
next_video = response["videos"].as_a[1]?.try &.["videoId"]
next_video ||= ""
response = {
"playlistHtml" => playlist_html,
"nextVideo" => next_video,
}.to_json
end
response
end
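# Notification stream for up to 1000 comma-separated topics; offered as both
# GET (query string) and POST (request body).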
get "/api/v1/auth/notifications" do |env|
topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics)
end
post "/api/v1/auth/notifications" do |env|
topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics)
end
get "/api/v1/auth/preferences" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
user.preferences.to_json
end
post "/api/v1/auth/preferences" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
begin
preferences = Preferences.from_json(env.request.body || "{}")
rescue
preferences = user.preferences
end
PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
env.response.status_code = 204
end
get "/api/v1/auth/subscriptions" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
if user.subscriptions.empty?
values = "'{}'"
else
values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
end
subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
JSON.build do |json|
json.array do
subscriptions.each do |subscription|
json.object do
json.field "author", subscription.author
json.field "authorId", subscription.id
end
end
end
end
end
post "/api/v1/auth/subscriptions/:ucid" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
ucid = env.params.url["ucid"]
if !user.subscriptions.includes? ucid
get_channel(ucid, PG_DB, false, false)
PG_DB.exec("UPDATE users SET subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
end
# For Google accounts, access tokens don't have enough information to
# make a request on the user's behalf, which is why we don't sync with
# YouTube.
env.response.status_code = 204
end
delete "/api/v1/auth/subscriptions/:ucid" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
ucid = env.params.url["ucid"]
PG_DB.exec("UPDATE users SET subscriptions = array_remove(subscriptions,$1) WHERE email = $2", ucid, user.email)
env.response.status_code = 204
end
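# Token management: list the user's active sessions, register a new scoped
# token (HTML confirmation when a session cookie is present, JSON otherwise),
# and revoke a session or token.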
get "/api/v1/auth/tokens" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
scopes = env.get("scopes").as(Array(String))
tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1", user.email, as: {session: String, issued: Time})
JSON.build do |json|
json.array do
tokens.each do |token|
json.object do
json.field "session", token[:session]
json.field "issued", token[:issued].to_unix
end
end
end
end
end
post "/api/v1/auth/tokens/register" do |env|
user = env.get("user").as(User)
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
case env.request.headers["Content-Type"]?
when "application/x-www-form-urlencoded"
scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
callback_url = env.params.body["callbackUrl"]?
expire = env.params.body["expire"]?.try &.to_i?
when "application/json"
scopes = env.params.json["scopes"].as(Array).map { |v| v.as_s }
callback_url = env.params.json["callbackUrl"]?.try &.as(String)
expire = env.params.json["expire"]?.try &.as(Int64)
else
error_message = {"error" => "Invalid or missing header 'Content-Type'"}.to_json
env.response.status_code = 400
next error_message
end
if callback_url && callback_url.empty?
callback_url = nil
end
if callback_url
callback_url = URI.parse(callback_url)
end
if sid = env.get?("sid").try &.as(String)
env.response.content_type = "text/html"
csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB, use_nonce: true)
next templated "authorize_token"
else
env.response.content_type = "application/json"
superset_scopes = env.get("scopes").as(Array(String))
authorized_scopes = [] of String
scopes.each do |scope|
if scopes_include_scope(superset_scopes, scope)
authorized_scopes << scope
end
end
access_token = generate_token(user.email, authorized_scopes, expire, HMAC_KEY, PG_DB)
if callback_url
access_token = URI.escape(access_token)
if query = callback_url.query
query = HTTP::Params.parse(query.not_nil!)
else
query = HTTP::Params.new
end
query["token"] = access_token
callback_url.query = query.to_s
env.redirect callback_url.to_s
else
access_token
end
end
end
post "/api/v1/auth/tokens/unregister" do |env|
env.response.content_type = "application/json"
user = env.get("user").as(User)
scopes = env.get("scopes").as(Array(String))
session = env.params.json["session"]?.try &.as(String)
session ||= env.get("session").as(String)
# Allow tokens to revoke other tokens if they have the correct scope
if session == env.get("session").as(String)
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
elsif scopes_include_scope(scopes, "GET:tokens")
PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
else
error_message = {"error" => "Cannot revoke session #{session}"}.to_json
env.response.status_code = 400
next error_message
end
env.response.status_code = 204
end
get "/api/manifest/dash/id/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect "/videoplayback?#{env.params.query}"
end
get "/api/manifest/dash/id/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect env.request.path.lchop("/api/manifest/dash/id")
end
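# Builds a DASH MPD manifest from the video's adaptive formats; local=true
# rewrites stream URLs so that they are proxied through this instance.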
get "/api/manifest/dash/id/:id" do |env|
env.response.headers.add("Access-Control-Allow-Origin", "*")
env.response.content_type = "application/dash+xml"
local = env.params.query["local"]?.try &.== "true"
id = env.params.url["id"]
region = env.params.query["region"]?
client = make_client(YT_URL)
begin
video = get_video(id, PG_DB, proxies, region: region)
rescue ex : VideoRedirect
url = "/api/manifest/dash/id/#{ex.message}"
if local
url += "?local=true"
end
next env.redirect url
rescue ex
env.response.status_code = 403
next
2018-07-16 06:18:59 -07:00
end
if dashmpd = video.player_response["streamingData"]?.try &.["dashManifestUrl"]?.try &.as_s
manifest = client.get(dashmpd).body
manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
url = baseurl.lchop("<BaseURL>")
url = url.rchop("</BaseURL>")
if local
url = URI.parse(url).full_path.lchop("/")
end
"<BaseURL>#{url}</BaseURL>"
end
next manifest
end
adaptive_fmts = video.adaptive_fmts(decrypt_function)
if local
adaptive_fmts.each do |fmt|
fmt["url"] = URI.parse(fmt["url"]).full_path.lchop("/")
end
end
audio_streams = video.audio_streams(adaptive_fmts)
video_streams = video.video_streams(adaptive_fmts)
XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",
"profiles": "urn:mpeg:dash:profile:isoff-live:2011", minBufferTime: "PT1.5S", type: "static",
mediaPresentationDuration: "PT#{video.info["length_seconds"]}S") do
xml.element("Period") do
i = 0
{"audio/mp4", "audio/webm"}.each do |mime_type|
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true) do
audio_streams.select { |stream| stream["type"].starts_with? mime_type }.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"]
itag = fmt["itag"]
url = fmt["url"]
xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
value: "2")
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
end
end
end
end
i += 1
end
{"video/mp4", "video/webm"}.each do |mime_type|
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
video_streams.select { |stream| stream["type"].starts_with? mime_type }.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"]
itag = fmt["itag"]
url = fmt["url"]
width, height = fmt["size"].split("x")
xml.element("Representation", id: itag, codecs: codecs, width: width, height: height,
startWithSAP: "1", maxPlayoutRate: "1",
bandwidth: bandwidth, frameRate: fmt["fps"]) do
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
end
end
end
end
i += 1
end
end
end
end
end
get "/api/manifest/hls_variant/*" do |env|
client = make_client(YT_URL)
manifest = client.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
next
2018-07-27 16:25:58 -07:00
end
local = env.params.query["local"]?.try &.== "true"
env.response.content_type = "application/x-mpegURL"
env.response.headers.add("Access-Control-Allow-Origin", "*")
host_url = make_host_url(config, Kemal.config)
manifest = manifest.body
if local
manifest = manifest.gsub("https://www.youtube.com", host_url)
manifest = manifest.gsub("index.m3u8", "index.m3u8?local=true")
end
manifest
end
get "/api/manifest/hls_playlist/*" do |env|
client = make_client(YT_URL)
manifest = client.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
next
end
local = env.params.query["local"]?.try &.== "true"
env.response.content_type = "application/x-mpegURL"
env.response.headers.add("Access-Control-Allow-Origin", "*")
host_url = make_host_url(config, Kemal.config)
manifest = manifest.body
if local
manifest = manifest.gsub("https://www.youtube.com", host_url)
manifest = manifest.gsub(/https:\/\/r\d---.{11}\.c\.youtube\.com/, host_url)
manifest = manifest.gsub("seg.ts", "seg.ts?local=true")
end
fvip = manifest.match(/hls_chunk_host\/r(?<fvip>\d+)---/).not_nil!["fvip"]
manifest = manifest.gsub("seg.ts", "seg.ts/fvip/#{fvip}")
manifest
end
# YouTube /videoplayback links expire after 6 hours,
# so we have a mechanism here to redirect to the latest version
get "/latest_version" do |env|
if env.params.query["download_widget"]?
download_widget = JSON.parse(env.params.query["download_widget"])
id = download_widget["id"].as_s
title = download_widget["title"].as_s
if label = download_widget["label"]?
env.redirect "/api/v1/captions/#{id}?label=#{label}&title=#{title}"
next
else
itag = download_widget["itag"].as_s
local = "true"
end
end
id ||= env.params.query["id"]?
itag ||= env.params.query["itag"]?
region = env.params.query["region"]?
local ||= env.params.query["local"]?
local ||= "false"
local = local == "true"
if !id || !itag
env.response.status_code = 400
next
end
video = get_video(id, PG_DB, proxies, region: region)
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
urls = (fmt_stream + adaptive_fmts).select { |fmt| fmt["itag"] == itag }
if urls.empty?
env.response.status_code = 404
next
elsif urls.size > 1
env.response.status_code = 409
next
end
url = urls[0]["url"]
if local
url = URI.parse(url).full_path.not_nil!
end
if title
url += "&title=#{title}"
end
env.redirect url
end
options "/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/api/manifest/dash/id/videoplayback" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
options "/api/manifest/dash/id/videoplayback/*" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
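# Path-style /videoplayback URLs (as used in DASH manifests) are converted back
# into query-string form and redirected to the handler below.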
get "/videoplayback/*" do |env|
path = env.request.path
path = path.lchop("/videoplayback/")
path = path.rchop("/")
path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
mimetype = mimetype.split("/")
mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
end
path = path.split("/")
raw_params = {} of String => Array(String)
path.each_slice(2) do |pair|
key, value = pair
value = URI.unescape(value)
if raw_params[key]?
raw_params[key] << value
else
raw_params[key] = [value]
end
end
query_params = HTTP::Params.new(raw_params)
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.redirect "/videoplayback?#{query_params}"
end
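# Proxies googlevideo.com media: picks a host from fvip/mn (or the `host`
# parameter), follows redirects, then streams the response through proxy_file
# with CORS headers applied.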
get "/videoplayback" do |env|
query_params = env.params.query
fvip = query_params["fvip"]? || "3"
mns = query_params["mn"].split(",")
if query_params["region"]?
region = query_params["region"]
query_params.delete("region")
end
if query_params["host"]? && !query_params["host"].empty?
host = "https://#{query_params["host"]}"
query_params.delete("host")
else
host = "https://r#{fvip}---#{mns.pop}.googlevideo.com"
end
url = "/videoplayback?#{query_params.to_s}"
headers = HTTP::Headers.new
{"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
response = HTTP::Client::Response.new(403)
5.times do
begin
client = make_client(URI.parse(host), proxies, region)
response = client.head(url, headers)
break
rescue Socket::Addrinfo::Error
if !mns.empty?
mn = mns.pop
end
fvip = "3"
host = "https://r#{fvip}---#{mn}.googlevideo.com"
rescue ex
end
end
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
host = url.host
env.response.headers["Access-Control-Allow-Origin"] = "*"
url = url.full_path
url += "&host=#{host}"
if region
url += "&region=#{region}"
end
next env.redirect url
end
if response.status_code >= 400
env.response.status_code = response.status_code
next
end
client = make_client(URI.parse(host), proxies, region)
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !{"Access-Control-Allow-Origin", "Alt-Svc"}.includes? key
env.response.headers[key] = value
end
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
if response.headers["Location"]?
url = URI.parse(response.headers["Location"])
host = url.host
url = url.full_path
url += "&host=#{host}"
if region
url += "&region=#{region}"
end
next env.redirect url
end
if title = query_params["title"]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}"
end
proxy_file(response, env)
end
end
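# Proxies channel avatars and other images from yt3.ggpht.com.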
# This empty route is needed so the wildcard "/ggpht/*" route below works as expected
get "/ggpht*" do |env|
end
get "/ggpht/*" do |env|
host = "https://yt3.ggpht.com"
client = make_client(URI.parse(host))
url = env.request.path.lchop("/ggpht")
headers = HTTP::Headers.new
{"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
client.get(url, headers) do |response|
response.headers.each do |key, value|
if !{"Access-Control-Allow-Origin", "Alt-Svc"}.includes? key
env.response.headers[key] = value
end
end
if response.status_code == 304
break
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
proxy_file(response, env)
end
end
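# Proxies storyboard (video preview) images from i.ytimg.com / i9.ytimg.com.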
options "/sb/:id/:storyboard/:index" do |env|
env.response.headers.delete("Content-Type")
env.response.headers["Access-Control-Allow-Origin"] = "*"
env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
end
get "/sb/:id/:storyboard/:index" do |env|
id = env.params.url["id"]
storyboard = env.params.url["storyboard"]
index = env.params.url["index"]
if storyboard.starts_with? "storyboard_live"
host = "https://i.ytimg.com"
else
host = "https://i9.ytimg.com"
end
client = make_client(URI.parse(host))
url = "/sb/#{id}/#{storyboard}/#{index}?#{env.params.query}"
headers = HTTP::Headers.new
{"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !{"Access-Control-Allow-Origin", "Alt-Svc"}.includes? key
env.response.headers[key] = value
end
end
if response.status_code >= 400
break
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
proxy_file(response, env)
end
end
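# Proxies video thumbnails from i.ytimg.com; maxres.jpg falls back to the first
# thumbnail variant that actually exists.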
get "/vi/:id/:name" do |env|
id = env.params.url["id"]
name = env.params.url["name"]
host = "https://i.ytimg.com"
client = make_client(URI.parse(host))
if name == "maxres.jpg"
build_thumbnails(id, config, Kemal.config).each do |thumb|
if client.head("/vi/#{id}/#{thumb[:url]}.jpg").status_code == 200
name = thumb[:url] + ".jpg"
break
end
end
end
url = "/vi/#{id}/#{name}"
headers = HTTP::Headers.new
{"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
end
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
if !{"Access-Control-Allow-Origin", "Alt-Svc"}.includes? key
env.response.headers[key] = value
end
end
if response.status_code == 304
break
end
env.response.headers["Access-Control-Allow-Origin"] = "*"
proxy_file(response, env)
end
end
# Undocumented endpoint: creates an anonymous playlist from the specified 'video_ids'
get "/watch_videos" do |env|
client = make_client(YT_URL)
response = client.get("#{env.request.path}?#{env.request.query}")
if url = response.headers["Location"]?
url = URI.parse(url).full_path
next env.redirect url
end
env.response.status_code = response.status_code
end
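# 404 handler: tries to resolve unknown single-segment paths as a channel
# branding URL or a video ID before falling back to the homepage.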
error 404 do |env|
if md = env.request.path.match(/^\/(?<id>([a-zA-Z0-9_-]{11})|(\w+))$/)
item = md["id"]
# Check whether the item is a branding URL, e.g. https://youtube.com/gaming
client = make_client(YT_URL)
response = client.get("/#{item}")
if response.status_code == 301
response = client.get(response.headers["Location"])
end
html = XML.parse_html(response.body)
ucid = html.xpath_node(%q(//meta[@itemprop="channelId"]))
if ucid
env.response.headers["Location"] = "/channel/#{ucid["content"]}"
halt env, status_code: 302
end
params = [] of String
env.params.query.each do |k, v|
params << "#{k}=#{v}"
end
params = params.join("&")
url = "/watch?v=#{item}"
if !params.empty?
url += "&#{params}"
end
# Check whether the item is a video ID
client = make_client(YT_URL)
if item.match(/^[a-zA-Z0-9_-]{11}$/) && client.head("/watch?v=#{item}").status_code != 404
env.response.headers["Location"] = url
halt env, status_code: 302
end
end
env.response.headers["Location"] = "/"
halt env, status_code: 302
end
error 500 do |env|
error_message = <<-END_HTML
Looks like you've found a bug in Invidious. Feel free to open a new issue
<a href="https://github.com/omarroth/invidious/issues">
here
</a>
or send an email to
<a href="mailto:omarroth@protonmail.com">
omarroth@protonmail.com</a>.
END_HTML
templated "error"
end
# Add an HTTP-to-HTTPS redirect if SSL is enabled
if Kemal.config.ssl
spawn do
server = HTTP::Server.new do |env|
redirect_url = "https://#{env.request.host}#{env.request.path}"
if env.request.query
redirect_url += "?#{env.request.query}"
end
if config.hsts
env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
end
env.response.headers["Location"] = redirect_url
env.response.status_code = 301
end
server.bind_tcp "0.0.0.0", 80
server.listen
end
end
static_headers do |response, filepath, filestat|
response.headers.add("Cache-Control", "max-age=2629800")
end
public_folder "assets"
Kemal.config.powered_by_header = false
add_handler FilteredCompressHandler.new
add_handler APIHandler.new
add_handler AuthHandler.new
add_handler DenyFrame.new
add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
add_context_storage_type(User)
Kemal.config.logger = logger
Kemal.run