# "Invidious" (which is an alternative front-end to YouTube)
# Copyright (C) 2019  Omar Roth
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

require "digest/md5"
require "file_utils"
require "kemal"
require "openssl/hmac"
require "option_parser"
require "pg"
require "sqlite3"
require "xml"
require "yaml"
require "compress/zip"
require "protodec/utils"

require "./invidious/helpers/*"
require "./invidious/*"
require "./invidious/routes/**"
require "./invidious/jobs/**"
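
# Note: if no `hmac_key` is set in the config, a fresh random key is generated
# on every start, so CSRF tokens and other HMAC-signed values will not survive
# a restart.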
CONFIG   = Config.load
HMAC_KEY = CONFIG.hmac_key || Random::Secure.hex(32)

PG_URL = URI.new(
  scheme: "postgres",
  user: CONFIG.db.user,
  password: CONFIG.db.password,
  host: CONFIG.db.host,
  port: CONFIG.db.port,
  path: CONFIG.db.dbname,
)

PG_DB           = DB.open PG_URL
ARCHIVE_URL     = URI.parse("https://archive.org")
LOGIN_URL       = URI.parse("https://accounts.google.com")
PUBSUB_URL      = URI.parse("https://pubsubhubbub.appspot.com")
REDDIT_URL      = URI.parse("https://www.reddit.com")
TEXTCAPTCHA_URL = URI.parse("https://textcaptcha.com")
YT_URL          = URI.parse("https://www.youtube.com")
HOST_URL        = make_host_url(Kemal.config)

CHARS_SAFE         = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
TEST_IDS           = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
MAX_ITEMS_PER_PAGE = 1500

REQUEST_HEADERS_WHITELIST  = {"accept", "accept-encoding", "cache-control", "content-length", "if-none-match", "range"}
RESPONSE_HEADERS_BLACKLIST = {"access-control-allow-origin", "alt-svc", "server"}

HTTP_CHUNK_SIZE = 10485760 # ~10MB
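
# The version constants below are resolved at compile time: the `{{ ... }}` macro
# interpolation runs the backticked git commands when the binary is built, so the
# reported version reflects the build, not the running checkout.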
CURRENT_BRANCH  = {{ "#{`git branch | sed -n '/* /s///p'`.strip}" }}
CURRENT_COMMIT  = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
CURRENT_VERSION = {{ "#{`git log -1 --format=%ci | awk '{print $1}' | sed s/-/./g`.strip}" }}

# This is used to determine the `?v=` query appended to asset URLs (for cache busting).
# Since only modified assets need to expire, this finds the last commit that changed
# anything under `assets/`.
ASSET_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit -- assets`.strip}" }}

SOFTWARE = {
  "name"    => "invidious",
  "version" => "#{CURRENT_VERSION}-#{CURRENT_COMMIT}",
  "branch"  => "#{CURRENT_BRANCH}",
}

LOCALES = {
  "ar"    => load_locale("ar"),
  "de"    => load_locale("de"),
  "el"    => load_locale("el"),
  "en-US" => load_locale("en-US"),
  "eo"    => load_locale("eo"),
  "es"    => load_locale("es"),
  "eu"    => load_locale("eu"),
  "fr"    => load_locale("fr"),
  "hu"    => load_locale("hu-HU"),
  "is"    => load_locale("is"),
  "it"    => load_locale("it"),
  "ja"    => load_locale("ja"),
  "nb-NO" => load_locale("nb-NO"),
  "nl"    => load_locale("nl"),
  "pl"    => load_locale("pl"),
  "pt-BR" => load_locale("pt-BR"),
  "pt-PT" => load_locale("pt-PT"),
  "ro"    => load_locale("ro"),
  "ru"    => load_locale("ru"),
  "sv"    => load_locale("sv-SE"),
  "tr"    => load_locale("tr"),
  "uk"    => load_locale("uk"),
  "zh-CN" => load_locale("zh-CN"),
  "zh-TW" => load_locale("zh-TW"),
}
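
# Reusable connection pool for requests to youtube.com; the pool size comes from
# the config (`pool_size`), with a 2 second checkout timeout.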
YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)

# CLI
Kemal.config.extra_options do |parser|
  parser.banner = "Usage: invidious [arguments]"
  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
    begin
      CONFIG.channel_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{CONFIG.feed_threads})") do |number|
    begin
      CONFIG.feed_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
    CONFIG.output = output
  end
  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
    CONFIG.log_level = LogLevel.parse(log_level)
  end
  parser.on("-v", "--version", "Print version") do
    puts SOFTWARE.to_pretty_json
    exit
  end
end

Kemal::CLI.new ARGV

if CONFIG.output.upcase != "STDOUT"
  FileUtils.mkdir_p(File.dirname(CONFIG.output))
end

OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
2019-04-15 09:13:09 -07:00
# Check table integrity
2019-04-11 10:13:25 -07:00
if CONFIG . check_tables
2021-01-04 07:51:06 -08:00
check_enum ( PG_DB , " privacy " , PlaylistPrivacy )
2019-08-05 16:49:13 -07:00
2021-01-04 07:51:06 -08:00
check_table ( PG_DB , " channels " , InvidiousChannel )
check_table ( PG_DB , " channel_videos " , ChannelVideo )
check_table ( PG_DB , " playlists " , InvidiousPlaylist )
check_table ( PG_DB , " playlist_videos " , PlaylistVideo )
check_table ( PG_DB , " nonces " , Nonce )
check_table ( PG_DB , " session_ids " , SessionId )
check_table ( PG_DB , " users " , User )
check_table ( PG_DB , " videos " , Video )
2019-04-15 09:13:09 -07:00
if CONFIG . cache_annotations
2021-01-04 07:51:06 -08:00
check_table ( PG_DB , " annotations " , Annotation )
2019-04-15 09:13:09 -07:00
end
2019-04-11 10:13:25 -07:00
end

# Start jobs

if CONFIG.channel_threads > 0
  Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB)
end

if CONFIG.feed_threads > 0
  Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB)
end

DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
if CONFIG.decrypt_polling
  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
end

if CONFIG.statistics_enabled
  Invidious::Jobs.register Invidious::Jobs::StatisticsRefreshJob.new(PG_DB, SOFTWARE)
end

if (CONFIG.use_pubsub_feeds.is_a?(Bool) && CONFIG.use_pubsub_feeds.as(Bool)) || (CONFIG.use_pubsub_feeds.is_a?(Int32) && CONFIG.use_pubsub_feeds.as(Int32) > 0)
  Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, HMAC_KEY)
end

if CONFIG.popular_enabled
  Invidious::Jobs.register Invidious::Jobs::PullPopularVideosJob.new(PG_DB)
end

if CONFIG.captcha_key
  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new
end
connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
Invidious::Jobs.register Invidious::Jobs::NotificationJob.new(connection_channel, PG_URL)

Invidious::Jobs.start_all

def popular_videos
  Invidious::Jobs::PullPopularVideosJob::POPULAR_VIDEOS.get
end
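
# Runs before every request: load preferences from the PREFS cookie, set
# security-related response headers, and (outside of asset/proxy paths) resolve
# the current user and CSRF token from the SID cookie.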
before_all do |env|
  preferences = begin
    Preferences.from_json(env.request.cookies["PREFS"]?.try &.value || "{}")
  rescue
    Preferences.from_json("{}")
  end

  env.set "preferences", preferences

  env.response.headers["X-XSS-Protection"] = "1; mode=block"
  env.response.headers["X-Content-Type-Options"] = "nosniff"
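
  # Allow the player to fetch media directly from googlevideo.com/youtube.com only
  # when proxying ("local") is disabled for the instance or not enabled by the user.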
  extra_media_csp = ""
  if CONFIG.disabled?("local") || !preferences.local
    extra_media_csp += " https://*.googlevideo.com:443"
    extra_media_csp += " https://*.youtube.com:443"
  end

  # TODO: Remove style-src's 'unsafe-inline'; this requires removing all inline styles (<style> [..] </style>, style=" [..] ")
  env.response.headers["Content-Security-Policy"] = "default-src 'none'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; connect-src 'self'; manifest-src 'self'; media-src 'self' blob:#{extra_media_csp}"

  env.response.headers["Referrer-Policy"] = "same-origin"

  if (Kemal.config.ssl || CONFIG.https_only) && CONFIG.hsts
    env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
  end
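
  # Static assets and proxied media don't need a session; skip the user lookup
  # below for these paths.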
  next if {
    "/sb/",
    "/vi/",
    "/s_p/",
    "/yts/",
    "/ggpht/",
    "/api/manifest/",
    "/videoplayback",
    "/latest_version",
  }.any? { |r| env.request.resource.starts_with? r }

  if env.request.cookies.has_key? "SID"
    sid = env.request.cookies["SID"].value

    if sid.starts_with? "v1:"
      raise "Cannot use token as SID"
    end

    # Invidious users only have SID
    if !env.request.cookies.has_key? "SSID"
      if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
        user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
        csrf_token = generate_response(sid, {
          ":authorize_token",
          ":playlist_ajax",
          ":signout",
          ":subscription_ajax",
          ":token_ajax",
          ":watch_ajax",
        }, HMAC_KEY, PG_DB, 1.week)

        preferences = user.preferences
        env.set "preferences", preferences

        env.set "sid", sid
        env.set "csrf_token", csrf_token
        env.set "user", user
      end
    else
      headers = HTTP::Headers.new
      headers["Cookie"] = env.request.headers["Cookie"]

      begin
        user, sid = get_user(sid, headers, PG_DB, false)
        csrf_token = generate_response(sid, {
          ":authorize_token",
          ":playlist_ajax",
          ":signout",
          ":subscription_ajax",
          ":token_ajax",
          ":watch_ajax",
        }, HMAC_KEY, PG_DB, 1.week)

        preferences = user.preferences
        env.set "preferences", preferences

        env.set "sid", sid
        env.set "csrf_token", csrf_token
        env.set "user", user
      rescue ex
      end
    end
  end

  dark_mode = convert_theme(env.params.query["dark_mode"]?) || preferences.dark_mode.to_s
  thin_mode = env.params.query["thin_mode"]? || preferences.thin_mode.to_s
  thin_mode = thin_mode == "true"
  locale = env.params.query["hl"]? || preferences.locale

  preferences.dark_mode = dark_mode
  preferences.thin_mode = thin_mode
  preferences.locale = locale

  current_page = env.request.path
  if env.request.query
    query = HTTP::Params.parse(env.request.query.not_nil!)

    if query["referer"]?
      query["referer"] = get_referer(env, "/")
    end

    current_page += "?#{query}"
  end

  env.set "current_page", URI.encode_www_form(current_page)
end
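
# Routes registered through Invidious::Routing; the endpoints further down in
# this file are still plain Kemal handlers.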
Invidious::Routing.get "/", Invidious::Routes::Home
Invidious::Routing.get "/privacy", Invidious::Routes::Privacy
Invidious::Routing.get "/licenses", Invidious::Routes::Licenses

Invidious::Routing.get "/watch", Invidious::Routes::Watch

Invidious::Routing.get "/embed/", Invidious::Routes::Embed::Index
Invidious::Routing.get "/embed/:id", Invidious::Routes::Embed::Show

Invidious::Routing.get "/view_all_playlists", Invidious::Routes::Playlists, :index
Invidious::Routing.get "/create_playlist", Invidious::Routes::Playlists, :new
Invidious::Routing.post "/create_playlist", Invidious::Routes::Playlists, :create
Invidious::Routing.get "/subscribe_playlist", Invidious::Routes::Playlists, :subscribe
Invidious::Routing.get "/delete_playlist", Invidious::Routes::Playlists, :delete_page
Invidious::Routing.post "/delete_playlist", Invidious::Routes::Playlists, :delete
Invidious::Routing.get "/edit_playlist", Invidious::Routes::Playlists, :edit
Invidious::Routing.post "/edit_playlist", Invidious::Routes::Playlists, :update
Invidious::Routing.get "/add_playlist_items", Invidious::Routes::Playlists, :add_playlist_items_page
Invidious::Routing.post "/playlist_ajax", Invidious::Routes::Playlists, :playlist_ajax
Invidious::Routing.get "/playlist", Invidious::Routes::Playlists, :show
Invidious::Routing.get "/mix", Invidious::Routes::Playlists, :mix

Invidious::Routing.get "/opensearch.xml", Invidious::Routes::Search, :opensearch
Invidious::Routing.get "/results", Invidious::Routes::Search, :results
Invidious::Routing.get "/search", Invidious::Routes::Search, :search

Invidious::Routing.get "/login", Invidious::Routes::Login, :login_page
Invidious::Routing.post "/login", Invidious::Routes::Login, :login
Invidious::Routing.post "/signout", Invidious::Routes::Login, :signout

Invidious::Routing.get "/preferences", Invidious::Routes::UserPreferences, :show
Invidious::Routing.post "/preferences", Invidious::Routes::UserPreferences, :update
Invidious::Routing.get "/toggle_theme", Invidious::Routes::UserPreferences, :toggle_theme

# Users

post "/watch_ajax" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env, "/feed/subscriptions")

  redirect = env.params.query["redirect"]?
  redirect ||= "true"
  redirect = redirect == "true"

  if !user
    if redirect
      next env.redirect referer
    else
      next error_json(403, "No such user")
    end
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  id = env.params.query["id"]?
  if !id
    env.response.status_code = 400
    next
  end

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    if redirect
      next error_template(400, ex)
    else
      next error_json(400, ex)
    end
  end

  if env.params.query["action_mark_watched"]?
    action = "action_mark_watched"
  elsif env.params.query["action_mark_unwatched"]?
    action = "action_mark_unwatched"
  else
    next env.redirect referer
  end

  case action
  when "action_mark_watched"
    if !user.watched.includes? id
      PG_DB.exec("UPDATE users SET watched = array_append(watched, $1) WHERE email = $2", id, user.email)
    end
  when "action_mark_unwatched"
    PG_DB.exec("UPDATE users SET watched = array_remove(watched, $1) WHERE email = $2", id, user.email)
  else
    next error_json(400, "Unsupported action #{action}")
  end

  if redirect
    env.redirect referer
  else
    env.response.content_type = "application/json"
    "{}"
  end
end

# /modify_notifications
# will "ding" all subscriptions.
# /modify_notifications?receive_all_updates=false&receive_no_updates=false
# will "unding" all subscriptions.
get "/modify_notifications" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env, "/")

  redirect = env.params.query["redirect"]?
  redirect ||= "false"
  redirect = redirect == "true"

  if !user
    if redirect
      next env.redirect referer
    else
      next error_json(403, "No such user")
    end
  end

  user = user.as(User)

  if !user.password
    channel_req = {} of String => String

    channel_req["receive_all_updates"] = env.params.query["receive_all_updates"]? || "true"
    channel_req["receive_no_updates"] = env.params.query["receive_no_updates"]? || ""
    channel_req["receive_post_updates"] = env.params.query["receive_post_updates"]? || "true"

    channel_req.reject! { |k, v| v != "true" && v != "false" }

    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]

    html = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)

    cookies = HTTP::Cookies.from_headers(headers)
    html.cookies.each do |cookie|
      if {"VISITOR_INFO1_LIVE", "YSC", "SIDCC"}.includes? cookie.name
        if cookies[cookie.name]?
          cookies[cookie.name] = cookie
        else
          cookies << cookie
        end
      end
    end
    headers = cookies.add_request_headers(headers)

    if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
      session_token = match["session_token"]
    else
      next env.redirect referer
    end

    headers["content-type"] = "application/x-www-form-urlencoded"
    channel_req["session_token"] = session_token

    subs = XML.parse_html(html.body)
    subs.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
      channel_id = channel.content.lstrip("/channel/").not_nil!
      channel_req["channel_id"] = channel_id

      YT_POOL.client &.post("/subscription_ajax?action_update_subscription_preferences=1", headers, form: channel_req)
    end
  end

  if redirect
    env.redirect referer
  else
    env.response.content_type = "application/json"
    "{}"
  end
end

post "/subscription_ajax" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env, "/")

  redirect = env.params.query["redirect"]?
  redirect ||= "true"
  redirect = redirect == "true"

  if !user
    if redirect
      next env.redirect referer
    else
      next error_json(403, "No such user")
    end
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    if redirect
      next error_template(400, ex)
    else
      next error_json(400, ex)
    end
  end

  if env.params.query["action_create_subscription_to_channel"]?.try &.to_i?.try &.== 1
    action = "action_create_subscription_to_channel"
  elsif env.params.query["action_remove_subscriptions"]?.try &.to_i?.try &.== 1
    action = "action_remove_subscriptions"
  else
    next env.redirect referer
  end

  channel_id = env.params.query["c"]?
  channel_id ||= ""

  if !user.password
    # Sync subscriptions with YouTube
    subscribe_ajax(channel_id, action, env.request.headers)
  end
  email = user.email

  case action
  when "action_create_subscription_to_channel"
    if !user.subscriptions.includes? channel_id
      get_channel(channel_id, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
    end
  when "action_remove_subscriptions"
    PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", channel_id, email)
  else
    next error_json(400, "Unsupported action #{action}")
  end

  if redirect
    env.redirect referer
  else
    env.response.content_type = "application/json"
    "{}"
  end
end

get "/subscription_manager" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  if !user.password
    # Refresh account
    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]

    user, sid = get_user(sid, headers, PG_DB)
  end

  action_takeout = env.params.query["action_takeout"]?.try &.to_i?
  action_takeout ||= 0
  action_takeout = action_takeout == 1

  format = env.params.query["format"]?
  format ||= "rss"

  if user.subscriptions.empty?
    values = "'{}'"
  else
    values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
  end

  subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
  subscriptions.sort_by! { |channel| channel.author.downcase }
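
  # "Takeout" export: JSON includes subscriptions, watch history, preferences and
  # Invidious playlists; any other format produces an OPML outline of subscriptions
  # (NewPipe or Invidious flavour).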
  if action_takeout
    if format == "json"
      env.response.content_type = "application/json"
      env.response.headers["content-disposition"] = "attachment"

      playlists = PG_DB.query_all("SELECT * FROM playlists WHERE author = $1 AND id LIKE 'IV%' ORDER BY created", user.email, as: InvidiousPlaylist)

      next JSON.build do |json|
        json.object do
          json.field "subscriptions", user.subscriptions
          json.field "watch_history", user.watched
          json.field "preferences", user.preferences
          json.field "playlists" do
            json.array do
              playlists.each do |playlist|
                json.object do
                  json.field "title", playlist.title
                  json.field "description", html_to_content(playlist.description_html)
                  json.field "privacy", playlist.privacy.to_s
                  json.field "videos" do
                    json.array do
                      PG_DB.query_all("SELECT id FROM playlist_videos WHERE plid = $1 ORDER BY array_position($2, index) LIMIT 500", playlist.id, playlist.index, as: String).each do |video_id|
                        json.string video_id
                      end
                    end
                  end
                end
              end
            end
          end
        end
      end
    else
      env.response.content_type = "application/xml"
      env.response.headers["content-disposition"] = "attachment"
      export = XML.build do |xml|
        xml.element("opml", version: "1.1") do
          xml.element("body") do
            if format == "newpipe"
              title = "YouTube Subscriptions"
            else
              title = "Invidious Subscriptions"
            end

            xml.element("outline", text: title, title: title) do
              subscriptions.each do |channel|
                if format == "newpipe"
                  xmlUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
                else
                  xmlUrl = "#{HOST_URL}/feed/channel/#{channel.id}"
                end

                xml.element("outline", text: channel.author, title: channel.author,
                  "type": "rss", xmlUrl: xmlUrl)
              end
            end
          end
        end
      end

      next export.gsub(%(<?xml version="1.0"?>\n), "")
    end
  end

  templated "subscription_manager"
end

get "/data_control" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  templated "data_control"
end

post "/data_control" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  if user
    user = user.as(User)

    # TODO: Find a way to prevent browser timeout
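
    # Recognized multipart field names: import_invidious, import_youtube,
    # import_freetube, import_newpipe_subscriptions and import_newpipe.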
    HTTP::FormData.parse(env.request) do |part|
      body = part.body.gets_to_end
      next if body.empty?

      # TODO: Unify into single import based on content-type
      case part.name
      when "import_invidious"
        body = JSON.parse(body)

        if body["subscriptions"]?
          user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
          user.subscriptions.uniq!

          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

          PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
        end

        if body["watch_history"]?
          user.watched += body["watch_history"].as_a.map { |a| a.as_s }
          user.watched.uniq!
          PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
        end

        if body["preferences"]?
          user.preferences = Preferences.from_json(body["preferences"].to_json)
          PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", user.preferences.to_json, user.email)
        end

        if playlists = body["playlists"]?.try &.as_a?
          playlists.each do |item|
            title = item["title"]?.try &.as_s?.try &.delete("<>")
            description = item["description"]?.try &.as_s?.try &.delete("\r")
            privacy = item["privacy"]?.try &.as_s?.try { |privacy| PlaylistPrivacy.parse? privacy }

            next if !title
            next if !description
            next if !privacy

            playlist = create_playlist(PG_DB, title, privacy, user)
            PG_DB.exec("UPDATE playlists SET description = $1 WHERE id = $2", description, playlist.id)

            videos = item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
              raise InfoException.new("Playlist cannot have more than 500 videos") if idx > 500

              video_id = video_id.try &.as_s?
              next if !video_id

              begin
                video = get_video(video_id, PG_DB)
              rescue ex
                next
              end

              playlist_video = PlaylistVideo.new({
                title:          video.title,
                id:             video.id,
                author:         video.author,
                ucid:           video.ucid,
                length_seconds: video.length_seconds,
                published:      video.published,
                plid:           playlist.id,
                live_now:       video.live_now,
                index:          Random::Secure.rand(0_i64..Int64::MAX),
              })

              video_array = playlist_video.to_a
              args = arg_array(video_array)

              PG_DB.exec("INSERT INTO playlist_videos VALUES (#{args})", args: video_array)
              PG_DB.exec("UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3", playlist_video.index, Time.utc, playlist.id)
            end
          end
        end
      when "import_youtube"
        subscriptions = JSON.parse(body)
        user.subscriptions += subscriptions.as_a.compact_map do |entry|
          entry["snippet"]["resourceId"]["channelId"].as_s
        end
        user.subscriptions.uniq!

        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_freetube"
        user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
          md["channel_id"]
        end
        user.subscriptions.uniq!

        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe_subscriptions"
        body = JSON.parse(body)
        user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
          if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
            next match["channel"]
          elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
            response = YT_POOL.client &.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
            html = XML.parse_html(response.body)
            ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
            next ucid if ucid
          end

          nil
        end
        user.subscriptions.uniq!

        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

        PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
      when "import_newpipe"
        Compress::Zip::Reader.open(IO::Memory.new(body)) do |file|
          file.each_entry do |entry|
            if entry.filename == "newpipe.db"
              tempfile = File.tempfile(".db")
              File.write(tempfile.path, entry.io.gets_to_end)
              db = DB.open("sqlite3://" + tempfile.path)

              user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
              user.watched.uniq!

              PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)

              user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
              user.subscriptions.uniq!

              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

              PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)

              db.close
              tempfile.delete
            end
          end
        end
      else nil # Ignore
      end
    end
  end

  env.redirect referer
end

get "/change_password" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY, PG_DB)

  templated "change_password"
end

post "/change_password" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  # We don't store passwords for Google accounts
  if !user.password
    next error_template(400, "Cannot change password for Google accounts")
  end

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  password = env.params.body["password"]?
  if !password
    next error_template(401, "Password is a required field")
  end

  new_passwords = env.params.body.select { |k, v| k.match(/^new_password\[\d+\]$/) }.map { |k, v| v }

  if new_passwords.size <= 1 || new_passwords.uniq.size != 1
    next error_template(400, "New passwords must match")
  end

  new_password = new_passwords.uniq[0]
  if new_password.empty?
    next error_template(401, "Password cannot be empty")
  end

  if new_password.bytesize > 55
    next error_template(400, "Password cannot be longer than 55 characters")
  end

  if !Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
    next error_template(401, "Incorrect password")
  end

  new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
  PG_DB.exec("UPDATE users SET password = $1 WHERE email = $2", new_password.to_s, user.email)

  env.redirect referer
end

get "/delete_account" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY, PG_DB)

  templated "delete_account"
end

post "/delete_account" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  view_name = "subscriptions_#{sha256(user.email)}"
  PG_DB.exec("DELETE FROM users * WHERE email = $1", user.email)
  PG_DB.exec("DELETE FROM session_ids * WHERE email = $1", user.email)
  PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")

  env.request.cookies.each do |cookie|
    cookie.expires = Time.utc(1990, 1, 1)
    env.response.cookies << cookie
  end

  env.redirect referer
end

get "/clear_watch_history" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY, PG_DB)

  templated "clear_watch_history"
end

post "/clear_watch_history" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)

  env.redirect referer
end

get "/authorize_token" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB)

  scopes = env.params.query["scopes"]?.try &.split(",")
  scopes ||= [] of String

  callback_url = env.params.query["callback_url"]?
  if callback_url
    callback_url = URI.parse(callback_url)
  end

  expire = env.params.query["expire"]?.try &.to_i?

  templated "authorize_token"
end

post "/authorize_token" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = env.get("user").as(User)
  sid = sid.as(String)
  token = env.params.body["csrf_token"]?

  begin
    validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
  rescue ex
    next error_template(400, ex)
  end

  scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
  callback_url = env.params.body["callbackUrl"]?
  expire = env.params.body["expire"]?.try &.to_i?

  access_token = generate_token(user.email, scopes, expire, HMAC_KEY, PG_DB)

  if callback_url
    access_token = URI.encode_www_form(access_token)
    url = URI.parse(callback_url)

    if url.query
      query = HTTP::Params.parse(url.query.not_nil!)
    else
      query = HTTP::Params.new
    end

    query["token"] = access_token
    url.query = query.to_s

    env.redirect url.to_s
  else
    csrf_token = ""

    env.set "access_token", access_token
    templated "authorize_token"
  end
end
get " /token_manager " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
user = env . get? " user "
sid = env . get? " sid "
referer = get_referer ( env , " /subscription_manager " )
if ! user
next env . redirect referer
end
user = user . as ( User )
tokens = PG_DB . query_all ( " SELECT id, issued FROM session_ids WHERE email = $1 ORDER BY issued DESC " , user . email , as : { session : String , issued : Time } )
templated " token_manager "
end
post " /token_ajax " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
user = env . get? " user "
sid = env . get? " sid "
referer = get_referer ( env )
redirect = env . params . query [ " redirect " ]?
redirect || = " true "
redirect = redirect == " true "
if ! user
if redirect
next env . redirect referer
else
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " No such user " )
2019-04-18 14:23:50 -07:00
end
end
user = user . as ( User )
sid = sid . as ( String )
token = env . params . body [ " csrf_token " ]?
begin
validate_request ( token , sid , env . request , HMAC_KEY , PG_DB , locale )
rescue ex
if redirect
2020-11-30 01:59:21 -08:00
next error_template ( 400 , ex )
2019-04-18 14:23:50 -07:00
else
2020-11-30 01:59:21 -08:00
next error_json ( 400 , ex )
2019-04-18 14:23:50 -07:00
end
end
if env . params . query [ " action_revoke_token " ]?
action = " action_revoke_token "
else
next env . redirect referer
end
session = env . params . query [ " session " ]?
session || = " "
case action
when . starts_with? " action_revoke_token "
PG_DB . exec ( " DELETE FROM session_ids * WHERE id = $1 AND email = $2 " , session , user . email )
2020-04-09 10:18:09 -07:00
else
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Unsupported action #{ action } " )
2019-04-18 14:23:50 -07:00
end
if redirect
env . redirect referer
else
env . response . content_type = " application/json "
" {} "
end
end

# Feeds

get "/feed/playlists" do |env|
  env.redirect "/view_all_playlists"
end

get "/feed/top" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
  message = translate(locale, "The Top feed has been removed from Invidious.")
  templated "message"
end

get "/feed/popular" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  if CONFIG.popular_enabled
    templated "popular"
  else
    message = translate(locale, "The Popular feed has been disabled by the administrator.")
    templated "message"
  end
end

get "/feed/trending" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  trending_type = env.params.query["type"]?
  trending_type ||= "Default"

  region = env.params.query["region"]?
  region ||= "US"

  begin
    trending, plid = fetch_trending(trending_type, region, locale)
  rescue ex
    next error_template(500, ex)
  end

  templated "trending"
end

get "/feed/subscriptions" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  sid = env.get? "sid"
  referer = get_referer(env)

  if !user
    next env.redirect referer
  end

  user = user.as(User)
  sid = sid.as(String)
  token = user.token

  if user.preferences.unseen_only
    env.set "show_watched", true
  end

  # Refresh account
  headers = HTTP::Headers.new
  headers["Cookie"] = env.request.headers["Cookie"]

  if !user.password
    user, sid = get_user(sid, headers, PG_DB)
  end

  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)

  # "updated" here is used for delivering new notifications, so if
  # we know a user has looked at their feed e.g. in the past 10 minutes,
  # they've already seen a video posted 20 minutes ago, and don't need
  # to be notified.
  PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.utc,
    user.email)
  user.notifications = [] of String
  env.set "user", user

  templated "subscriptions"
end

get "/feed/history" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  user = env.get? "user"
  referer = get_referer(env)

  page = env.params.query["page"]?.try &.to_i?
  page ||= 1

  if !user
    next env.redirect referer
  end

  user = user.as(User)

  max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
  max_results ||= user.preferences.max_results
  max_results ||= CONFIG.default_user_preferences.max_results
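
  # Watch history is stored oldest-first, so reverse it and slice out the
  # requested page (newest items first).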
  if user.watched[(page - 1) * max_results]?
    watched = user.watched.reverse[(page - 1) * max_results, max_results]
  end
  watched ||= [] of String

  templated "history"
end

get "/feed/channel/:ucid" do |env|
  locale = LOCALES[env.get("preferences").as(Preferences).locale]?

  env.response.content_type = "application/atom+xml"

  ucid = env.params.url["ucid"]

  params = HTTP::Params.parse(env.params.query["params"]? || "")

  begin
    channel = get_about_info(ucid, locale)
  rescue ex : ChannelRedirect
    next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
  rescue ex
    next error_atom(500, ex)
  end
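
  # The channel feed is built from YouTube's own Atom feed, re-serializing each
  # entry through SearchVideo#to_xml below.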
response = YT_POOL . client & . get ( " /feeds/videos.xml?channel_id= #{ channel . ucid } " )
rss = XML . parse_html ( response . body )
2019-02-18 14:06:00 -08:00
2020-04-07 11:34:40 -07:00
videos = rss . xpath_nodes ( " //feed/entry " ) . map do | entry |
2019-02-18 14:06:00 -08:00
video_id = entry . xpath_node ( " videoid " ) . not_nil! . content
title = entry . xpath_node ( " title " ) . not_nil! . content
2019-03-07 19:13:54 -08:00
published = Time . parse_rfc3339 ( entry . xpath_node ( " published " ) . not_nil! . content )
updated = Time . parse_rfc3339 ( entry . xpath_node ( " updated " ) . not_nil! . content )
2019-02-18 14:06:00 -08:00
author = entry . xpath_node ( " author/name " ) . not_nil! . content
ucid = entry . xpath_node ( " channelid " ) . not_nil! . content
2019-06-08 13:08:27 -07:00
description_html = entry . xpath_node ( " group/description " ) . not_nil! . to_s
2019-02-18 14:06:00 -08:00
views = entry . xpath_node ( " group/community/statistics " ) . not_nil! . [ " views " ] . to_i64
2020-07-26 07:58:50 -07:00
SearchVideo . new ( {
title : title ,
id : video_id ,
author : author ,
ucid : ucid ,
published : published ,
views : views ,
description_html : description_html ,
length_seconds : 0 ,
live_now : false ,
paid : false ,
premium : false ,
premiere_timestamp : nil ,
} )
2019-02-18 14:06:00 -08:00
end
2018-07-16 09:24:24 -07:00
2019-06-07 10:39:12 -07:00
XML . build ( indent : " " , encoding : " UTF-8 " ) do | xml |
2018-08-04 13:30:44 -07:00
xml . element ( " feed " , " xmlns:yt " : " http://www.youtube.com/xml/schemas/2015 " ,
" xmlns:media " : " http://search.yahoo.com/mrss/ " , xmlns : " http://www.w3.org/2005/Atom " ,
" xml:lang " : " en-US " ) do
2020-06-15 15:10:30 -07:00
xml . element ( " link " , rel : " self " , href : " #{ HOST_URL } #{ env . request . resource } " )
2019-06-28 18:48:24 -07:00
xml . element ( " id " ) { xml . text " yt:channel: #{ channel . ucid } " }
xml . element ( " yt:channelId " ) { xml . text channel . ucid }
2020-12-14 10:25:39 -08:00
xml . element ( " icon " ) { xml . text channel . author_thumbnail }
2019-06-28 18:48:24 -07:00
xml . element ( " title " ) { xml . text channel . author }
2020-06-15 15:10:30 -07:00
xml . element ( " link " , rel : " alternate " , href : " #{ HOST_URL } /channel/ #{ channel . ucid } " )
2018-07-28 07:49:58 -07:00
2018-08-04 13:30:44 -07:00
xml . element ( " author " ) do
2019-06-28 18:48:24 -07:00
xml . element ( " name " ) { xml . text channel . author }
2020-06-15 15:10:30 -07:00
xml . element ( " uri " ) { xml . text " #{ HOST_URL } /channel/ #{ channel . ucid } " }
2018-08-04 13:30:44 -07:00
end
2018-07-28 20:31:02 -07:00
2018-09-04 19:04:40 -07:00
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_xml ( channel . auto_generated , params , xml )
2019-06-07 10:42:07 -07:00
end
end
end
end
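# Example usage (the hostname is a placeholder for this instance's HOST_URL,
# the channel ID is illustrative):
#
#   curl "https://invidious.example.com/feed/channel/UCxxxxxxxxxxxxxxxxxxxxxx"
#
# The response is an Atom feed rebuilt from YouTube's /feeds/videos.xml for
# that channel, with each entry rendered through SearchVideo#to_xml.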
2018-07-16 09:24:24 -07:00
2018-08-04 13:30:44 -07:00
get " /feed/private " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2019-06-07 19:39:32 -07:00
env . response . content_type = " application/atom+xml "
2019-03-05 10:56:59 -08:00
2018-08-04 13:30:44 -07:00
token = env . params . query [ " token " ]?
2018-07-16 09:24:24 -07:00
2018-08-04 13:30:44 -07:00
if ! token
2019-03-23 08:24:30 -07:00
env . response . status_code = 403
next
2018-08-04 13:30:44 -07:00
end
2018-03-24 20:38:35 -07:00
2018-08-04 13:30:44 -07:00
user = PG_DB . query_one? ( " SELECT * FROM users WHERE token = $1 " , token . strip , as : User )
if ! user
2019-03-23 08:24:30 -07:00
env . response . status_code = 403
next
2018-08-04 13:30:44 -07:00
end
2018-07-17 06:19:45 -07:00
2019-06-08 14:04:55 -07:00
max_results = env . params . query [ " max_results " ]? . try & . to_i? . try & . clamp ( 0 , MAX_ITEMS_PER_PAGE )
max_results || = user . preferences . max_results
max_results || = CONFIG . default_user_preferences . max_results
2018-07-17 06:19:45 -07:00
2018-08-04 13:30:44 -07:00
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
2018-03-24 20:38:35 -07:00
2019-09-07 08:45:37 -07:00
params = HTTP :: Params . parse ( env . params . query [ " params " ]? || " " )
2019-06-07 10:39:12 -07:00
videos , notifications = get_subscription_feed ( PG_DB , user , max_results , page )
2018-07-31 08:44:07 -07:00
2019-06-07 10:39:12 -07:00
XML . build ( indent : " " , encoding : " UTF-8 " ) do | xml |
2018-12-23 10:07:04 -08:00
xml . element ( " feed " , " xmlns:yt " : " http://www.youtube.com/xml/schemas/2015 " ,
" xmlns:media " : " http://search.yahoo.com/mrss/ " , xmlns : " http://www.w3.org/2005/Atom " ,
" xml:lang " : " en-US " ) do
2020-06-15 15:10:30 -07:00
xml . element ( " link " , " type " : " text/html " , rel : " alternate " , href : " #{ HOST_URL } /feed/subscriptions " )
2019-06-07 10:39:12 -07:00
xml . element ( " link " , " type " : " application/atom+xml " , rel : " self " ,
href : " #{ HOST_URL } #{ env . request . resource } " )
2018-12-20 13:32:09 -08:00
xml . element ( " title " ) { xml . text translate ( locale , " Invidious Private Feed for `x` " , user . email ) }
2018-07-18 07:15:58 -07:00
2019-06-07 19:27:37 -07:00
( notifications + videos ) . each do | video |
2020-06-15 15:10:30 -07:00
video . to_xml ( locale , params , xml )
2018-08-04 13:30:44 -07:00
end
2018-04-17 14:27:55 -07:00
end
2018-08-04 13:30:44 -07:00
end
end
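# Example usage (hostname and token are placeholders; the token is the
# per-user value stored in users.token):
#
#   curl "https://invidious.example.com/feed/private?token=<token>&max_results=20"
#
# Requests with a missing or unknown token receive a 403, as handled above.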
2018-03-24 20:38:35 -07:00
2018-09-17 16:13:24 -07:00
get " /feed/playlist/:plid " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2019-06-07 19:39:32 -07:00
env . response . content_type = " application/atom+xml "
2019-03-05 10:56:59 -08:00
2018-09-17 16:13:24 -07:00
plid = env . params . url [ " plid " ]
2019-09-07 08:45:37 -07:00
params = HTTP :: Params . parse ( env . params . query [ " params " ]? || " " )
2018-09-17 16:13:24 -07:00
path = env . request . path
2019-08-05 16:49:13 -07:00
if plid . starts_with? " IV "
if playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
videos = get_playlist_videos ( PG_DB , playlist , offset : 0 , locale : locale )
next XML . build ( indent : " " , encoding : " UTF-8 " ) do | xml |
xml . element ( " feed " , " xmlns:yt " : " http://www.youtube.com/xml/schemas/2015 " ,
" xmlns:media " : " http://search.yahoo.com/mrss/ " , xmlns : " http://www.w3.org/2005/Atom " ,
" xml:lang " : " en-US " ) do
2020-06-15 15:10:30 -07:00
xml . element ( " link " , rel : " self " , href : " #{ HOST_URL } #{ env . request . resource } " )
2019-08-05 16:49:13 -07:00
xml . element ( " id " ) { xml . text " iv:playlist: #{ plid } " }
xml . element ( " iv:playlistId " ) { xml . text plid }
xml . element ( " title " ) { xml . text playlist . title }
2020-06-15 15:10:30 -07:00
xml . element ( " link " , rel : " alternate " , href : " #{ HOST_URL } /playlist?list= #{ plid } " )
2019-08-05 16:49:13 -07:00
xml . element ( " author " ) do
xml . element ( " name " ) { xml . text playlist . author }
end
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_xml ( false , xml )
2019-08-05 16:49:13 -07:00
end
end
end
else
env . response . status_code = 404
next
end
end
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( " /feeds/videos.xml?playlist_id= #{ plid } " )
2018-09-17 16:13:24 -07:00
document = XML . parse ( response . body )
document . xpath_nodes ( % q ( / / * [ @href ] | / / * [ @url ] ) ) . each do | node |
node . attributes . each do | attribute |
case attribute . name
2019-09-07 08:45:37 -07:00
when " url " , " href "
full_path = URI . parse ( node [ attribute . name ] ) . full_path
query_string_opt = full_path . starts_with? ( " /watch?v= " ) ? " & #{ params } " : " "
2020-06-15 15:10:30 -07:00
node [ attribute . name ] = " #{ HOST_URL } #{ full_path } #{ query_string_opt } "
2020-04-09 10:18:09 -07:00
else nil # Skip
2018-09-17 16:13:24 -07:00
end
end
end
document = document . to_xml ( options : XML :: SaveOptions :: NO_DECL )
document . scan ( / <uri>(?<url>[^<]+)< \/ uri> / ) . each do | match |
2020-06-15 15:10:30 -07:00
content = " #{ HOST_URL } #{ URI . parse ( match [ " url " ] ) . full_path } "
2018-09-17 16:13:24 -07:00
document = document . gsub ( match [ 0 ] , " <uri> #{ content } </uri> " )
end
document
end
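# Sketch of the link rewriting performed above, with placeholder values: an
# attribute like href="https://www.youtube.com/watch?v=<videoId>" is reduced
# to its path and rewritten to "#{HOST_URL}/watch?v=<videoId>&<params>", where
# the extra query string is only appended for /watch?v= links and comes from
# the optional `params` query parameter; <uri> elements are rewritten the same
# way by the regex pass over the serialized document.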
2019-03-29 13:50:18 -07:00
get " /feeds/videos.xml " do | env |
if ucid = env . params . query [ " channel_id " ]?
env . redirect " /feed/channel/ #{ ucid } "
elsif user = env . params . query [ " user " ]?
env . redirect " /feed/channel/ #{ user } "
elsif plid = env . params . query [ " playlist_id " ]?
env . redirect " /feed/playlist/ #{ plid } "
end
end
2019-03-04 08:46:58 -08:00
# Support push notifications via PubSubHubbub
2019-03-03 17:18:23 -08:00
2019-03-03 18:40:24 -08:00
get " /feed/webhook/:token " do | env |
verify_token = env . params . url [ " token " ]
2019-07-10 09:22:10 -07:00
mode = env . params . query [ " hub.mode " ]?
topic = env . params . query [ " hub.topic " ]?
challenge = env . params . query [ " hub.challenge " ]?
if ! mode || ! topic || ! challenge
env . response . status_code = 400
next
else
mode = mode . not_nil!
topic = topic . not_nil!
challenge = challenge . not_nil!
end
2019-03-03 17:18:23 -08:00
2019-06-07 17:56:41 -07:00
case verify_token
when . starts_with? " v1 "
2019-03-04 05:53:31 -08:00
_ , time , nonce , signature = verify_token . split ( " : " )
data = " #{ time } : #{ nonce } "
2019-06-07 17:56:41 -07:00
when . starts_with? " v2 "
2019-03-04 05:53:31 -08:00
time , signature = verify_token . split ( " : " )
data = " #{ time } "
2019-06-07 17:56:41 -07:00
else
env . response . status_code = 400
next
2019-03-04 05:53:31 -08:00
end
2019-03-03 17:18:23 -08:00
2019-04-04 05:49:53 -07:00
# The hub will sometimes check if we're still subscribed after delivery errors,
# so we reply with a 200 as long as the request hasn't expired
2019-06-07 17:56:41 -07:00
if Time . utc . to_unix - time . to_i > 432000
2019-03-23 08:24:30 -07:00
env . response . status_code = 400
next
2019-03-03 17:18:23 -08:00
end
2019-03-04 05:53:31 -08:00
if OpenSSL :: HMAC . hexdigest ( :sha1 , HMAC_KEY , data ) != signature
2019-03-23 08:24:30 -07:00
env . response . status_code = 400
next
2019-03-03 17:18:23 -08:00
end
2019-06-07 17:56:41 -07:00
if ucid = HTTP :: Params . parse ( URI . parse ( topic ) . query . not_nil! ) [ " channel_id " ]?
PG_DB . exec ( " UPDATE channels SET subscribed = $1 WHERE id = $2 " , Time . utc , ucid )
elsif plid = HTTP :: Params . parse ( URI . parse ( topic ) . query . not_nil! ) [ " playlist_id " ]?
PG_DB . exec ( " UPDATE playlists SET subscribed = $1 WHERE id = $2 " , Time . utc , ucid )
else
env . response . status_code = 400
next
end
2019-03-03 17:18:23 -08:00
2019-03-23 08:24:30 -07:00
env . response . status_code = 200
2019-06-07 17:56:41 -07:00
challenge
2019-03-03 17:18:23 -08:00
end
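# Minimal sketch of a "v1" verify token that would satisfy the checks above
# (derived from how this route parses it; the real token is generated by the
# subscription code elsewhere in the codebase):
#
#   time      = Time.utc.to_unix.to_s
#   nonce     = Random::Secure.hex(4)
#   signature = OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, "#{time}:#{nonce}")
#   token     = "v1:#{time}:#{nonce}:#{signature}"
#
# The route recomputes the digest over "time:nonce" and rejects tokens that
# are older than 432000 seconds (5 days) or whose HMAC doesn't match.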
2019-03-03 18:40:24 -08:00
post " /feed/webhook/:token " do | env |
2019-04-10 15:58:42 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-03-04 09:07:27 -08:00
token = env . params . url [ " token " ]
2019-03-03 17:18:23 -08:00
body = env . request . body . not_nil! . gets_to_end
signature = env . request . headers [ " X-Hub-Signature " ] . lchop ( " sha1= " )
if signature != OpenSSL :: HMAC . hexdigest ( :sha1 , HMAC_KEY , body )
2021-01-04 07:51:06 -08:00
LOGGER . error ( " /feed/webhook/ #{ token } : Invalid signature " )
2019-03-23 08:24:30 -07:00
env . response . status_code = 200
next
2019-03-03 17:18:23 -08:00
end
2019-03-04 09:07:27 -08:00
spawn do
rss = XML . parse_html ( body )
rss . xpath_nodes ( " //feed/entry " ) . each do | entry |
id = entry . xpath_node ( " videoid " ) . not_nil! . content
2019-04-04 05:49:53 -07:00
author = entry . xpath_node ( " author/name " ) . not_nil! . content
2019-03-07 19:49:52 -08:00
published = Time . parse_rfc3339 ( entry . xpath_node ( " published " ) . not_nil! . content )
2019-03-07 19:13:54 -08:00
updated = Time . parse_rfc3339 ( entry . xpath_node ( " updated " ) . not_nil! . content )
2019-03-03 17:18:23 -08:00
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , force_refresh : true )
2019-04-10 15:58:42 -07:00
# Deliver notifications to `/api/v1/auth/notifications`
payload = {
2019-04-20 10:41:51 -07:00
" topic " = > video . ucid ,
" videoId " = > video . id ,
" published " = > published . to_unix ,
2019-04-10 15:58:42 -07:00
} . to_json
PG_DB . exec ( " NOTIFY notifications, E' #{ payload } ' " )
2020-07-26 07:58:50 -07:00
video = ChannelVideo . new ( {
id : id ,
title : video . title ,
published : published ,
updated : updated ,
ucid : video . ucid ,
author : author ,
length_seconds : video . length_seconds ,
live_now : video . live_now ,
2019-04-10 15:58:42 -07:00
premiere_timestamp : video . premiere_timestamp ,
2020-07-26 07:58:50 -07:00
views : video . views ,
} )
2019-03-03 17:18:23 -08:00
2020-09-09 16:03:27 -07:00
was_insert = PG_DB . query_one ( " INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
ON CONFLICT ( id ) DO UPDATE SET title = $2 , published = $3 , \
updated = $4 , ucid = $5 , author = $6 , length_seconds = $7 , \
live_now = $8 , premiere_timestamp = $9 , views = $10 returning ( xmax = 0 ) as was_insert " , *video.to_tuple, as: Bool)
PG_DB . exec ( " UPDATE users SET notifications = array_append(notifications, $1), \
feed_needs_update = true WHERE $2 = ANY ( subscriptions ) " , video.id, video.ucid) if was_insert
2019-03-04 09:07:27 -08:00
end
2019-03-03 17:18:23 -08:00
end
2019-03-03 17:50:23 -08:00
2019-03-23 08:24:30 -07:00
env . response . status_code = 200
next
2019-03-03 17:18:23 -08:00
end
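# The NOTIFY payload emitted above can be observed from psql for debugging
# (illustrative only; values are placeholders):
#
#   LISTEN notifications;
#   -- asynchronous payload: {"topic":"<ucid>","videoId":"<videoId>","published":1600000000}
#
# The hub's POST is only trusted when X-Hub-Signature matches
# OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body), as checked at the top of the route.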
2018-08-04 13:30:44 -07:00
# Channels
2019-04-28 09:47:16 -07:00
{ " /channel/:ucid/live " , " /user/:user/live " , " /c/:user/live " } . each do | route |
get route do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
# There appears to be a bug in routing: configuring several routes
# as `/a/:a`, `/b/:a`, `/c/:a` results in a 404, so the channel/user/c
# prefix is parsed out of the request path manually.
value = env . request . resource . split ( " / " ) [ 2 ]
body = " "
{ " channel " , " user " , " c " } . each do | type |
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( " / #{ type } / #{ value } /live?disable_polymer=1 " )
2019-04-28 09:47:16 -07:00
if response . status_code == 200
body = response . body
end
end
video_id = body . match ( / 'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})" / ) . try & . [ " id " ]?
if video_id
params = [ ] of String
env . params . query . each do | k , v |
params << " #{ k } = #{ v } "
end
params = params . join ( " & " )
url = " /watch?v= #{ video_id } "
if ! params . empty?
url += " & #{ params } "
end
env . redirect url
else
env . redirect " /channel/ #{ value } "
end
end
end
2018-09-04 07:13:58 -07:00
# YouTube appears to let users set a "brand" URL that is different
# from their username, so we resolve it to the canonical channel ID here.
get " /c/:user " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-04 07:13:58 -07:00
user = env . params . url [ " user " ]
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( " /c/ #{ user } " )
2020-04-04 13:31:24 -07:00
html = XML . parse_html ( response . body )
2018-09-04 07:13:58 -07:00
2020-04-04 13:31:24 -07:00
ucid = html . xpath_node ( % q ( / / link [ @rel = " canonical " ] ) ) . try & . [ " href " ] . split ( " / " ) [ - 1 ]
next env . redirect " / " if ! ucid
2018-09-04 07:13:58 -07:00
2020-04-04 13:31:24 -07:00
env . redirect " /channel/ #{ ucid } "
2018-09-04 07:13:58 -07:00
end
2019-01-23 21:12:48 -08:00
# Legacy endpoint for /user/:username
get " /profile " do | env |
user = env . params . query [ " user " ]?
if ! user
env . redirect " / "
else
env . redirect " /user/ #{ user } "
end
end
2019-06-08 09:13:00 -07:00
get " /attribution_link " do | env |
if query = env . params . query [ " u " ]?
url = URI . parse ( query ) . full_path
else
url = " / "
end
env . redirect url
end
2019-05-26 11:49:35 -07:00
# Page used by YouTube to provide its captioning widget. Since we
# don't support it, we redirect to '/'.
get " /timedtext_video " do | env |
env . redirect " / "
end
2018-08-04 13:30:44 -07:00
get " /user/:user " do | env |
user = env . params . url [ " user " ]
env . redirect " /channel/ #{ user } "
2018-03-24 20:38:35 -07:00
end
2018-09-05 21:12:11 -07:00
get " /user/:user/videos " do | env |
user = env . params . url [ " user " ]
env . redirect " /channel/ #{ user } /videos "
end
2019-07-09 07:31:04 -07:00
get " /user/:user/about " do | env |
user = env . params . url [ " user " ]
env . redirect " /channel/ #{ user } "
end
2019-08-16 18:06:21 -07:00
get " /channel/:ucid/about " do | env |
2019-07-09 07:31:04 -07:00
ucid = env . params . url [ " ucid " ]
env . redirect " /channel/ #{ ucid } "
end
2018-08-04 13:30:44 -07:00
get " /channel/:ucid " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-08-04 13:30:44 -07:00
user = env . get? " user "
if user
user = user . as ( User )
subscriptions = user . subscriptions
end
subscriptions || = [ ] of String
2018-07-28 06:24:53 -07:00
ucid = env . params . url [ " ucid " ]
2018-08-04 13:30:44 -07:00
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
2019-02-24 14:39:44 -08:00
continuation = env . params . query [ " continuation " ]?
2018-11-13 17:04:25 -08:00
sort_by = env . params . query [ " sort_by " ]? . try & . downcase
2018-09-21 07:40:04 -07:00
begin
2019-06-28 18:48:24 -07:00
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
next env . redirect env . request . resource . gsub ( ucid , ex . channel_id )
2018-09-21 07:40:04 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_template ( 500 , ex )
2018-08-04 13:30:44 -07:00
end
2019-06-28 18:48:24 -07:00
if channel . auto_generated
2019-03-03 08:54:23 -08:00
sort_options = { " last " , " oldest " , " newest " }
sort_by || = " last "
2019-06-28 18:48:24 -07:00
items , continuation = fetch_channel_playlists ( channel . ucid , channel . author , channel . auto_generated , continuation , sort_by )
2019-03-17 16:31:11 -07:00
items . uniq! do | item |
if item . responds_to? ( :title )
item . title
elsif item . responds_to? ( :author )
item . author
end
end
2020-06-15 15:33:23 -07:00
items = items . select ( & . is_a? ( SearchPlaylist ) ) . map ( & . as ( SearchPlaylist ) )
2019-02-25 07:52:44 -08:00
items . each { | item | item . author = " " }
2019-02-24 14:39:44 -08:00
else
2019-03-03 08:54:23 -08:00
sort_options = { " newest " , " oldest " , " popular " }
sort_by || = " newest "
2020-06-15 15:33:23 -07:00
count , items = get_60_videos ( channel . ucid , channel . author , page , channel . auto_generated , sort_by )
items . reject! & . paid
2019-06-28 18:48:24 -07:00
env . set " search " , " channel: #{ channel . ucid } "
2019-02-24 14:39:44 -08:00
end
2018-08-04 13:30:44 -07:00
templated " channel "
end
get " /channel/:ucid/videos " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-08-04 13:30:44 -07:00
ucid = env . params . url [ " ucid " ]
params = env . request . query
if ! params || params . empty?
params = " "
else
params = " ? #{ params } "
2018-08-01 08:44:02 -07:00
end
2018-08-04 13:30:44 -07:00
env . redirect " /channel/ #{ ucid } #{ params } "
end
2018-07-28 06:24:53 -07:00
2019-03-03 08:54:23 -08:00
get " /channel/:ucid/playlists " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-03-03 08:54:23 -08:00
user = env . get? " user "
if user
user = user . as ( User )
subscriptions = user . subscriptions
end
subscriptions || = [ ] of String
ucid = env . params . url [ " ucid " ]
continuation = env . params . query [ " continuation " ]?
sort_by = env . params . query [ " sort_by " ]? . try & . downcase
sort_by || = " last "
begin
2019-06-28 18:48:24 -07:00
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
next env . redirect env . request . resource . gsub ( ucid , ex . channel_id )
2019-03-03 08:54:23 -08:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_template ( 500 , ex )
2019-03-03 08:54:23 -08:00
end
2019-06-28 18:48:24 -07:00
if channel . auto_generated
next env . redirect " /channel/ #{ channel . ucid } "
2019-03-03 08:54:23 -08:00
end
2019-06-28 18:48:24 -07:00
items , continuation = fetch_channel_playlists ( channel . ucid , channel . author , channel . auto_generated , continuation , sort_by )
2019-08-16 18:06:21 -07:00
items = items . select { | item | item . is_a? ( SearchPlaylist ) } . map { | item | item . as ( SearchPlaylist ) }
2019-03-03 08:54:23 -08:00
items . each { | item | item . author = " " }
2019-07-27 06:51:10 -07:00
env . set " search " , " channel: #{ channel . ucid } "
2019-03-03 08:54:23 -08:00
templated " playlists "
end
2019-07-09 07:31:04 -07:00
get " /channel/:ucid/community " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
user = env . get? " user "
if user
user = user . as ( User )
subscriptions = user . subscriptions
end
subscriptions || = [ ] of String
ucid = env . params . url [ " ucid " ]
thin_mode = env . params . query [ " thin_mode " ]? || env . get ( " preferences " ) . as ( Preferences ) . thin_mode
thin_mode = thin_mode == " true "
continuation = env . params . query [ " continuation " ]?
# sort_by = env.params.query["sort_by"]?.try &.downcase
begin
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
next env . redirect env . request . resource . gsub ( ucid , ex . channel_id )
2019-07-09 07:31:04 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_template ( 500 , ex )
2019-07-09 07:31:04 -07:00
end
if ! channel . tabs . includes? " community "
next env . redirect " /channel/ #{ channel . ucid } "
end
begin
2020-06-15 15:10:30 -07:00
items = JSON . parse ( fetch_channel_community ( ucid , continuation , locale , " json " , thin_mode ) )
2020-11-30 01:59:21 -08:00
rescue ex : InfoException
2019-07-09 07:31:04 -07:00
env . response . status_code = 500
error_message = ex . message
2020-11-30 01:59:21 -08:00
rescue ex
next error_template ( 500 , ex )
2019-07-09 07:31:04 -07:00
end
2019-07-27 06:51:10 -07:00
env . set " search " , " channel: #{ channel . ucid } "
2019-07-09 07:31:04 -07:00
templated " community "
end
2018-08-04 13:30:44 -07:00
# API Endpoints
2018-07-28 06:24:53 -07:00
2019-03-01 17:25:16 -08:00
get " /api/v1/stats " do | env |
2020-11-30 01:59:21 -08:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-03-01 17:25:16 -08:00
env . response . content_type = " application/json "
2021-01-23 10:39:04 -08:00
if ! CONFIG . statistics_enabled
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Statistics are not enabled. " )
2019-03-01 17:25:16 -08:00
end
2020-10-17 05:25:57 -07:00
Invidious :: Jobs :: StatisticsRefreshJob :: STATISTICS . to_json
2019-03-23 12:05:13 -07:00
end
2019-03-01 17:25:16 -08:00
2019-05-20 18:22:01 -07:00
# YouTube provides "storyboards": sprite sheets containing an x * y grid of
# preview thumbnails for individual scenes in a video.
# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
get " /api/v1/storyboards/:id " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
id = env . params . url [ " id " ]
region = env . params . query [ " region " ]?
begin
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , region : region )
2019-05-02 12:20:19 -07:00
rescue ex : VideoRedirect
2019-09-08 09:08:59 -07:00
env . response . headers [ " Location " ] = env . request . resource . gsub ( id , ex . video_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Video is unavailable " , { " videoId " = > ex . video_id } )
2019-05-02 12:20:19 -07:00
rescue ex
env . response . status_code = 500
next
end
storyboards = video . storyboards
width = env . params . query [ " width " ]?
height = env . params . query [ " height " ]?
if ! width && ! height
response = JSON . build do | json |
json . object do
json . field " storyboards " do
2020-06-15 15:10:30 -07:00
generate_storyboards ( json , id , storyboards )
2019-05-02 12:20:19 -07:00
end
end
end
next response
end
env . response . content_type = " text/vtt "
storyboard = storyboards . select { | storyboard | width == " #{ storyboard [ :width ] } " || height == " #{ storyboard [ :height ] } " }
if storyboard . empty?
env . response . status_code = 404
next
else
storyboard = storyboard [ 0 ]
end
2019-08-27 06:08:26 -07:00
String . build do | str |
str << <<-END_VTT
WEBVTT
END_VTT
2019-05-02 12:20:19 -07:00
2019-08-27 06:08:26 -07:00
start_time = 0 . milliseconds
end_time = storyboard [ :interval ] . milliseconds
2019-05-02 12:20:19 -07:00
2019-08-27 06:08:26 -07:00
storyboard [ :storyboard_count ] . times do | i |
2020-10-25 01:35:16 -07:00
url = storyboard [ :url ]
authority = / (i \ d?).ytimg.com / . match ( url ) . not_nil! [ 1 ]?
2020-12-21 08:02:36 -08:00
url = url . gsub ( " $M " , i ) . gsub ( %r( https://i \ d?.ytimg.com/sb/ ) , " " )
2020-10-25 01:35:16 -07:00
url = " #{ HOST_URL } /sb/ #{ authority } / #{ url } "
2019-05-02 12:20:19 -07:00
2019-08-27 06:08:26 -07:00
storyboard [ :storyboard_height ] . times do | j |
storyboard [ :storyboard_width ] . times do | k |
str << <<-END_CUE
#{start_time}.000 --> #{end_time}.000
#{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width] - 2},#{storyboard[:height]}
END_CUE
2019-05-02 12:20:19 -07:00
2019-08-27 06:08:26 -07:00
start_time += storyboard [ :interval ] . milliseconds
end_time += storyboard [ :interval ] . milliseconds
end
2019-05-02 12:20:19 -07:00
end
end
end
end
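# Illustrative excerpt of the WebVTT produced above (URL, tile size and
# timings are placeholders):
#
#   WEBVTT
#
#   00:00:00.000 --> 00:00:10.000
#   https://<host>/sb/<authority>/<path>#xywh=0,0,158,90
#
# Each cue points at one tile of the sprite sheet via a #xywh media fragment,
# stepping through the grid left to right, top to bottom.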
2018-08-04 13:30:44 -07:00
get " /api/v1/captions/:id " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-26 16:44:37 -07:00
env . response . content_type = " application/json "
2018-08-04 13:30:44 -07:00
id = env . params . url [ " id " ]
2018-11-17 15:37:57 -08:00
region = env . params . query [ " region " ]?
2018-07-29 19:01:28 -07:00
2019-05-20 18:22:01 -07:00
# See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
# It is possible to use `/api/timedtext?type=list&v=#{id}` and
# `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
# but this does not provide links for auto-generated captions.
#
# In the future this should be investigated as an alternative, since it does not
# require fetching the full video info.
2018-08-04 13:30:44 -07:00
begin
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , region : region )
2018-10-06 20:22:22 -07:00
rescue ex : VideoRedirect
2019-09-08 09:08:59 -07:00
env . response . headers [ " Location " ] = env . request . resource . gsub ( id , ex . video_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Video is unavailable " , { " videoId " = > ex . video_id } )
2018-08-04 13:30:44 -07:00
rescue ex
2019-03-23 08:24:30 -07:00
env . response . status_code = 500
next
2018-07-28 06:24:53 -07:00
end
2018-08-04 21:07:38 -07:00
captions = video . captions
2018-07-28 06:24:53 -07:00
2018-08-04 13:30:44 -07:00
label = env . params . query [ " label " ]?
2018-09-30 08:13:07 -07:00
lang = env . params . query [ " lang " ]?
tlang = env . params . query [ " tlang " ]?
if ! label && ! lang
2018-08-04 13:30:44 -07:00
response = JSON . build do | json |
json . object do
json . field " captions " do
json . array do
2018-08-04 21:07:38 -07:00
captions . each do | caption |
2018-08-04 13:30:44 -07:00
json . object do
2018-08-06 16:25:25 -07:00
json . field " label " , caption . name . simpleText
json . field " languageCode " , caption . languageCode
2019-09-24 10:31:33 -07:00
json . field " url " , " /api/v1/captions/ #{ id } ?label= #{ URI . encode_www_form ( caption . name . simpleText ) } "
2018-08-04 13:30:44 -07:00
end
end
end
end
2018-07-28 06:24:53 -07:00
end
2018-08-04 13:30:44 -07:00
end
2018-07-28 06:24:53 -07:00
2019-03-23 12:05:13 -07:00
next response
end
2018-07-28 06:24:53 -07:00
2019-05-20 18:22:01 -07:00
env . response . content_type = " text/vtt; charset=UTF-8 "
2018-09-30 08:13:07 -07:00
if lang
caption = captions . select { | caption | caption . languageCode == lang }
2020-01-08 17:27:21 -08:00
else
caption = captions . select { | caption | caption . name . simpleText == label }
2018-09-30 08:13:07 -07:00
end
2018-08-04 21:07:38 -07:00
if caption . empty?
2019-03-23 08:24:30 -07:00
env . response . status_code = 404
next
2018-08-04 13:30:44 -07:00
else
2018-08-04 21:07:38 -07:00
caption = caption [ 0 ]
2018-08-04 13:30:44 -07:00
end
2018-07-28 06:24:53 -07:00
2019-12-01 14:52:39 -08:00
url = URI . parse ( " #{ caption . baseUrl } &tlang= #{ tlang } " ) . full_path
2018-07-28 06:24:53 -07:00
2019-05-18 18:27:19 -07:00
# Auto-generated captions often have cues that aren't aligned properly with the video,
# as well as markup that makes them cumbersome to use, so we try to fix that here.
if caption . name . simpleText . includes? " auto-generated "
2019-10-25 09:58:16 -07:00
caption_xml = YT_POOL . client & . get ( url ) . body
2019-05-18 18:27:19 -07:00
caption_xml = XML . parse ( caption_xml )
2018-07-28 06:24:53 -07:00
2019-08-27 06:08:26 -07:00
webvtt = String . build do | str |
str << <<-END_VTT
WEBVTT
Kind : captions
Language : #{tlang || caption.languageCode}
END_VTT
2018-08-07 05:36:51 -07:00
2019-08-27 06:08:26 -07:00
caption_nodes = caption_xml . xpath_nodes ( " //transcript/text " )
caption_nodes . each_with_index do | node , i |
start_time = node [ " start " ] . to_f . seconds
duration = node [ " dur " ]? . try & . to_f . seconds
duration || = start_time
2019-05-18 18:27:19 -07:00
2019-08-27 06:08:26 -07:00
if caption_nodes . size > i + 1
end_time = caption_nodes [ i + 1 ] [ " start " ] . to_f . seconds
else
end_time = start_time + duration
end
2018-07-28 06:24:53 -07:00
2019-08-27 06:08:26 -07:00
start_time = " #{ start_time . hours . to_s . rjust ( 2 , '0' ) } : #{ start_time . minutes . to_s . rjust ( 2 , '0' ) } : #{ start_time . seconds . to_s . rjust ( 2 , '0' ) } . #{ start_time . milliseconds . to_s . rjust ( 3 , '0' ) } "
end_time = " #{ end_time . hours . to_s . rjust ( 2 , '0' ) } : #{ end_time . minutes . to_s . rjust ( 2 , '0' ) } : #{ end_time . seconds . to_s . rjust ( 2 , '0' ) } . #{ end_time . milliseconds . to_s . rjust ( 3 , '0' ) } "
2018-08-04 13:30:44 -07:00
2019-08-27 06:08:26 -07:00
text = HTML . unescape ( node . content )
text = text . gsub ( / <font color=" # [a-fA-F0-9]{6}"> / , " " )
text = text . gsub ( / < \/ font> / , " " )
if md = text . match ( / (?<name>.*) : (?<text>.*) / )
text = " <v #{ md [ " name " ] } > #{ md [ " text " ] } </v> "
end
2018-08-04 13:30:44 -07:00
2019-08-27 06:08:26 -07:00
str << <<-END_CUE
#{start_time} --> #{end_time}
#{text}
END_CUE
end
2019-05-18 18:27:19 -07:00
end
else
2019-10-25 09:58:16 -07:00
webvtt = YT_POOL . client & . get ( " #{ url } &format=vtt " ) . body
2018-07-28 06:24:53 -07:00
end
2019-04-11 10:08:43 -07:00
if title = env . params . query [ " title " ]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
2019-09-24 10:31:33 -07:00
env . response . headers [ " Content-Disposition " ] = " attachment; filename= \" #{ URI . encode_www_form ( title ) } \" ; filename*=UTF-8'' #{ URI . encode_www_form ( title ) } "
2019-04-11 10:08:43 -07:00
end
2018-08-04 13:30:44 -07:00
webvtt
2018-07-28 06:24:53 -07:00
end
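# Example usage (hostname and video ID are placeholders):
#
#   # list the available tracks as JSON
#   curl "https://invidious.example.com/api/v1/captions/<videoId>"
#
#   # fetch one track as WebVTT, optionally translated via &tlang=
#   curl "https://invidious.example.com/api/v1/captions/<videoId>?lang=en&tlang=de"
#
# The JSON listing contains a `label`, `languageCode` and relative `url` for
# each track, as built above.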
2018-08-04 13:30:44 -07:00
get " /api/v1/comments/:id " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-02-06 15:55:22 -08:00
region = env . params . query [ " region " ]?
2018-12-20 13:32:09 -08:00
2018-09-26 16:44:37 -07:00
env . response . content_type = " application/json "
2018-08-04 13:30:44 -07:00
id = env . params . url [ " id " ]
2018-07-20 09:19:49 -07:00
2018-08-04 13:30:44 -07:00
source = env . params . query [ " source " ]?
source || = " youtube "
2018-07-20 09:19:49 -07:00
2019-03-27 09:31:05 -07:00
thin_mode = env . params . query [ " thin_mode " ]?
thin_mode = thin_mode == " true "
2018-08-04 13:30:44 -07:00
format = env . params . query [ " format " ]?
format || = " json "
2018-07-20 09:19:49 -07:00
2018-10-31 14:47:53 -07:00
continuation = env . params . query [ " continuation " ]?
2019-04-14 16:08:00 -07:00
sort_by = env . params . query [ " sort_by " ]? . try & . downcase
2018-08-04 13:30:44 -07:00
2018-10-31 14:47:53 -07:00
if source == " youtube "
2019-04-14 16:08:00 -07:00
sort_by || = " top "
2018-10-31 14:47:53 -07:00
begin
2019-06-28 19:17:56 -07:00
comments = fetch_youtube_comments ( id , PG_DB , continuation , format , locale , thin_mode , region , sort_by : sort_by )
2018-10-31 14:47:53 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-07-20 09:19:49 -07:00
end
2018-11-10 07:05:26 -08:00
next comments
2018-08-04 13:30:44 -07:00
elsif source == " reddit "
2019-04-14 16:08:00 -07:00
sort_by || = " confidence "
2018-08-04 13:30:44 -07:00
begin
2019-04-14 16:08:00 -07:00
comments , reddit_thread = fetch_reddit_comments ( id , sort_by : sort_by )
2018-12-20 13:32:09 -08:00
content_html = template_reddit_comments ( comments , locale )
2018-03-30 18:52:10 -07:00
2018-08-04 13:30:44 -07:00
content_html = fill_links ( content_html , " https " , " www.reddit.com " )
2018-09-03 20:15:47 -07:00
content_html = replace_links ( content_html )
2018-08-04 13:30:44 -07:00
rescue ex
2018-09-06 08:19:28 -07:00
comments = nil
2018-08-04 13:30:44 -07:00
reddit_thread = nil
content_html = " "
end
2018-07-16 09:24:24 -07:00
2018-09-06 08:19:28 -07:00
if ! reddit_thread || ! comments
2019-03-23 08:24:30 -07:00
env . response . status_code = 404
next
2018-08-04 13:30:44 -07:00
end
2018-03-30 18:52:10 -07:00
2018-09-06 08:19:28 -07:00
if format == " json "
reddit_thread = JSON . parse ( reddit_thread . to_json ) . as_h
reddit_thread [ " comments " ] = JSON . parse ( comments . to_json )
2019-01-25 08:50:18 -08:00
2019-03-23 12:05:13 -07:00
next reddit_thread . to_json
2018-09-06 08:19:28 -07:00
else
2019-01-25 08:50:18 -08:00
response = {
2018-09-06 08:19:28 -07:00
" title " = > reddit_thread . title ,
2018-09-06 16:18:36 -07:00
" permalink " = > reddit_thread . permalink ,
" contentHtml " = > content_html ,
2019-01-25 08:50:18 -08:00
}
2019-03-23 12:05:13 -07:00
next response . to_json
2018-09-06 16:18:36 -07:00
end
2018-08-04 13:30:44 -07:00
end
2019-03-23 12:05:13 -07:00
end
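# Example usage (hostname and video ID are placeholders):
#
#   curl "https://invidious.example.com/api/v1/comments/<videoId>?source=reddit&format=html"
#
# returns {"title", "permalink", "contentHtml"} as built above, while the
# default source=youtube passes `continuation` and `sort_by` through to
# fetch_youtube_comments unchanged.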
2018-03-30 18:52:10 -07:00
2018-09-17 18:08:26 -07:00
get " /api/v1/insights/:id " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2020-11-30 01:59:21 -08:00
next error_json ( 410 , " YouTube has removed publicly available analytics. " )
2019-03-23 12:05:13 -07:00
end
2018-09-17 18:08:26 -07:00
2019-03-31 20:07:06 -07:00
get " /api/v1/annotations/:id " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " text/xml "
id = env . params . url [ " id " ]
source = env . params . query [ " source " ]?
source || = " archive "
if ! id . match ( / [a-zA-Z0-9_-]{11} / )
env . response . status_code = 400
next
end
annotations = " "
case source
when " archive "
2019-04-15 09:13:09 -07:00
if CONFIG . cache_annotations && ( cached_annotation = PG_DB . query_one? ( " SELECT * FROM annotations WHERE id = $1 " , id , as : Annotation ) )
annotations = cached_annotation . annotations
else
index = CHARS_SAFE . index ( id [ 0 ] ) . not_nil! . to_s . rjust ( 2 , '0' )
2019-03-31 20:07:06 -07:00
2019-04-15 09:13:09 -07:00
# The Internet Archive doesn't handle IDs with leading hyphens,
# so for those we use https://archive.org/details/youtubeannotations_64
if index == " 62 "
index = " 64 "
id = id . sub ( / ^- / , 'A' )
end
2019-03-31 20:07:06 -07:00
2019-09-24 10:31:33 -07:00
file = URI . encode_www_form ( " #{ id [ 0 , 3 ] } / #{ id } .xml " )
2019-03-31 20:07:06 -07:00
2020-12-22 21:52:23 -08:00
location = make_client ( ARCHIVE_URL , & . get ( " /download/youtubeannotations_ #{ index } / #{ id [ 0 , 2 ] } .tar/ #{ file } " ) )
2019-03-31 20:07:06 -07:00
2019-04-15 09:13:09 -07:00
if ! location . headers [ " Location " ]?
env . response . status_code = location . status_code
end
2019-03-31 20:07:06 -07:00
2020-12-22 21:52:23 -08:00
response = make_client ( URI . parse ( location . headers [ " Location " ] ) , & . get ( location . headers [ " Location " ] ) )
2019-03-31 20:07:06 -07:00
2019-04-15 09:13:09 -07:00
if response . body . empty?
env . response . status_code = 404
next
end
2019-04-13 06:28:59 -07:00
2019-04-15 09:13:09 -07:00
if response . status_code != 200
env . response . status_code = response . status_code
next
end
2019-03-31 20:07:06 -07:00
2019-04-15 09:13:09 -07:00
annotations = response . body
cache_annotation ( PG_DB , id , annotations )
end
2020-04-09 10:18:09 -07:00
else # "youtube"
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( " /annotations_invideo?video_id= #{ id } " )
2019-03-31 20:07:06 -07:00
if response . status_code != 200
env . response . status_code = response . status_code
next
end
annotations = response . body
end
2019-11-09 19:05:17 -08:00
etag = sha256 ( annotations ) [ 0 , 16 ]
if env . request . headers [ " If-None-Match " ]? . try & . == etag
env . response . status_code = 304
else
env . response . headers [ " ETag " ] = etag
annotations
end
2019-03-31 20:07:06 -07:00
end
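# The ETag handling above allows cheap revalidation; an illustrative exchange
# (hostname, video ID and tag are placeholders):
#
#   curl -i -H "If-None-Match: <etag>" "https://invidious.example.com/api/v1/annotations/<videoId>"
#   # => HTTP 304 when the 16-character SHA-256 prefix of the annotations still matches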
2018-08-04 13:30:44 -07:00
get " /api/v1/videos/:id " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-28 21:12:35 -07:00
env . response . content_type = " application/json "
2018-08-04 13:30:44 -07:00
id = env . params . url [ " id " ]
2018-11-17 15:33:30 -08:00
region = env . params . query [ " region " ]?
2018-03-30 18:52:10 -07:00
2018-08-04 13:30:44 -07:00
begin
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , region : region )
2018-10-06 20:22:22 -07:00
rescue ex : VideoRedirect
2019-09-08 09:08:59 -07:00
env . response . headers [ " Location " ] = env . request . resource . gsub ( id , ex . video_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Video is unavailable " , { " videoId " = > ex . video_id } )
2018-08-04 13:30:44 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-08-04 13:30:44 -07:00
end
2018-03-30 18:52:10 -07:00
2020-06-15 15:10:30 -07:00
video . to_json ( locale )
2019-03-23 12:05:13 -07:00
end
2018-07-30 10:34:57 -07:00
2018-08-04 13:30:44 -07:00
get " /api/v1/trending " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2019-01-25 08:50:18 -08:00
env . response . content_type = " application/json "
2018-11-20 09:18:12 -08:00
region = env . params . query [ " region " ]?
trending_type = env . params . query [ " type " ]?
begin
2019-06-28 19:17:56 -07:00
trending , plid = fetch_trending ( trending_type , region , locale )
2018-11-20 09:18:12 -08:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-11-20 09:18:12 -08:00
end
2018-08-04 13:30:44 -07:00
videos = JSON . build do | json |
json . array do
2018-11-20 09:18:12 -08:00
trending . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2018-08-04 13:30:44 -07:00
end
2019-06-23 10:54:46 -07:00
end
end
2018-07-30 10:34:57 -07:00
2019-03-23 12:05:13 -07:00
videos
end
2018-08-04 13:30:44 -07:00
2018-11-25 16:13:11 -08:00
get " /api/v1/popular " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2019-01-25 08:50:18 -08:00
env . response . content_type = " application/json "
2021-01-23 10:39:04 -08:00
if ! CONFIG . popular_enabled
2020-12-26 21:12:43 -08:00
error_message = { " error " = > " Administrator has disabled this endpoint. " } . to_json
env . response . status_code = 400
next error_message
end
2019-04-28 16:14:16 -07:00
JSON . build do | json |
2018-11-25 16:13:11 -08:00
json . array do
popular_videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2018-11-25 16:13:11 -08:00
end
end
end
2019-03-23 12:05:13 -07:00
end
2018-11-25 16:13:11 -08:00
2018-08-04 13:30:44 -07:00
get " /api/v1/top " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2019-01-25 08:50:18 -08:00
env . response . content_type = " application/json "
2020-12-26 21:12:43 -08:00
env . response . status_code = 400
{ " error " = > " The Top feed has been removed from Invidious. " } . to_json
2019-03-23 12:05:13 -07:00
end
2018-07-30 10:34:57 -07:00
2018-08-04 13:30:44 -07:00
get " /api/v1/channels/:ucid " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-21 07:40:04 -07:00
env . response . content_type = " application/json "
2018-08-10 07:44:19 -07:00
2018-09-21 07:40:04 -07:00
ucid = env . params . url [ " ucid " ]
2018-11-13 17:04:25 -08:00
sort_by = env . params . query [ " sort_by " ]? . try & . downcase
sort_by || = " newest "
2018-09-04 19:04:40 -07:00
2018-09-21 07:40:04 -07:00
begin
2019-06-28 18:48:24 -07:00
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
env . response . headers [ " Location " ] = env . request . resource . gsub ( ucid , ex . channel_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Channel is unavailable " , { " authorId " = > ex . channel_id } )
2018-09-21 07:40:04 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-09-04 19:04:40 -07:00
end
2018-09-06 06:43:22 -07:00
page = 1
2019-06-28 18:48:24 -07:00
if channel . auto_generated
2019-02-24 14:39:44 -08:00
videos = [ ] of SearchVideo
count = 0
else
begin
2020-06-15 15:10:30 -07:00
count , videos = get_60_videos ( channel . ucid , channel . author , page , channel . auto_generated , sort_by )
2019-02-24 14:39:44 -08:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-02-24 14:39:44 -08:00
end
2018-11-08 15:10:14 -08:00
end
2018-08-28 18:29:08 -07:00
2019-06-28 18:48:24 -07:00
JSON . build do | json |
2019-06-08 11:31:41 -07:00
# TODO: Refactor into `to_json` for InvidiousChannel
2018-08-04 13:30:44 -07:00
json . object do
2019-06-28 18:48:24 -07:00
json . field " author " , channel . author
json . field " authorId " , channel . ucid
json . field " authorUrl " , channel . author_url
2018-08-04 13:30:44 -07:00
json . field " authorBanners " do
json . array do
2019-06-30 10:59:38 -07:00
if channel . banner
qualities = {
{ width : 2560 , height : 424 } ,
{ width : 2120 , height : 351 } ,
{ width : 1060 , height : 175 } ,
}
qualities . each do | quality |
json . object do
2019-07-02 16:53:19 -07:00
json . field " url " , channel . banner . not_nil! . gsub ( " =w1060- " , " =w #{ quality [ :width ] } - " )
2019-06-30 10:59:38 -07:00
json . field " width " , quality [ :width ]
json . field " height " , quality [ :height ]
end
2018-08-04 13:30:44 -07:00
end
2019-06-30 10:59:38 -07:00
json . object do
2019-07-02 16:53:19 -07:00
json . field " url " , channel . banner . not_nil! . split ( " =w1060- " ) [ 0 ]
2019-06-30 10:59:38 -07:00
json . field " width " , 512
json . field " height " , 288
end
2018-08-04 13:30:44 -07:00
end
2018-07-18 12:26:02 -07:00
end
end
2018-03-31 07:51:14 -07:00
2018-08-04 13:30:44 -07:00
json . field " authorThumbnails " do
json . array do
2019-03-25 08:00:18 -07:00
qualities = { 32 , 48 , 76 , 100 , 176 , 512 }
2018-07-18 12:26:02 -07:00
2018-08-04 13:30:44 -07:00
qualities . each do | quality |
json . object do
2020-04-10 09:49:51 -07:00
json . field " url " , channel . author_thumbnail . gsub ( / =s \ d+ / , " =s #{ quality } " )
2018-08-04 13:30:44 -07:00
json . field " width " , quality
json . field " height " , quality
end
end
2018-07-18 12:26:02 -07:00
end
2018-03-31 07:51:14 -07:00
end
2019-06-28 18:48:24 -07:00
json . field " subCount " , channel . sub_count
json . field " totalViews " , channel . total_views
json . field " joined " , channel . joined . to_unix
json . field " paid " , channel . paid
2018-03-31 07:51:14 -07:00
2019-06-28 18:48:24 -07:00
json . field " autoGenerated " , channel . auto_generated
json . field " isFamilyFriendly " , channel . is_family_friendly
json . field " description " , html_to_content ( channel . description_html )
json . field " descriptionHtml " , channel . description_html
2018-09-04 17:27:10 -07:00
2019-06-28 18:48:24 -07:00
json . field " allowedRegions " , channel . allowed_regions
2018-07-28 18:40:59 -07:00
2018-08-04 13:30:44 -07:00
json . field " latestVideos " do
json . array do
2018-08-28 18:29:08 -07:00
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2018-08-04 13:30:44 -07:00
end
end
end
2018-11-27 20:07:45 -08:00
json . field " relatedChannels " do
json . array do
2019-06-28 18:48:24 -07:00
channel . related_channels . each do | related_channel |
2018-11-27 20:07:45 -08:00
json . object do
2019-06-28 18:48:24 -07:00
json . field " author " , related_channel . author
json . field " authorId " , related_channel . ucid
json . field " authorUrl " , related_channel . author_url
2018-11-27 20:07:45 -08:00
json . field " authorThumbnails " do
json . array do
2019-03-25 08:00:18 -07:00
qualities = { 32 , 48 , 76 , 100 , 176 , 512 }
2018-11-27 20:07:45 -08:00
qualities . each do | quality |
json . object do
2019-07-31 17:16:09 -07:00
json . field " url " , related_channel . author_thumbnail . gsub ( / = \ d+ / , " =s #{ quality } " )
2018-11-27 20:07:45 -08:00
json . field " width " , quality
json . field " height " , quality
end
end
end
end
end
end
end
end
2018-08-04 13:30:44 -07:00
end
2018-07-28 18:40:59 -07:00
end
2019-03-23 12:05:13 -07:00
end
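# Sketch of the thumbnail scaling used above (the URL shape is illustrative):
# an author thumbnail whose URL contains a size token such as "=s100" has that
# token rewritten to "=s32", "=s48", ... "=s512" to populate authorThumbnails,
# and banner URLs are resized the same way via their "=w1060-" token.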
2018-07-16 06:18:59 -07:00
2019-04-28 09:47:16 -07:00
{ " /api/v1/channels/:ucid/videos " , " /api/v1/channels/videos/:ucid " } . each do | route |
2018-09-18 08:47:22 -07:00
get route do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-21 07:40:04 -07:00
env . response . content_type = " application/json "
2018-09-20 07:36:09 -07:00
ucid = env . params . url [ " ucid " ]
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
2019-02-15 15:28:54 -08:00
sort_by = env . params . query [ " sort " ]? . try & . downcase
sort_by || = env . params . query [ " sort_by " ]? . try & . downcase
2018-11-13 17:11:16 -08:00
sort_by || = " newest "
2018-07-16 06:18:59 -07:00
2018-09-21 07:40:04 -07:00
begin
2019-06-28 18:48:24 -07:00
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
env . response . headers [ " Location " ] = env . request . resource . gsub ( ucid , ex . channel_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Channel is unavailable " , { " authorId " = > ex . channel_id } )
2018-09-21 07:40:04 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-09-20 07:36:09 -07:00
end
2018-07-16 06:18:59 -07:00
2018-11-08 15:10:14 -08:00
begin
2020-06-15 15:10:30 -07:00
count , videos = get_60_videos ( channel . ucid , channel . author , page , channel . auto_generated , sort_by )
2018-11-08 15:10:14 -08:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-11-08 15:10:14 -08:00
end
2018-07-29 19:01:28 -07:00
2019-06-08 11:31:41 -07:00
JSON . build do | json |
2018-09-20 07:36:09 -07:00
json . array do
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2018-08-04 13:30:44 -07:00
end
end
2019-06-23 10:54:46 -07:00
end
end
end
2018-07-16 06:18:59 -07:00
2019-04-28 09:47:16 -07:00
{ " /api/v1/channels/:ucid/latest " , " /api/v1/channels/latest/:ucid " } . each do | route |
2019-02-19 15:00:06 -08:00
get route do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-02-20 06:49:39 -08:00
2019-02-19 15:00:06 -08:00
env . response . content_type = " application/json "
ucid = env . params . url [ " ucid " ]
begin
videos = get_latest_videos ( ucid )
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-02-19 15:00:06 -08:00
end
2019-04-28 16:14:16 -07:00
JSON . build do | json |
2019-02-19 15:00:06 -08:00
json . array do
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2019-02-19 15:00:06 -08:00
end
end
end
2019-06-23 10:54:46 -07:00
end
end
2019-02-19 15:00:06 -08:00
2019-04-28 09:47:16 -07:00
{ " /api/v1/channels/:ucid/playlists " , " /api/v1/channels/playlists/:ucid " } . each do | route |
2019-02-19 15:05:27 -08:00
get route do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-02-19 15:05:27 -08:00
env . response . content_type = " application/json "
ucid = env . params . url [ " ucid " ]
continuation = env . params . query [ " continuation " ]?
2020-06-15 15:10:30 -07:00
sort_by = env . params . query [ " sort " ]? . try & . downcase ||
env . params . query [ " sort_by " ]? . try & . downcase ||
" last "
2019-02-19 15:05:27 -08:00
begin
2019-06-28 18:48:24 -07:00
channel = get_about_info ( ucid , locale )
2019-09-08 09:08:59 -07:00
rescue ex : ChannelRedirect
env . response . headers [ " Location " ] = env . request . resource . gsub ( ucid , ex . channel_id )
2020-11-30 01:59:21 -08:00
next error_json ( 302 , " Channel is unavailable " , { " authorId " = > ex . channel_id } )
2019-02-19 15:05:27 -08:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-02-19 15:05:27 -08:00
end
2019-06-28 18:48:24 -07:00
items , continuation = fetch_channel_playlists ( channel . ucid , channel . author , channel . auto_generated , continuation , sort_by )
2019-02-19 15:05:27 -08:00
2019-06-28 18:48:24 -07:00
JSON . build do | json |
2019-02-19 15:05:27 -08:00
json . object do
json . field " playlists " do
json . array do
items . each do | item |
2020-06-15 15:10:30 -07:00
item . to_json ( locale , json ) if item . is_a? ( SearchPlaylist )
2019-02-19 15:05:27 -08:00
end
end
end
json . field " continuation " , continuation
end
end
end
2019-03-23 12:05:13 -07:00
end
2019-02-19 15:05:27 -08:00
2019-07-02 16:53:19 -07:00
{ " /api/v1/channels/:ucid/comments " , " /api/v1/channels/comments/:ucid " } . each do | route |
get route do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
ucid = env . params . url [ " ucid " ]
2019-07-09 07:31:04 -07:00
thin_mode = env . params . query [ " thin_mode " ]?
thin_mode = thin_mode == " true "
2019-07-02 16:53:19 -07:00
2019-07-09 07:31:04 -07:00
format = env . params . query [ " format " ]?
format || = " json "
continuation = env . params . query [ " continuation " ]?
2019-07-02 16:53:19 -07:00
# sort_by = env.params.query["sort_by"]?.try &.downcase
begin
2020-06-15 15:10:30 -07:00
fetch_channel_community ( ucid , continuation , locale , format , thin_mode )
2019-07-02 16:53:19 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-07-02 16:53:19 -07:00
end
end
end
2018-09-22 08:49:42 -07:00
get " /api/v1/channels/search/:ucid " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-22 08:49:42 -07:00
env . response . content_type = " application/json "
ucid = env . params . url [ " ucid " ]
query = env . params . query [ " q " ]?
query || = " "
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
count , search_results = channel_search ( query , page , ucid )
2019-06-08 11:31:41 -07:00
JSON . build do | json |
2018-09-22 08:49:42 -07:00
json . array do
search_results . each do | item |
2020-06-15 15:10:30 -07:00
item . to_json ( locale , json )
2018-09-22 08:49:42 -07:00
end
end
end
2019-03-23 12:05:13 -07:00
end
2018-09-22 08:49:42 -07:00
2018-08-04 13:30:44 -07:00
get " /api/v1/search " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-02-06 16:21:40 -08:00
region = env . params . query [ " region " ]?
2018-12-20 13:32:09 -08:00
2018-09-22 08:49:42 -07:00
env . response . content_type = " application/json "
2018-08-04 21:07:38 -07:00
query = env . params . query [ " q " ]?
query || = " "
2018-08-02 15:18:57 -07:00
2018-08-04 13:30:44 -07:00
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
2018-08-04 15:12:58 -07:00
sort_by = env . params . query [ " sort_by " ]? . try & . downcase
sort_by || = " relevance "
date = env . params . query [ " date " ]? . try & . downcase
date || = " "
2019-02-26 12:31:37 -08:00
duration = env . params . query [ " duration " ]? . try & . downcase
2018-08-04 15:12:58 -07:00
duration || = " "
features = env . params . query [ " features " ]? . try & . split ( " , " ) . map { | feature | feature . downcase }
features || = [ ] of String
2018-09-20 07:36:09 -07:00
content_type = env . params . query [ " type " ]? . try & . downcase
content_type || = " video "
2018-08-04 15:12:58 -07:00
begin
2018-09-17 14:38:18 -07:00
search_params = produce_search_params ( sort_by , date , content_type , duration , features )
2018-08-04 15:12:58 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 400 , ex )
2018-08-04 15:12:58 -07:00
end
2019-06-28 19:17:56 -07:00
count , search_results = search ( query , page , search_params , region ) . as ( Tuple )
2019-06-08 11:31:41 -07:00
JSON . build do | json |
2018-08-04 13:30:44 -07:00
json . array do
2018-09-20 07:36:09 -07:00
search_results . each do | item |
2020-06-15 15:10:30 -07:00
item . to_json ( locale , json )
2018-08-04 13:30:44 -07:00
end
end
end
2019-03-23 12:05:13 -07:00
end
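# Example usage (hostname is a placeholder; unspecified filters fall back to
# the defaults parsed above):
#
#   curl "https://invidious.example.com/api/v1/search?q=crystal&type=video&sort_by=relevance&page=2"
#
# Additional filters (date, duration, features, region) are accepted and folded
# into the YouTube search parameters by produce_search_params.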
2018-08-02 15:18:57 -07:00
2019-05-21 05:15:15 -07:00
get " /api/v1/search/suggestions " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
region = env . params . query [ " region " ]?
env . response . content_type = " application/json "
query = env . params . query [ " q " ]?
query || = " "
begin
2020-05-08 07:00:53 -07:00
headers = HTTP :: Headers { " :authority " = > " suggestqueries.google.com " }
response = YT_POOL . client & . get ( " /complete/search?hl=en&gl= #{ region } &client=youtube&ds=yt&q= #{ URI . encode_www_form ( query ) } &callback=suggestCallback " , headers ) . body
2019-11-28 06:20:44 -08:00
2019-05-21 05:15:15 -07:00
body = response [ 35 .. - 2 ]
body = JSON . parse ( body ) . as_a
suggestions = body [ 1 ] . as_a [ 0 .. - 2 ]
JSON . build do | json |
json . object do
json . field " query " , body [ 0 ] . as_s
json . field " suggestions " do
json . array do
suggestions . each do | suggestion |
json . string suggestion [ 0 ] . as_s
end
end
end
end
end
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-05-21 05:15:15 -07:00
end
end
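# Note on the slicing above: the upstream endpoint replies with JSONP rather
# than bare JSON, roughly of the shape (the exact wrapper is an assumption)
#
#   window.google.ac.h(["<query>", [["<suggestion>", 0], ...], {...}])
#
# so `response[35..-2]` strips the wrapper and the trailing ")" before the
# body is parsed and re-emitted as {"query", "suggestions"}.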
2019-08-05 16:49:13 -07:00
{ " /api/v1/playlists/:plid " , " /api/v1/auth/playlists/:plid " } . each do | route |
get route do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-10-07 19:11:33 -07:00
2019-08-05 16:49:13 -07:00
env . response . content_type = " application/json "
plid = env . params . url [ " plid " ]
2018-10-06 20:18:50 -07:00
2019-08-05 16:49:13 -07:00
offset = env . params . query [ " index " ]? . try & . to_i?
offset || = env . params . query [ " page " ]? . try & . to_i? . try { | page | ( page - 1 ) * 100 }
offset || = 0
2018-08-15 08:22:36 -07:00
2019-08-05 16:49:13 -07:00
continuation = env . params . query [ " continuation " ]?
2018-09-28 07:54:45 -07:00
2019-08-05 16:49:13 -07:00
format = env . params . query [ " format " ]?
format || = " json "
2018-08-15 08:22:36 -07:00
2019-08-05 16:49:13 -07:00
if plid . starts_with? " RD "
next env . redirect " /api/v1/mixes/ #{ plid } "
end
2018-08-15 08:22:36 -07:00
2019-08-05 16:49:13 -07:00
begin
playlist = get_playlist ( PG_DB , plid , locale )
2021-01-03 20:35:59 -08:00
rescue ex : InfoException
next error_json ( 404 , ex )
2019-08-05 16:49:13 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
2018-09-25 08:28:40 -07:00
2019-08-05 16:49:13 -07:00
user = env . get? ( " user " ) . try & . as ( User )
2019-10-16 05:21:26 -07:00
if ! playlist || playlist . privacy . private? && playlist . author != user . try & . email
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
2018-09-25 08:28:40 -07:00
2020-06-15 15:10:30 -07:00
response = playlist . to_json ( offset , locale , continuation : continuation )
2018-08-15 08:22:36 -07:00
2019-08-05 16:49:13 -07:00
if format == " html "
response = JSON . parse ( response )
playlist_html = template_playlist ( response )
2019-10-21 18:40:03 -07:00
index , next_video = response [ " videos " ] . as_a . skip ( 1 ) . select { | video | ! video [ " author " ] . as_s . empty? } [ 0 ]? . try { | v | { v [ " index " ] , v [ " videoId " ] } } || { nil , nil }
2018-08-15 08:22:36 -07:00
2019-08-05 16:49:13 -07:00
response = {
" playlistHtml " = > playlist_html ,
" index " = > index ,
" nextVideo " = > next_video ,
} . to_json
2018-08-15 08:22:36 -07:00
end
2018-10-07 19:11:33 -07:00
2019-08-05 16:49:13 -07:00
response
2018-10-07 19:11:33 -07:00
end
2019-03-23 12:05:13 -07:00
end
2018-08-15 08:22:36 -07:00
2018-09-28 21:12:35 -07:00
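# GET /api/v1/mixes/:rdid
# Fetches an auto-generated mix ("RD..." id) and returns its title, id and videos;
# ?continuation defaults to the mix's seed video, and ?format=html returns a rendered
# playlist fragment plus the next video to play.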
get " /api/v1/mixes/:rdid " do | env |
2019-03-11 10:44:25 -07:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-12-20 13:32:09 -08:00
2018-09-28 21:12:35 -07:00
env . response . content_type = " application/json "
rdid = env . params . url [ " rdid " ]
continuation = env . params . query [ " continuation " ]?
2019-02-15 15:28:54 -08:00
continuation || = rdid . lchop ( " RD " ) [ 0 , 11 ]
2018-09-28 21:12:35 -07:00
2018-10-07 19:11:33 -07:00
format = env . params . query [ " format " ]?
format || = " json "
2018-09-28 21:12:35 -07:00
begin
2018-12-20 13:32:09 -08:00
mix = fetch_mix ( rdid , continuation , locale : locale )
2018-10-31 07:15:17 -07:00
if ! rdid . ends_with? continuation
mix = fetch_mix ( rdid , mix . videos [ 1 ] . id )
index = mix . videos . index ( mix . videos . select { | video | video . id == continuation } [ 0 ]? )
end
# Default to the start of the mix if no offset was found for the requested continuation
index || = 0
mix . videos = mix . videos [ index .. - 1 ]
2018-09-28 21:12:35 -07:00
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2018-09-28 21:12:35 -07:00
end
response = JSON . build do | json |
json . object do
json . field " title " , mix . title
json . field " mixId " , mix . id
json . field " videos " do
json . array do
mix . videos . each do | video |
json . object do
json . field " title " , video . title
json . field " videoId " , video . id
json . field " author " , video . author
json . field " authorId " , video . ucid
json . field " authorUrl " , " /channel/ #{ video . ucid } "
json . field " videoThumbnails " do
json . array do
2020-06-15 15:10:30 -07:00
generate_thumbnails ( json , video . id )
2018-09-28 21:12:35 -07:00
end
end
json . field " index " , video . index
json . field " lengthSeconds " , video . length_seconds
end
end
end
end
end
end
2018-10-07 19:11:33 -07:00
if format == " html "
response = JSON . parse ( response )
playlist_html = template_mix ( response )
2019-10-21 16:00:56 -07:00
next_video = response [ " videos " ] . as_a . select { | video | ! video [ " author " ] . as_s . empty? } [ 0 ]? . try & . [ " videoId " ]
2018-10-07 19:11:33 -07:00
response = {
" playlistHtml " = > playlist_html ,
" nextVideo " = > next_video ,
} . to_json
end
2019-03-23 08:24:30 -07:00
response
2018-09-28 21:12:35 -07:00
end
2019-06-07 10:39:12 -07:00
# Authenticated endpoints
2019-04-10 15:58:42 -07:00
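# GET/POST /api/v1/auth/notifications
# Opens a server-sent event stream of subscription notifications for the requested
# topics (comma-separated, capped at 1000).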
get " /api/v1/auth/notifications " do | env |
2019-06-02 05:41:53 -07:00
env . response . content_type = " text/event-stream "
2019-04-10 15:58:42 -07:00
topics = env . params . query [ " topics " ]? . try & . split ( " , " ) . uniq . first ( 1000 )
topics || = [ ] of String
2020-06-15 15:10:30 -07:00
create_notification_stream ( env , topics , connection_channel )
2019-05-21 07:01:17 -07:00
end
2019-04-10 15:58:42 -07:00
2019-05-21 07:01:17 -07:00
post " /api/v1/auth/notifications " do | env |
2019-06-02 05:41:53 -07:00
env . response . content_type = " text/event-stream "
2019-05-21 07:01:17 -07:00
topics = env . params . body [ " topics " ]? . try & . split ( " , " ) . uniq . first ( 1000 )
topics || = [ ] of String
2019-04-10 15:58:42 -07:00
2020-06-15 15:10:30 -07:00
create_notification_stream ( env , topics , connection_channel )
2019-04-10 15:58:42 -07:00
end
2019-04-30 19:01:57 -07:00
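# GET/POST /api/v1/auth/preferences
# Reads or updates the authenticated user's preferences; an unparsable JSON body
# falls back to the user's current preferences.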
get " /api/v1/auth/preferences " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
user . preferences . to_json
end
2019-04-18 14:23:50 -07:00
2019-04-30 19:01:57 -07:00
post " /api/v1/auth/preferences " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
begin
2020-07-26 07:58:50 -07:00
preferences = Preferences . from_json ( env . request . body || " {} " )
2019-04-30 19:01:57 -07:00
rescue
preferences = user . preferences
end
PG_DB . exec ( " UPDATE users SET preferences = $1 WHERE email = $2 " , preferences . to_json , user . email )
env . response . status_code = 204
end
2019-04-18 14:23:50 -07:00
2019-06-07 10:39:12 -07:00
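# GET /api/v1/auth/feed
# Returns the authenticated user's subscription feed as JSON, split into
# "notifications" and "videos", paginated via ?page= and ?max_results=.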
get " /api/v1/auth/feed " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-06-08 14:04:55 -07:00
max_results = env . params . query [ " max_results " ]? . try & . to_i?
max_results || = user . preferences . max_results
max_results || = CONFIG . default_user_preferences . max_results
2019-06-07 10:39:12 -07:00
page = env . params . query [ " page " ]? . try & . to_i?
page || = 1
videos , notifications = get_subscription_feed ( PG_DB , user , max_results , page )
JSON . build do | json |
json . object do
json . field " notifications " do
json . array do
notifications . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2019-06-07 10:39:12 -07:00
end
end
end
json . field " videos " do
json . array do
videos . each do | video |
2020-06-15 15:10:30 -07:00
video . to_json ( locale , json )
2019-06-07 10:39:12 -07:00
end
end
end
end
end
end
2019-04-22 08:40:29 -07:00
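# GET/POST/DELETE /api/v1/auth/subscriptions[/:ucid]
# Lists the user's subscribed channels, or subscribes/unsubscribes a channel by UCID.
# Subscribing fetches the channel first, and both writes flag the feed for
# regeneration (feed_needs_update).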
get " /api/v1/auth/subscriptions " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
2019-04-18 14:23:50 -07:00
2019-04-22 08:40:29 -07:00
if user . subscriptions . empty?
values = " '{}' "
else
values = " VALUES #{ user . subscriptions . map { | id | %( ( ' #{ id } ' ) ) } . join ( " , " ) } "
end
2019-04-18 14:23:50 -07:00
2019-04-22 08:40:29 -07:00
subscriptions = PG_DB . query_all ( " SELECT * FROM channels WHERE id = ANY( #{ values } ) " , as : InvidiousChannel )
JSON . build do | json |
json . array do
subscriptions . each do | subscription |
json . object do
json . field " author " , subscription . author
json . field " authorId " , subscription . id
end
end
end
end
end
post " /api/v1/auth/subscriptions/:ucid " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
ucid = env . params . url [ " ucid " ]
if ! user . subscriptions . includes? ucid
2021-01-04 07:51:06 -08:00
get_channel ( ucid , PG_DB , false , false )
2019-06-01 08:19:18 -07:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2 " , ucid , user . email )
2019-04-22 08:40:29 -07:00
end
2019-05-15 10:26:29 -07:00
# For Google accounts, access tokens don't have enough information to
# make a request on the user's behalf, which is why we don't sync with
# YouTube.
2019-04-22 08:40:29 -07:00
env . response . status_code = 204
end
delete " /api/v1/auth/subscriptions/:ucid " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
ucid = env . params . url [ " ucid " ]
2019-06-01 08:19:18 -07:00
PG_DB . exec ( " UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2 " , ucid , user . email )
2019-04-22 08:40:29 -07:00
env . response . status_code = 204
end
2019-04-18 14:23:50 -07:00
2019-08-05 16:49:13 -07:00
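# /api/v1/auth/playlists and /api/v1/auth/playlists/:plid[/videos[/:index]]
# CRUD for Invidious playlists: list, create (max 100 per user), update, delete,
# and add or remove videos (max 500 per playlist). Video entries are keyed by a
# random 64-bit index stored in the playlist's "index" array.
# Example create request body (illustrative): {"title": "Watch later", "privacy": "private"}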
get " /api/v1/auth/playlists " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
playlists = PG_DB . query_all ( " SELECT * FROM playlists WHERE author = $1 " , user . email , as : InvidiousPlaylist )
JSON . build do | json |
json . array do
playlists . each do | playlist |
2020-06-15 15:10:30 -07:00
playlist . to_json ( 0 , locale , json )
2019-08-05 16:49:13 -07:00
end
end
end
end
post " /api/v1/auth/playlists " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
title = env . params . json [ " title " ]? . try & . as ( String ) . delete ( " <> " ) . byte_slice ( 0 , 150 )
if ! title
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Invalid title. " )
2019-08-05 16:49:13 -07:00
end
privacy = env . params . json [ " privacy " ]? . try { | privacy | PlaylistPrivacy . parse ( privacy . as ( String ) . downcase ) }
if ! privacy
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Invalid privacy setting. " )
2019-08-05 16:49:13 -07:00
end
if PG_DB . query_one ( " SELECT count(*) FROM playlists WHERE author = $1 " , user . email , as : Int64 ) >= 100
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " User cannot have more than 100 playlists. " )
2019-08-05 16:49:13 -07:00
end
playlist = create_playlist ( PG_DB , title , privacy , user )
2020-06-15 15:10:30 -07:00
env . response . headers [ " Location " ] = " #{ HOST_URL } /api/v1/auth/playlists/ #{ playlist . id } "
2019-08-05 16:49:13 -07:00
env . response . status_code = 201
{
" title " = > title ,
" playlistId " = > playlist . id ,
} . to_json
end
patch " /api/v1/auth/playlists/:plid " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 05:21:26 -07:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
if playlist . author != user . email
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " Invalid user " )
2019-08-05 16:49:13 -07:00
end
title = env . params . json [ " title " ]? . try & . as ( String ) . delete ( " <> " ) . byte_slice ( 0 , 150 ) || playlist . title
privacy = env . params . json [ " privacy " ]? . try { | privacy | PlaylistPrivacy . parse ( privacy . as ( String ) . downcase ) } || playlist . privacy
description = env . params . json [ " description " ]? . try & . as ( String ) . delete ( " \ r " ) || playlist . description
if title != playlist . title ||
privacy != playlist . privacy ||
description != playlist . description
updated = Time . utc
else
updated = playlist . updated
end
PG_DB . exec ( " UPDATE playlists SET title = $1, privacy = $2, description = $3, updated = $4 WHERE id = $5 " , title , privacy , description , updated , plid )
env . response . status_code = 204
end
delete " /api/v1/auth/playlists/:plid " do | env |
2020-11-30 01:59:21 -08:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-08-05 16:49:13 -07:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 05:21:26 -07:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
if playlist . author != user . email
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " Invalid user " )
2019-08-05 16:49:13 -07:00
end
PG_DB . exec ( " DELETE FROM playlist_videos * WHERE plid = $1 " , plid )
PG_DB . exec ( " DELETE FROM playlists * WHERE id = $1 " , plid )
env . response . status_code = 204
end
post " /api/v1/auth/playlists/:plid/videos " do | env |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 05:21:26 -07:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
if playlist . author != user . email
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " Invalid user " )
2019-08-05 16:49:13 -07:00
end
if playlist . index . size >= 500
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Playlist cannot have more than 500 videos " )
2019-08-05 16:49:13 -07:00
end
video_id = env . params . json [ " videoId " ]? . try & . as ( String )
if ! video_id
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " Invalid videoId " )
2019-08-05 16:49:13 -07:00
end
begin
video = get_video ( video_id , PG_DB )
rescue ex
2020-11-30 01:59:21 -08:00
next error_json ( 500 , ex )
2019-08-05 16:49:13 -07:00
end
2020-07-26 07:58:50 -07:00
playlist_video = PlaylistVideo . new ( {
title : video . title ,
id : video . id ,
author : video . author ,
ucid : video . ucid ,
2019-08-05 16:49:13 -07:00
length_seconds : video . length_seconds ,
2020-07-26 07:58:50 -07:00
published : video . published ,
plid : plid ,
live_now : video . live_now ,
index : Random :: Secure . rand ( 0 _i64 .. Int64 :: MAX ) ,
} )
2019-08-05 16:49:13 -07:00
video_array = playlist_video . to_a
args = arg_array ( video_array )
PG_DB . exec ( " INSERT INTO playlist_videos VALUES ( #{ args } ) " , args : video_array )
2020-10-31 21:14:46 -07:00
PG_DB . exec ( " UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3 " , playlist_video . index , Time . utc , plid )
2019-08-05 16:49:13 -07:00
2020-06-15 15:10:30 -07:00
env . response . headers [ " Location " ] = " #{ HOST_URL } /api/v1/auth/playlists/ #{ plid } /videos/ #{ playlist_video . index . to_u64 . to_s ( 16 ) . upcase } "
2019-08-05 16:49:13 -07:00
env . response . status_code = 201
2020-06-15 15:10:30 -07:00
playlist_video . to_json ( locale , index : playlist . index . size )
2019-08-05 16:49:13 -07:00
end
delete " /api/v1/auth/playlists/:plid/videos/:index " do | env |
2020-11-30 01:59:21 -08:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-08-05 16:49:13 -07:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
plid = env . params . url [ " plid " ]
index = env . params . url [ " index " ] . to_i64 ( 16 )
playlist = PG_DB . query_one? ( " SELECT * FROM playlists WHERE id = $1 " , plid , as : InvidiousPlaylist )
2019-10-16 05:21:26 -07:00
if ! playlist || playlist . author != user . email && playlist . privacy . private?
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not exist. " )
2019-08-05 16:49:13 -07:00
end
if playlist . author != user . email
2020-11-30 01:59:21 -08:00
next error_json ( 403 , " Invalid user " )
2019-08-05 16:49:13 -07:00
end
if ! playlist . index . includes? index
2020-11-30 01:59:21 -08:00
next error_json ( 404 , " Playlist does not contain index " )
2019-08-05 16:49:13 -07:00
end
PG_DB . exec ( " DELETE FROM playlist_videos * WHERE index = $1 " , index )
2020-10-31 21:14:46 -07:00
PG_DB . exec ( " UPDATE playlists SET index = array_remove(index, $1), video_count = cardinality(index) - 1, updated = $2 WHERE id = $3 " , index , Time . utc , plid )
2019-08-05 16:49:13 -07:00
env . response . status_code = 204
end
# patch "/api/v1/auth/playlists/:plid/videos/:index" do |env|
# TODO: Playlist stub
# end
2019-04-18 14:23:50 -07:00
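# /api/v1/auth/tokens[/register|/unregister]
# Token management: list the user's sessions, issue a new scoped token (form or JSON
# body with "scopes", optional "callbackUrl" and "expire"; browser sessions are shown
# an authorize page with a CSRF token first), and revoke a session (your own, or any
# session if the token holds the GET:tokens scope).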
get " /api/v1/auth/tokens " do | env |
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
scopes = env . get ( " scopes " ) . as ( Array ( String ) )
tokens = PG_DB . query_all ( " SELECT id, issued FROM session_ids WHERE email = $1 " , user . email , as : { session : String , issued : Time } )
JSON . build do | json |
json . array do
tokens . each do | token |
json . object do
json . field " session " , token [ :session ]
json . field " issued " , token [ :issued ] . to_unix
end
end
end
end
end
post " /api/v1/auth/tokens/register " do | env |
user = env . get ( " user " ) . as ( User )
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
case env . request . headers [ " Content-Type " ]?
when " application/x-www-form-urlencoded "
scopes = env . params . body . select { | k , v | k . match ( / ^scopes \ [ \ d+ \ ]$ / ) } . map { | k , v | v }
callback_url = env . params . body [ " callbackUrl " ]?
expire = env . params . body [ " expire " ]? . try & . to_i?
when " application/json "
scopes = env . params . json [ " scopes " ] . as ( Array ) . map { | v | v . as_s }
callback_url = env . params . json [ " callbackUrl " ]? . try & . as ( String )
expire = env . params . json [ " expire " ]? . try & . as ( Int64 )
else
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Invalid or missing header 'Content-Type' " )
2019-04-18 14:23:50 -07:00
end
if callback_url && callback_url . empty?
callback_url = nil
end
if callback_url
callback_url = URI . parse ( callback_url )
end
if sid = env . get? ( " sid " ) . try & . as ( String )
env . response . content_type = " text/html "
csrf_token = generate_response ( sid , { " :authorize_token " } , HMAC_KEY , PG_DB , use_nonce : true )
next templated " authorize_token "
else
env . response . content_type = " application/json "
superset_scopes = env . get ( " scopes " ) . as ( Array ( String ) )
authorized_scopes = [ ] of String
scopes . each do | scope |
if scopes_include_scope ( superset_scopes , scope )
authorized_scopes << scope
end
end
access_token = generate_token ( user . email , authorized_scopes , expire , HMAC_KEY , PG_DB )
if callback_url
2019-09-24 10:31:33 -07:00
access_token = URI . encode_www_form ( access_token )
2019-04-18 14:23:50 -07:00
if query = callback_url . query
query = HTTP :: Params . parse ( query . not_nil! )
else
query = HTTP :: Params . new
end
query [ " token " ] = access_token
callback_url . query = query . to_s
env . redirect callback_url . to_s
else
access_token
end
end
end
post " /api/v1/auth/tokens/unregister " do | env |
2020-11-30 01:59:21 -08:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2019-04-18 14:23:50 -07:00
env . response . content_type = " application/json "
user = env . get ( " user " ) . as ( User )
scopes = env . get ( " scopes " ) . as ( Array ( String ) )
session = env . params . json [ " session " ]? . try & . as ( String )
session || = env . get ( " session " ) . as ( String )
# Allow a token to revoke other tokens if it holds the required scope
if session == env . get ( " session " ) . as ( String )
PG_DB . exec ( " DELETE FROM session_ids * WHERE id = $1 " , session )
elsif scopes_include_scope ( scopes , " GET:tokens " )
PG_DB . exec ( " DELETE FROM session_ids * WHERE id = $1 " , session )
else
2020-11-30 01:59:21 -08:00
next error_json ( 400 , " Cannot revoke session #{ session } " )
2019-04-18 14:23:50 -07:00
end
env . response . status_code = 204
end
2018-08-07 11:10:52 -07:00
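# These two routes let DASH <BaseURL> entries pointing at
# /api/manifest/dash/id/videoplayback be served by the generic /videoplayback proxy.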
get " /api/manifest/dash/id/videoplayback " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 11:23:27 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 11:10:52 -07:00
env . redirect " /videoplayback? #{ env . params . query } "
end
get " /api/manifest/dash/id/videoplayback/* " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 11:23:27 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 11:10:52 -07:00
env . redirect env . request . path . lchop ( " /api/manifest/dash/id " )
end
2018-07-16 06:18:59 -07:00
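# GET /api/manifest/dash/id/:id
# Builds a DASH MPD for the video: it rewrites YouTube's own manifest when one is
# available (dash_manifest_url), otherwise it generates one from the adaptive formats,
# grouped into audio and video AdaptationSets per MIME type. ?local=true routes
# BaseURLs through this instance; ?unique_res keeps only one stream per height.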
get " /api/manifest/dash/id/:id " do | env |
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
env . response . content_type = " application/dash+xml "
local = env . params . query [ " local " ]? . try & . == " true "
id = env . params . url [ " id " ]
2018-11-17 15:37:57 -08:00
region = env . params . query [ " region " ]?
2018-07-16 06:18:59 -07:00
2019-06-04 18:54:38 -07:00
# Since some implementations build their quality list from resolution alone, regardless of codec,
2019-06-06 19:32:39 -07:00
# we can opt to only add a Representation to an AdaptationSet if its height is unique within that set
2020-01-08 17:27:21 -08:00
unique_res = env . params . query [ " unique_res " ]? . try { | q | ( q == " true " || q == " 1 " ) . to_unsafe }
2019-06-04 18:54:38 -07:00
2018-07-16 06:18:59 -07:00
begin
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , region : region )
2018-10-06 20:22:22 -07:00
rescue ex : VideoRedirect
2019-09-08 09:08:59 -07:00
next env . redirect env . request . resource . gsub ( id , ex . video_id )
2018-07-16 06:18:59 -07:00
rescue ex
2019-03-23 08:24:30 -07:00
env . response . status_code = 403
next
2018-07-16 06:18:59 -07:00
end
2020-06-15 15:33:23 -07:00
if dashmpd = video . dash_manifest_url
2020-01-08 17:27:21 -08:00
manifest = YT_POOL . client & . get ( URI . parse ( dashmpd ) . full_path ) . body
2018-07-16 06:18:59 -07:00
manifest = manifest . gsub ( / <BaseURL>[^<]+< \/ BaseURL> / ) do | baseurl |
url = baseurl . lchop ( " <BaseURL> " )
url = url . rchop ( " </BaseURL> " )
if local
2021-01-10 15:00:45 -08:00
uri = URI . parse ( url )
url = " #{ uri . full_path } host/ #{ uri . host } / "
2018-07-16 06:18:59 -07:00
end
" <BaseURL> #{ url } </BaseURL> "
end
next manifest
end
2020-06-15 15:33:23 -07:00
adaptive_fmts = video . adaptive_fmts
2018-07-16 06:18:59 -07:00
if local
adaptive_fmts . each do | fmt |
2020-06-15 15:33:23 -07:00
fmt [ " url " ] = JSON :: Any . new ( URI . parse ( fmt [ " url " ] . as_s ) . full_path )
2018-07-16 06:18:59 -07:00
end
end
2020-06-15 15:33:23 -07:00
audio_streams = video . audio_streams
video_streams = video . video_streams . sort_by { | stream | { stream [ " width " ] . as_i , stream [ " fps " ] . as_i } } . reverse
2018-08-04 21:07:38 -07:00
2019-04-12 09:08:33 -07:00
XML . build ( indent : " " , encoding : " UTF-8 " ) do | xml |
2018-08-11 13:01:22 -07:00
xml . element ( " MPD " , " xmlns " : " urn:mpeg:dash:schema:mpd:2011 " ,
2019-06-04 18:54:38 -07:00
" profiles " : " urn:mpeg:dash:profile:full:2011 " , minBufferTime : " PT1.5S " , type : " static " ,
2019-07-29 17:41:45 -07:00
mediaPresentationDuration : " PT #{ video . length_seconds } S " ) do
2018-07-16 06:18:59 -07:00
xml . element ( " Period " ) do
2019-04-12 09:19:54 -07:00
i = 0
2019-04-12 06:04:59 -07:00
{ " audio/mp4 " , " audio/webm " } . each do | mime_type |
2020-06-15 15:33:23 -07:00
mime_streams = audio_streams . select { | stream | stream [ " mimeType " ] . as_s . starts_with? mime_type }
next if mime_streams . empty?
2019-07-09 08:08:00 -07:00
2019-04-12 09:19:54 -07:00
xml . element ( " AdaptationSet " , id : i , mimeType : mime_type , startWithSAP : 1 , subsegmentAlignment : true ) do
2019-07-09 08:08:00 -07:00
mime_streams . each do | fmt |
2020-06-15 15:33:23 -07:00
codecs = fmt [ " mimeType " ] . as_s . split ( " codecs= " ) [ 1 ] . strip ( '"' )
bandwidth = fmt [ " bitrate " ] . as_i
itag = fmt [ " itag " ] . as_i
url = fmt [ " url " ] . as_s
2019-04-12 06:04:59 -07:00
xml . element ( " Representation " , id : fmt [ " itag " ] , codecs : codecs , bandwidth : bandwidth ) do
xml . element ( " AudioChannelConfiguration " , schemeIdUri : " urn:mpeg:dash:23003:3:audio_channel_configuration:2011 " ,
value : " 2 " )
xml . element ( " BaseURL " ) { xml . text url }
2020-06-15 15:33:23 -07:00
xml . element ( " SegmentBase " , indexRange : " #{ fmt [ " indexRange " ] [ " start " ] } - #{ fmt [ " indexRange " ] [ " end " ] } " ) do
xml . element ( " Initialization " , range : " #{ fmt [ " initRange " ] [ " start " ] } - #{ fmt [ " initRange " ] [ " end " ] } " )
2019-04-12 06:04:59 -07:00
end
2018-07-16 06:18:59 -07:00
end
end
end
2019-04-12 09:19:54 -07:00
i += 1
2018-07-16 06:18:59 -07:00
end
2020-06-15 15:33:23 -07:00
potential_heights = { 4320 , 2160 , 1440 , 1080 , 720 , 480 , 360 , 240 , 144 }
2019-04-12 06:04:59 -07:00
{ " video/mp4 " , " video/webm " } . each do | mime_type |
2020-06-15 15:33:23 -07:00
mime_streams = video_streams . select { | stream | stream [ " mimeType " ] . as_s . starts_with? mime_type }
2020-01-08 17:27:21 -08:00
next if mime_streams . empty?
2019-07-09 08:08:00 -07:00
2019-06-06 19:32:39 -07:00
heights = [ ] of Int32
2019-04-12 09:19:54 -07:00
xml . element ( " AdaptationSet " , id : i , mimeType : mime_type , startWithSAP : 1 , subsegmentAlignment : true , scanType : " progressive " ) do
2019-07-09 08:08:00 -07:00
mime_streams . each do | fmt |
2020-06-15 15:33:23 -07:00
codecs = fmt [ " mimeType " ] . as_s . split ( " codecs= " ) [ 1 ] . strip ( '"' )
bandwidth = fmt [ " bitrate " ] . as_i
itag = fmt [ " itag " ] . as_i
url = fmt [ " url " ] . as_s
width = fmt [ " width " ] . as_i
height = fmt [ " height " ] . as_i
2019-05-30 13:39:02 -07:00
# Resolutions reported by YouTube player (may not accurately reflect source)
2020-06-15 15:33:23 -07:00
height = potential_heights . min_by { | i | ( height - i ) . abs }
2019-06-04 18:54:38 -07:00
next if unique_res && heights . includes? height
heights << height
2019-04-12 06:04:59 -07:00
xml . element ( " Representation " , id : itag , codecs : codecs , width : width , height : height ,
startWithSAP : " 1 " , maxPlayoutRate : " 1 " ,
bandwidth : bandwidth , frameRate : fmt [ " fps " ] ) do
xml . element ( " BaseURL " ) { xml . text url }
2020-06-15 15:33:23 -07:00
xml . element ( " SegmentBase " , indexRange : " #{ fmt [ " indexRange " ] [ " start " ] } - #{ fmt [ " indexRange " ] [ " end " ] } " ) do
xml . element ( " Initialization " , range : " #{ fmt [ " initRange " ] [ " start " ] } - #{ fmt [ " initRange " ] [ " end " ] } " )
2019-04-12 06:04:59 -07:00
end
2018-07-16 06:18:59 -07:00
end
end
end
2019-04-12 09:19:54 -07:00
i += 1
2018-07-16 06:18:59 -07:00
end
end
end
end
end
2018-07-27 16:25:58 -07:00
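# GET /api/manifest/hls_variant/* and /api/manifest/hls_playlist/*
# Proxies YouTube HLS master and media playlists; with ?local=true the segment URLs
# are rewritten to go through this instance's /videoplayback proxy.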
get " /api/manifest/hls_variant/* " do | env |
2020-06-15 15:33:23 -07:00
response = YT_POOL . client & . get ( env . request . path )
2018-07-27 16:25:58 -07:00
2020-06-15 15:33:23 -07:00
if response . status_code != 200
env . response . status_code = response . status_code
2019-03-23 08:24:30 -07:00
next
2018-07-27 16:25:58 -07:00
end
2019-04-25 10:41:35 -07:00
local = env . params . query [ " local " ]? . try & . == " true "
2018-07-27 16:25:58 -07:00
env . response . content_type = " application/x-mpegURL "
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
2018-08-04 21:07:38 -07:00
2020-06-15 15:10:30 -07:00
manifest = response . body
2019-04-25 10:41:35 -07:00
if local
2020-06-15 15:10:30 -07:00
manifest = manifest . gsub ( " https://www.youtube.com " , HOST_URL )
2019-04-25 10:41:35 -07:00
manifest = manifest . gsub ( " index.m3u8 " , " index.m3u8?local=true " )
end
manifest
2018-07-27 16:25:58 -07:00
end
get " /api/manifest/hls_playlist/* " do | env |
2020-06-15 15:33:23 -07:00
response = YT_POOL . client & . get ( env . request . path )
2018-07-27 16:25:58 -07:00
2020-06-15 15:33:23 -07:00
if response . status_code != 200
env . response . status_code = response . status_code
2019-03-23 08:24:30 -07:00
next
2018-07-27 16:25:58 -07:00
end
2019-04-25 10:41:35 -07:00
local = env . params . query [ " local " ]? . try & . == " true "
env . response . content_type = " application/x-mpegURL "
env . response . headers . add ( " Access-Control-Allow-Origin " , " * " )
2020-06-15 15:10:30 -07:00
manifest = response . body
2019-04-25 10:41:35 -07:00
if local
2019-07-05 10:08:39 -07:00
manifest = manifest . gsub ( / ^https: \/ \/ r \ d---.{11} \ .c \ .youtube \ .com[^ \ n]* /m ) do | match |
path = URI . parse ( match ) . path
path = path . lchop ( " /videoplayback/ " )
path = path . rchop ( " / " )
path = path . gsub ( / mime \/ \ w+ \/ \ w+ / ) do | mimetype |
mimetype = mimetype . split ( " / " )
mimetype [ 0 ] + " / " + mimetype [ 1 ] + " %2F " + mimetype [ 2 ]
end
path = path . split ( " / " )
raw_params = { } of String = > Array ( String )
path . each_slice ( 2 ) do | pair |
key , value = pair
2019-09-24 10:31:33 -07:00
value = URI . decode_www_form ( value )
2019-07-05 10:08:39 -07:00
if raw_params [ key ]?
raw_params [ key ] << value
else
raw_params [ key ] = [ value ]
end
end
raw_params = HTTP :: Params . new ( raw_params )
if fvip = raw_params [ " hls_chunk_host " ] . match ( / r(?<fvip> \ d+)--- / )
raw_params [ " fvip " ] = fvip [ " fvip " ]
end
raw_params [ " local " ] = " true "
2019-04-25 10:41:35 -07:00
2020-06-15 15:10:30 -07:00
" #{ HOST_URL } /videoplayback? #{ raw_params } "
2019-07-05 10:08:39 -07:00
end
end
2018-07-27 16:25:58 -07:00
manifest
end
2019-01-27 18:35:32 -08:00
# YouTube /videoplayback links expire after 6 hours, so this endpoint looks the
# video up again and redirects to a fresh URL for the requested itag (or to the
# captions endpoint when a caption label is requested).
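# The optional "download_widget" query parameter is a JSON object, presumably produced
# by the watch page's download form; it carries "id" and "title" plus either a caption
# "label" or an "itag" (as a string), and forces local proxying of the result.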
get " /latest_version " do | env |
2019-02-24 09:04:46 -08:00
if env . params . query [ " download_widget " ]?
download_widget = JSON . parse ( env . params . query [ " download_widget " ] )
2019-04-11 10:08:43 -07:00
2019-02-24 09:04:46 -08:00
id = download_widget [ " id " ] . as_s
title = download_widget [ " title " ] . as_s
2019-04-11 10:08:43 -07:00
if label = download_widget [ " label " ]?
env . redirect " /api/v1/captions/ #{ id } ?label= #{ label } &title= #{ title } "
next
else
2021-01-09 11:40:01 -08:00
itag = download_widget [ " itag " ] . as_s . to_i
2019-04-12 05:29:47 -07:00
local = " true "
end
2019-04-11 10:08:43 -07:00
end
2019-02-24 09:04:46 -08:00
id || = env . params . query [ " id " ]?
2020-06-15 15:33:23 -07:00
itag || = env . params . query [ " itag " ]? . try & . to_i
2019-01-27 18:35:32 -08:00
2019-02-09 10:28:43 -08:00
region = env . params . query [ " region " ]?
2019-02-24 09:04:46 -08:00
local || = env . params . query [ " local " ]?
2019-01-27 19:20:52 -08:00
local || = " false "
local = local == " true "
2019-01-27 18:35:32 -08:00
if ! id || ! itag
2019-03-23 08:24:30 -07:00
env . response . status_code = 400
next
2019-01-27 18:35:32 -08:00
end
2019-06-28 19:17:56 -07:00
video = get_video ( id , PG_DB , region : region )
2019-01-27 18:35:32 -08:00
2020-06-15 15:33:23 -07:00
fmt = video . fmt_stream . find ( nil ) { | f | f [ " itag " ] . as_i == itag } || video . adaptive_fmts . find ( nil ) { | f | f [ " itag " ] . as_i == itag }
url = fmt . try & . [ " url " ]? . try & . as_s
2019-01-27 18:35:32 -08:00
2020-06-15 15:33:23 -07:00
if ! url
2019-03-23 08:24:30 -07:00
env . response . status_code = 404
next
2019-01-27 19:20:52 -08:00
end
2020-06-15 15:33:23 -07:00
url = URI . parse ( url ) . full_path . not_nil! if local
url = " #{ url } &title= #{ title } " if title
2019-02-24 09:04:46 -08:00
2019-01-27 19:20:52 -08:00
env . redirect url
2019-01-27 18:35:32 -08:00
end
2018-08-07 11:25:22 -07:00
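# CORS preflight handlers for the videoplayback and DASH proxy endpoints.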
options " /videoplayback " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-04 13:30:44 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 07:43:47 -07:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-04 13:30:44 -07:00
end
2018-08-07 09:39:56 -07:00
options " /videoplayback/* " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 09:39:56 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 07:43:47 -07:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 09:39:56 -07:00
end
2018-08-07 11:18:38 -07:00
options " /api/manifest/dash/id/videoplayback " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 11:18:38 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 07:43:47 -07:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 11:18:38 -07:00
end
options " /api/manifest/dash/id/videoplayback/* " do | env |
2019-04-12 09:08:33 -07:00
env . response . headers . delete ( " Content-Type " )
2018-08-07 11:18:38 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-09 07:43:47 -07:00
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
2018-08-07 11:18:38 -07:00
end
2018-08-07 09:39:56 -07:00
get " /videoplayback/* " do | env |
2018-06-06 15:55:51 -07:00
path = env . request . path
2018-08-07 09:39:56 -07:00
2018-08-07 09:49:14 -07:00
path = path . lchop ( " /videoplayback/ " )
path = path . rchop ( " / " )
2018-07-15 19:53:24 -07:00
2018-08-07 09:49:14 -07:00
path = path . gsub ( / mime \/ \ w+ \/ \ w+ / ) do | mimetype |
mimetype = mimetype . split ( " / " )
mimetype [ 0 ] + " / " + mimetype [ 1 ] + " %2F " + mimetype [ 2 ]
end
2018-07-15 19:53:24 -07:00
2018-08-07 09:49:14 -07:00
path = path . split ( " / " )
2018-06-06 15:55:51 -07:00
2018-08-07 09:49:14 -07:00
raw_params = { } of String = > Array ( String )
path . each_slice ( 2 ) do | pair |
key , value = pair
2019-09-24 10:31:33 -07:00
value = URI . decode_www_form ( value )
2018-06-06 15:55:51 -07:00
2018-08-07 09:49:14 -07:00
if raw_params [ key ]?
raw_params [ key ] << value
else
raw_params [ key ] = [ value ]
2018-06-06 15:55:51 -07:00
end
2018-08-07 09:49:14 -07:00
end
2018-06-06 15:55:51 -07:00
2018-08-07 09:49:14 -07:00
query_params = HTTP :: Params . new ( raw_params )
2018-08-07 09:39:56 -07:00
2018-08-11 12:29:51 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2018-08-07 09:39:56 -07:00
env . redirect " /videoplayback? #{ query_params } "
end
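# GET /videoplayback
# Proxies googlevideo media streams: resolves the upstream host (following redirects),
# forwards whitelisted request headers, and either streams HLS segments directly or
# fetches regular streams in ~10 MB ranges (HTTP_CHUNK_SIZE), honouring the client's
# Range header and the instance's "downloads"/"dash"/"livestreams" feature toggles.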
get " /videoplayback " do | env |
2020-11-30 01:59:21 -08:00
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
2018-08-07 09:49:14 -07:00
query_params = env . params . query
2018-04-15 18:47:37 -07:00
2019-03-11 12:07:55 -07:00
fvip = query_params [ " fvip " ]? || " 3 "
2019-05-30 18:47:04 -07:00
mns = query_params [ " mn " ]? . try & . split ( " , " )
mns || = [ ] of String
2019-03-11 12:07:55 -07:00
2019-03-27 12:59:53 -07:00
if query_params [ " region " ]?
region = query_params [ " region " ]
query_params . delete ( " region " )
end
2019-03-11 11:14:30 -07:00
if query_params [ " host " ]? && ! query_params [ " host " ] . empty?
host = " https:// #{ query_params [ " host " ] } "
2019-03-11 11:32:46 -07:00
query_params . delete ( " host " )
2019-03-11 11:14:30 -07:00
else
2019-03-11 12:07:55 -07:00
host = " https://r #{ fvip } --- #{ mns . pop } .googlevideo.com "
2019-03-11 11:14:30 -07:00
end
2018-04-15 18:47:37 -07:00
url = " /videoplayback? #{ query_params . to_s } "
2019-03-11 09:43:48 -07:00
headers = HTTP :: Headers . new
2019-06-23 06:39:14 -07:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 09:43:48 -07:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2019-01-24 11:52:33 -08:00
2019-08-27 07:53:44 -07:00
client = make_client ( URI . parse ( host ) , region )
2019-10-26 08:43:28 -07:00
response = HTTP :: Client :: Response . new ( 500 )
2020-03-06 10:50:00 -08:00
error = " "
2019-08-27 07:53:44 -07:00
5 . times do
begin
response = client . head ( url , headers )
if response . headers [ " Location " ]?
location = URI . parse ( response . headers [ " Location " ] )
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2020-12-22 21:52:23 -08:00
new_host = " #{ location . scheme } :// #{ location . host } "
if new_host != host
host = new_host
client . close
client = make_client ( URI . parse ( new_host ) , region )
end
2019-08-27 07:53:44 -07:00
url = " #{ location . full_path } &host= #{ location . host } #{ region ? " ®ion= #{ region } " : " " } "
else
break
end
rescue Socket :: Addrinfo :: Error
if ! mns . empty?
mn = mns . pop
end
fvip = " 3 "
host = " https://r #{ fvip } --- #{ mn } .googlevideo.com "
client = make_client ( URI . parse ( host ) , region )
rescue ex
2020-03-06 10:50:00 -08:00
error = ex . message
2019-08-27 07:53:44 -07:00
end
end
if response . status_code >= 400
env . response . status_code = response . status_code
2020-03-06 10:50:00 -08:00
env . response . content_type = " text/plain "
next error
2019-08-27 07:53:44 -07:00
end
2019-07-05 09:34:22 -07:00
if url . includes? " &file=seg.ts "
2019-07-07 07:07:53 -07:00
if CONFIG . disabled? ( " livestreams " )
2020-11-30 01:59:21 -08:00
next error_template ( 403 , " Administrator has disabled this endpoint. " )
2019-07-07 07:07:53 -07:00
end
2019-07-05 09:34:22 -07:00
begin
client . get ( url , headers ) do | response |
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-07-05 09:34:22 -07:00
env . response . headers [ key ] = value
end
end
2018-04-15 18:47:37 -07:00
2019-07-05 09:34:22 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-04 13:30:00 -07:00
2019-07-05 09:34:22 -07:00
if location = response . headers [ " Location " ]?
location = URI . parse ( location )
location = " #{ location . full_path } &host= #{ location . host } "
2019-07-04 13:30:00 -07:00
2019-07-05 09:34:22 -07:00
if region
location += " &region= #{ region } "
end
2019-05-26 08:53:56 -07:00
2019-07-05 09:34:22 -07:00
next env . redirect location
end
IO . copy ( response . body_io , env . response )
end
rescue ex
end
else
2019-07-07 07:07:53 -07:00
if query_params [ " title " ]? && CONFIG . disabled? ( " downloads " ) ||
CONFIG . disabled? ( " dash " )
2020-11-30 01:59:21 -08:00
next error_template ( 403 , " Administrator has disabled this endpoint. " )
2019-07-07 07:07:53 -07:00
end
2019-07-05 09:34:22 -07:00
content_length = nil
first_chunk = true
range_start , range_end = parse_range ( env . request . headers [ " Range " ]? )
chunk_start = range_start
chunk_end = range_end
if ! chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE
chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
2019-07-04 13:30:00 -07:00
end
2019-05-19 05:12:45 -07:00
2019-07-05 09:34:22 -07:00
# TODO: Record bytes written so we can restart after a chunk fails
while true
if ! range_end && content_length
range_end = content_length
end
2019-03-27 12:59:53 -07:00
2019-07-05 09:34:22 -07:00
if range_end && chunk_start > range_end
break
end
if range_end && chunk_end > range_end
chunk_end = range_end
end
2019-03-27 12:59:53 -07:00
2019-07-05 09:34:22 -07:00
headers [ " Range " ] = " bytes= #{ chunk_start } - #{ chunk_end } "
2019-07-04 13:30:00 -07:00
2019-07-05 09:34:22 -07:00
begin
client . get ( url , headers ) do | response |
if first_chunk
if ! env . request . headers [ " Range " ]? && response . status_code == 206
env . response . status_code = 200
else
env . response . status_code = response . status_code
end
2019-07-04 13:30:00 -07:00
2019-07-05 09:34:22 -07:00
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase ) && key . downcase != " content-range "
2019-07-05 09:34:22 -07:00
env . response . headers [ key ] = value
end
2019-07-04 13:30:00 -07:00
end
2019-07-05 09:34:22 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-04 13:30:00 -07:00
2019-07-05 09:34:22 -07:00
if location = response . headers [ " Location " ]?
location = URI . parse ( location )
2019-08-27 06:08:26 -07:00
location = " #{ location . full_path } &host= #{ location . host } #{ region ? " ®ion= #{ region } " : " " } "
2019-07-05 09:34:22 -07:00
env . redirect location
break
end
if title = query_params [ " title " ]?
# https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
2019-09-24 10:31:33 -07:00
env . response . headers [ " Content-Disposition " ] = " attachment; filename= \" #{ URI . encode_www_form ( title ) } \" ; filename*=UTF-8'' #{ URI . encode_www_form ( title ) } "
2019-07-05 09:34:22 -07:00
end
if ! response . headers . includes_word? ( " Transfer-Encoding " , " chunked " )
content_length = response . headers [ " Content-Range " ] . split ( " / " ) [ - 1 ] . to_i64
if env . request . headers [ " Range " ]?
env . response . headers [ " Content-Range " ] = " bytes #{ range_start } - #{ range_end || ( content_length - 1 ) } / #{ content_length } "
env . response . content_length = ( ( range_end . try & . + 1 ) || content_length ) - range_start
else
env . response . content_length = content_length
end
2019-07-05 09:02:12 -07:00
end
2019-07-04 13:30:00 -07:00
end
2019-07-01 08:45:09 -07:00
2019-07-05 09:34:22 -07:00
proxy_file ( response , env )
end
rescue ex
if ex . message != " Error reading socket: Connection reset by peer "
break
2019-08-27 07:53:44 -07:00
else
2020-12-22 21:52:23 -08:00
client . close
2019-08-27 07:53:44 -07:00
client = make_client ( URI . parse ( host ) , region )
2019-07-05 09:34:22 -07:00
end
2019-07-01 08:45:09 -07:00
end
2019-07-05 09:02:12 -07:00
2019-07-05 09:34:22 -07:00
chunk_start = chunk_end + 1
chunk_end += HTTP_CHUNK_SIZE
first_chunk = false
2019-03-25 14:32:11 -07:00
end
2019-05-26 08:53:56 -07:00
end
2020-12-22 21:52:23 -08:00
client . close
2018-09-14 19:24:28 -07:00
end
2018-09-17 16:39:28 -07:00
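# GET /ggpht/*
# Proxies image requests (typically channel avatars and banners) to yt3.ggpht.com
# with CORS enabled.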
get " /ggpht/* " do | env |
url = env . request . path . lchop ( " /ggpht " )
2020-05-08 07:00:53 -07:00
headers = HTTP :: Headers { " :authority " = > " yt3.ggpht.com " }
2019-06-23 06:39:14 -07:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-04-11 15:00:00 -07:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2019-05-26 07:41:12 -07:00
begin
2020-03-06 10:53:35 -08:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-07-03 11:13:40 -07:00
env . response . status_code = response . status_code
2019-05-26 08:53:56 -07:00
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 08:53:56 -07:00
env . response . headers [ key ] = value
end
2019-05-19 05:12:45 -07:00
end
2019-04-11 15:00:00 -07:00
2019-07-03 11:13:40 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300
2019-07-03 11:54:15 -07:00
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 08:53:56 -07:00
break
end
2019-04-11 15:00:00 -07:00
2019-05-26 08:53:56 -07:00
proxy_file ( response , env )
end
2019-05-26 07:41:12 -07:00
rescue ex
end
2019-04-11 15:00:00 -07:00
end
2020-10-25 01:35:16 -07:00
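# /sb/:authority/:id/:storyboard/:index
# Proxies video storyboard (seek preview) tiles from <authority>.ytimg.com.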
options " /sb/:authority/:id/:storyboard/:index " do | env |
2019-05-02 12:20:19 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
env . response . headers [ " Access-Control-Allow-Methods " ] = " GET, OPTIONS "
env . response . headers [ " Access-Control-Allow-Headers " ] = " Content-Type, Range "
end
2020-10-25 01:35:16 -07:00
get " /sb/:authority/:id/:storyboard/:index " do | env |
authority = env . params . url [ " authority " ]
2019-04-11 15:00:00 -07:00
id = env . params . url [ " id " ]
storyboard = env . params . url [ " storyboard " ]
index = env . params . url [ " index " ]
2020-03-06 10:53:35 -08:00
url = " /sb/ #{ id } / #{ storyboard } / #{ index } ? #{ env . params . query } "
headers = HTTP :: Headers . new
2020-10-25 01:35:16 -07:00
headers [ " :authority " ] = " #{ authority } .ytimg.com "
2019-04-11 15:00:00 -07:00
2019-06-23 06:39:14 -07:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 09:43:48 -07:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2018-09-17 16:39:28 -07:00
2019-05-26 07:41:12 -07:00
begin
2020-03-06 10:53:35 -08:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-05-26 08:53:56 -07:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 08:53:56 -07:00
env . response . headers [ key ] = value
end
2019-05-19 05:12:45 -07:00
end
2018-09-17 16:39:28 -07:00
2020-03-04 12:36:39 -08:00
env . response . headers [ " Connection " ] = " close "
2019-07-03 11:13:40 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300
2019-07-03 11:54:15 -07:00
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 08:53:56 -07:00
break
end
2018-09-17 16:39:28 -07:00
2019-05-26 08:53:56 -07:00
proxy_file ( response , env )
end
2019-05-26 07:41:12 -07:00
rescue ex
end
2018-09-17 16:39:28 -07:00
end
2019-08-16 13:46:37 -07:00
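# GET /s_p/:id/:name and /yts/img/:name
# Additional image proxies: /s_p/* is fetched from i9.ytimg.com and /yts/img/* is
# fetched through the shared YouTube connection pool.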
get " /s_p/:id/:name " do | env |
id = env . params . url [ " id " ]
name = env . params . url [ " name " ]
url = env . request . resource
2020-05-08 07:00:53 -07:00
headers = HTTP :: Headers { " :authority " = > " i9.ytimg.com " }
2019-08-16 13:46:37 -07:00
REQUEST_HEADERS_WHITELIST . each do | header |
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
begin
2020-03-06 10:53:35 -08:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-08-16 13:46:37 -07:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-08-16 13:46:37 -07:00
env . response . headers [ key ] = value
end
2019-11-01 09:02:38 -07:00
end
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
break
end
proxy_file ( response , env )
end
rescue ex
end
end
get " /yts/img/:name " do | env |
headers = HTTP :: Headers . new
REQUEST_HEADERS_WHITELIST . each do | header |
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
begin
YT_POOL . client & . get ( env . request . resource , headers ) do | response |
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-11-01 09:02:38 -07:00
env . response . headers [ key ] = value
end
2019-08-16 13:46:37 -07:00
end
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
break
end
proxy_file ( response , env )
end
rescue ex
end
end
2018-09-14 19:24:28 -07:00
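# GET /vi/:id/:name
# Proxies video thumbnails from i.ytimg.com; "maxres.jpg" requests fall back to the
# first alternative thumbnail that actually exists for the video.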
get " /vi/:id/:name " do | env |
id = env . params . url [ " id " ]
name = env . params . url [ " name " ]
2020-05-08 07:00:53 -07:00
headers = HTTP :: Headers { " :authority " = > " i.ytimg.com " }
2020-03-06 10:53:35 -08:00
2018-09-14 19:24:28 -07:00
if name == " maxres.jpg "
2020-06-15 15:10:30 -07:00
build_thumbnails ( id ) . each do | thumb |
2020-03-06 10:53:35 -08:00
if YT_POOL . client & . head ( " /vi/ #{ id } / #{ thumb [ :url ] } .jpg " , headers ) . status_code == 200
2018-09-14 19:24:28 -07:00
name = thumb [ :url ] + " .jpg "
break
end
end
end
url = " /vi/ #{ id } / #{ name } "
2019-06-23 06:39:14 -07:00
REQUEST_HEADERS_WHITELIST . each do | header |
2019-03-11 09:43:48 -07:00
if env . request . headers [ header ]?
headers [ header ] = env . request . headers [ header ]
end
end
2018-09-14 19:24:28 -07:00
2019-05-26 07:41:12 -07:00
begin
2020-03-06 10:53:35 -08:00
YT_POOL . client & . get ( url , headers ) do | response |
2019-05-26 08:53:56 -07:00
env . response . status_code = response . status_code
response . headers . each do | key , value |
2019-11-24 10:41:47 -08:00
if ! RESPONSE_HEADERS_BLACKLIST . includes? ( key . downcase )
2019-05-26 08:53:56 -07:00
env . response . headers [ key ] = value
end
2019-05-19 05:12:45 -07:00
end
2018-09-14 19:24:28 -07:00
2019-07-03 11:13:40 -07:00
env . response . headers [ " Access-Control-Allow-Origin " ] = " * "
2019-07-03 11:54:15 -07:00
if response . status_code >= 300 && response . status_code != 404
env . response . headers . delete ( " Transfer-Encoding " )
2019-05-26 08:53:56 -07:00
break
end
2018-09-14 19:24:28 -07:00
2019-05-26 08:53:56 -07:00
proxy_file ( response , env )
end
2019-05-26 07:41:12 -07:00
rescue ex
end
2018-04-15 18:47:37 -07:00
end
2019-10-26 21:19:05 -07:00
get " /Captcha " do | env |
2020-05-08 07:00:53 -07:00
headers = HTTP :: Headers { " :authority " = > " accounts.google.com " }
response = YT_POOL . client & . get ( env . request . resource , headers )
2019-10-26 21:19:05 -07:00
env . response . headers [ " Content-Type " ] = response . headers [ " Content-Type " ]
response . body
end
2019-08-21 16:23:20 -07:00
# Undocumented: creates an anonymous playlist from the given 'video_ids' (at most 50 videos)
2019-05-03 07:11:27 -07:00
get " /watch_videos " do | env |
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( env . request . resource )
2019-05-03 07:11:27 -07:00
if url = response . headers [ " Location " ]?
url = URI . parse ( url ) . full_path
next env . redirect url
end
env . response . status_code = response . status_code
end
2018-02-10 07:15:23 -08:00
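# Catch-all 404 handler: single path segments are treated as possible video IDs or
# channel branding URLs (e.g. /gaming) and redirected to /watch or /channel/...;
# anything else falls back to the homepage.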
error 404 do | env |
2019-03-27 03:28:53 -07:00
if md = env . request . path . match ( / ^ \/ (?<id>([a-zA-Z0-9_-]{11})|( \ w+))$ / )
2019-04-17 12:46:00 -07:00
item = md [ " id " ]
2018-10-06 20:19:36 -07:00
2019-04-17 12:46:00 -07:00
# Check if the item is a branding URL, e.g. https://youtube.com/gaming
2019-10-25 09:58:16 -07:00
response = YT_POOL . client & . get ( " / #{ item } " )
2019-03-27 03:28:53 -07:00
if response . status_code == 301
2020-01-14 05:21:17 -08:00
response = YT_POOL . client & . get ( URI . parse ( response . headers [ " Location " ] ) . full_path )
2019-03-27 03:28:53 -07:00
end
2019-06-07 10:42:07 -07:00
if response . body . empty?
env . response . headers [ " Location " ] = " / "
halt env , status_code : 302
end
2019-03-27 03:28:53 -07:00
html = XML . parse_html ( response . body )
2020-01-14 05:21:17 -08:00
ucid = html . xpath_node ( % q ( / / link [ @rel = " canonical " ] ) ) . try & . [ " href " ] . split ( " / " ) [ - 1 ]
2019-03-27 03:28:53 -07:00
if ucid
2020-01-14 05:21:17 -08:00
env . response . headers [ " Location " ] = " /channel/ #{ ucid } "
2019-03-27 03:28:53 -07:00
halt env , status_code : 302
end
2018-10-06 20:19:36 -07:00
params = [ ] of String
env . params . query . each do | k , v |
params << " #{ k } = #{ v } "
end
params = params . join ( " & " )
2019-04-17 12:46:00 -07:00
url = " /watch?v= #{ item } "
2018-10-06 20:19:36 -07:00
if ! params . empty?
url += " & #{ params } "
end
2019-04-17 12:46:00 -07:00
# Check if the item is a video ID
2019-10-25 09:58:16 -07:00
if item . match ( / ^[a-zA-Z0-9_-]{11}$ / ) && YT_POOL . client & . head ( " /watch?v= #{ item } " ) . status_code != 404
2019-02-21 13:07:22 -08:00
env . response . headers [ " Location " ] = url
halt env , status_code : 302
end
end
2019-01-12 11:18:08 -08:00
env . response . headers [ " Location " ] = " / "
halt env , status_code : 302
2017-12-30 13:21:43 -08:00
end
2020-11-30 01:59:21 -08:00
error 500 do | env , ex |
locale = LOCALES [ env . get ( " preferences " ) . as ( Preferences ) . locale ]?
error_template ( 500 , ex )
2017-12-30 13:21:43 -08:00
end
2018-03-09 09:28:57 -08:00
static_headers do | response , filepath , filestat |
2019-05-08 06:58:10 -07:00
response . headers . add ( " Cache-Control " , " max-age=2629800 " )
2018-03-09 09:28:57 -08:00
end
2017-11-22 23:48:55 -08:00
public_folder " assets "
2018-04-15 20:56:58 -07:00
2018-07-30 16:42:45 -07:00
Kemal . config . powered_by_header = false
2018-04-15 20:56:58 -07:00
add_handler FilteredCompressHandler . new
2019-02-02 20:48:47 -08:00
add_handler APIHandler . new
2019-04-18 14:23:50 -07:00
add_handler AuthHandler . new
2019-03-23 08:24:30 -07:00
add_handler DenyFrame . new
2019-04-18 14:23:50 -07:00
add_context_storage_type ( Array ( String ) )
2019-02-24 07:49:48 -08:00
add_context_storage_type ( Preferences )
2019-04-18 14:23:50 -07:00
add_context_storage_type ( User )
2017-11-22 23:48:55 -08:00
2021-01-04 07:51:06 -08:00
Kemal . config . logger = LOGGER
2019-09-23 10:05:29 -07:00
Kemal . config . host_binding = Kemal . config . host_binding != " 0.0.0.0 " ? Kemal . config . host_binding : CONFIG . host_binding
Kemal . config . port = Kemal . config . port != 3000 ? Kemal . config . port : CONFIG . port
2017-11-22 23:48:55 -08:00
Kemal . run