Merge pull request #3214 from SamantazFox/fix-scheduled-streams
Changed files:

  mocks (submodule updated: 020337194d...c401dd9203)
  spec/invidious/videos/scheduled_live_extract_spec.cr (new file, 113 lines)
@@ -0,0 +1,113 @@
require "../../parsers_helper.cr"

Spectator.describe Invidious::Hashtag do
  it "parses scheduled livestreams data (test 1)" do
    # Enable mock
    _player = load_mock("video/scheduled_live_nintendo.player")
    _next = load_mock("video/scheduled_live_nintendo.next")

    raw_data = _player.merge!(_next)
    info = parse_video_info("QMGibBzTu0g", raw_data)

    # Some basic verifications
    expect(typeof(info)).to eq(Hash(String, JSON::Any))

    expect(info["shortDescription"].as_s).to eq(
      "Tune in on 6/22 at 7 a.m. PT for a livestreamed Xenoblade Chronicles 3 Direct presentation featuring roughly 20 minutes of information about the upcoming RPG adventure for Nintendo Switch."
    )
    expect(info["descriptionHtml"].as_s).to eq(
      "Tune in on 6/22 at 7 a.m. PT for a livestreamed Xenoblade Chronicles 3 Direct presentation featuring roughly 20 minutes of information about the upcoming RPG adventure for Nintendo Switch."
    )

    expect(info["likes"].as_i).to eq(2_283)

    expect(info["genre"].as_s).to eq("Gaming")
    expect(info["genreUrl"].raw).to be_nil
    expect(info["genreUcid"].as_s).to be_empty
    expect(info["license"].as_s).to be_empty

    expect(info["authorThumbnail"].as_s).to eq(
      "https://yt3.ggpht.com/ytc/AKedOLTt4vtjREUUNdHlyu9c4gtJjG90M9jQheRlLKy44A=s48-c-k-c0x00ffffff-no-rj"
    )

    expect(info["authorVerified"].as_bool).to be_true
    expect(info["subCountText"].as_s).to eq("8.5M")

    expect(info["relatedVideos"].as_a.size).to eq(20)

    # related video #1
    expect(info["relatedVideos"][3]["id"].as_s).to eq("a-SN3lLIUEo")
    expect(info["relatedVideos"][3]["author"].as_s).to eq("Nintendo")
    expect(info["relatedVideos"][3]["ucid"].as_s).to eq("UCGIY_O-8vW4rfX98KlMkvRg")
    expect(info["relatedVideos"][3]["view_count"].as_s).to eq("147796")
    expect(info["relatedVideos"][3]["short_view_count"].as_s).to eq("147K")
    expect(info["relatedVideos"][3]["author_verified"].as_s).to eq("true")

    # Related video #2
    expect(info["relatedVideos"][16]["id"].as_s).to eq("l_uC1jFK0lo")
    expect(info["relatedVideos"][16]["author"].as_s).to eq("Nintendo")
    expect(info["relatedVideos"][16]["ucid"].as_s).to eq("UCGIY_O-8vW4rfX98KlMkvRg")
    expect(info["relatedVideos"][16]["view_count"].as_s).to eq("53510")
    expect(info["relatedVideos"][16]["short_view_count"].as_s).to eq("53K")
    expect(info["relatedVideos"][16]["author_verified"].as_s).to eq("true")
  end

  it "parses scheduled livestreams data (test 2)" do
    # Enable mock
    _player = load_mock("video/scheduled_live_PBD-Podcast.player")
    _next = load_mock("video/scheduled_live_PBD-Podcast.next")

    raw_data = _player.merge!(_next)
    info = parse_video_info("RG0cjYbXxME", raw_data)

    # Some basic verifications
    expect(typeof(info)).to eq(Hash(String, JSON::Any))

    expect(info["shortDescription"].as_s).to start_with(
      <<-TXT
      PBD Podcast Episode 171. In this episode, Patrick Bet-David is joined by Dr. Patrick Moore and Adam Sosnick.

      Join the channel to get exclusive access to perks: https://bit.ly/3Q9rSQL
      TXT
    )
    expect(info["descriptionHtml"].as_s).to start_with(
      <<-TXT
      PBD Podcast Episode 171. In this episode, Patrick Bet-David is joined by Dr. Patrick Moore and Adam Sosnick.

      Join the channel to get exclusive access to perks: <a href="https://bit.ly/3Q9rSQL">bit.ly/3Q9rSQL</a>
      TXT
    )

    expect(info["likes"].as_i).to eq(22)

    expect(info["genre"].as_s).to eq("Entertainment")
    expect(info["genreUrl"].raw).to be_nil
    expect(info["genreUcid"].as_s).to be_empty
    expect(info["license"].as_s).to be_empty

    expect(info["authorThumbnail"].as_s).to eq(
      "https://yt3.ggpht.com/61ArDiQshJrvSXcGLhpFfIO3hlMabe2fksitcf6oGob0Mdr5gztdkXxRljICUodL4iuTSrtxW4A=s48-c-k-c0x00ffffff-no-rj"
    )

    expect(info["authorVerified"].as_bool).to be_false
    expect(info["subCountText"].as_s).to eq("227K")

    expect(info["relatedVideos"].as_a.size).to eq(20)

    # related video #1
    expect(info["relatedVideos"][2]["id"]).to eq("La9oLLoI5Rc")
    expect(info["relatedVideos"][2]["author"]).to eq("Tom Bilyeu")
    expect(info["relatedVideos"][2]["ucid"]).to eq("UCnYMOamNKLGVlJgRUbamveA")
    expect(info["relatedVideos"][2]["view_count"]).to eq("13329149")
    expect(info["relatedVideos"][2]["short_view_count"]).to eq("13M")
    expect(info["relatedVideos"][2]["author_verified"]).to eq("true")

    # Related video #2
    expect(info["relatedVideos"][9]["id"]).to eq("IQ_4fvpzYuA")
    expect(info["relatedVideos"][9]["author"]).to eq("Business Today")
    expect(info["relatedVideos"][9]["ucid"]).to eq("UCaPHWiExfUWaKsUtENLCv5w")
    expect(info["relatedVideos"][9]["view_count"]).to eq("26432")
    expect(info["relatedVideos"][9]["short_view_count"]).to eq("26K")
    expect(info["relatedVideos"][9]["author_verified"]).to eq("true")
  end
end
@@ -6,6 +6,7 @@ require "protodec/utils"

require "spectator"

require "../src/invidious/exceptions"
require "../src/invidious/helpers/macros"
require "../src/invidious/helpers/logger"
require "../src/invidious/helpers/utils"
@@ -1,3 +1,11 @@
# InfoExceptions are for displaying information to the user.
#
# An InfoException might or might not indicate that something went wrong.
# Historically Invidious didn't differentiate between these two options, so to
# maintain previous functionality InfoExceptions do not print backtraces.
class InfoException < Exception
end

# Exception used to hold the bogus UCID during a channel search.
class ChannelSearchException < InfoException
  getter channel : String
@@ -1,11 +1,3 @@
# InfoExceptions are for displaying information to the user.
#
# An InfoException might or might not indicate that something went wrong.
# Historically Invidious didn't differentiate between these two options, so to
# maintain previous functionality InfoExceptions do not print backtraces.
class InfoException < Exception
end

# -------------------
#  Issue template
# -------------------
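The two hunks above move the InfoException hierarchy into its own file, which is what lets the test helper pull it in through the new require "../src/invidious/exceptions" line. A minimal usage sketch of the relocated classes follows; it is not part of the commit, and the require path and message text are illustrative only.

# Sketch only: shows how the relocated InfoException is typically used.
require "./src/invidious/exceptions" # illustrative path

def fail_with_info(message : String)
  # InfoException carries a user-facing message and does not print a backtrace.
  raise InfoException.new(message)
end

begin
  fail_with_info("Video unavailable")
rescue ex : InfoException
  puts ex.message # prints the user-facing message
end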
@@ -886,13 +886,13 @@ def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
end

def extract_video_info(video_id : String, proxy_region : String? = nil, context_screen : String? = nil)
  params = {} of String => JSON::Any

  # Init client config for the API
  client_config = YoutubeAPI::ClientConfig.new(proxy_region: proxy_region)
  if context_screen == "embed"
    client_config.client_type = YoutubeAPI::ClientType::TvHtml5ScreenEmbed
  end

  # Fetch data from the player endpoint
  player_response = YoutubeAPI.player(video_id: video_id, params: "", client_config: client_config)

  playability_status = player_response.dig?("playabilityStatus", "status").try &.as_s
@@ -903,26 +903,29 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
    reason ||= subreason.try &.[]("runs").as_a.map(&.[]("text")).join("")
    reason ||= player_response.dig("playabilityStatus", "reason").as_s

    params["reason"] = JSON::Any.new(reason)

    # Stop here if video is not a scheduled livestream
    if playability_status != "LIVE_STREAM_OFFLINE"
      return params
      return {
        "reason" => JSON::Any.new(reason),
      }
    end
  else
    reason = nil
  end

  params["shortDescription"] = player_response.dig?("videoDetails", "shortDescription") || JSON::Any.new(nil)

  # Don't fetch the next endpoint if the video is unavailable.
  if !params["reason"]?
  if {"OK", "LIVE_STREAM_OFFLINE"}.any?(playability_status)
    next_response = YoutubeAPI.next({"videoId": video_id, "params": ""})
    player_response = player_response.merge(next_response)
  end

  params = parse_video_info(video_id, player_response)
  params["reason"] = JSON::Any.new(reason) if reason

  # Fetch the video streams using an Android client in order to get the decrypted URLs and
  # maybe fix throttling issues (#2194).See for the explanation about the decrypted URLs:
  # https://github.com/TeamNewPipe/NewPipeExtractor/issues/562
  if !params["reason"]?
  if reason.nil?
    if context_screen == "embed"
      client_config.client_type = YoutubeAPI::ClientType::AndroidScreenEmbed
    else
@@ -940,10 +943,15 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
    end
  end

  # TODO: clean that up
  {"captions", "microformat", "playabilityStatus", "storyboards", "videoDetails"}.each do |f|
    params[f] = player_response[f] if player_response[f]?
  end

  return params
end

def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any)) : Hash(String, JSON::Any)
  # Top level elements

  main_results = player_response.dig?("contents", "twoColumnWatchNextResults")
@@ -997,8 +1005,6 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
    end
  end

  params["relatedVideos"] = JSON::Any.new(related)

  # Likes

  toplevel_buttons = video_primary_renderer
@@ -1019,42 +1025,36 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
    end
  end

  params["likes"] = JSON::Any.new(likes || 0_i64)
  params["dislikes"] = JSON::Any.new(0_i64)

  # Description

  short_description = player_response.dig?("videoDetails", "shortDescription")

  description_html = video_secondary_renderer.try &.dig?("description", "runs")
    .try &.as_a.try { |t| content_to_comment_html(t, video_id) }

  params["descriptionHtml"] = JSON::Any.new(description_html || "<p></p>")

  # Video metadata

  metadata = video_secondary_renderer
    .try &.dig?("metadataRowContainer", "metadataRowContainerRenderer", "rows")
      .try &.as_a

  params["genre"] = params["microformat"]?.try &.["playerMicroformatRenderer"]?.try &.["category"]? || JSON::Any.new("")
  params["genreUrl"] = JSON::Any.new(nil)
  genre = player_response.dig?("microformat", "playerMicroformatRenderer", "category")
  genre_ucid = nil
  license = nil

  metadata.try &.each do |row|
    title = row["metadataRowRenderer"]?.try &.["title"]?.try &.["simpleText"]?.try &.as_s
    metadata_title = row.dig?("metadataRowRenderer", "title", "simpleText").try &.as_s
    contents = row.dig?("metadataRowRenderer", "contents", 0)

    if title.try &.== "Category"
    if metadata_title == "Category"
      contents = contents.try &.dig?("runs", 0)

      params["genre"] = JSON::Any.new(contents.try &.["text"]?.try &.as_s || "")
      params["genreUcid"] = JSON::Any.new(contents.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]?
        .try &.["browseId"]?.try &.as_s || "")
    elsif title.try &.== "License"
      contents = contents.try &.["runs"]?
        .try &.as_a[0]?

      params["license"] = JSON::Any.new(contents.try &.["text"]?.try &.as_s || "")
    elsif title.try &.== "Licensed to YouTube by"
      params["license"] = JSON::Any.new(contents.try &.["simpleText"]?.try &.as_s || "")
      genre = contents.try &.["text"]?
      genre_ucid = contents.try &.dig?("navigationEndpoint", "browseEndpoint", "browseId")
    elsif metadata_title == "License"
      license = contents.try &.dig?("runs", 0, "text")
    elsif metadata_title == "Licensed to YouTube by"
      license = contents.try &.["simpleText"]?
    end
  end
@@ -1062,20 +1062,30 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_

  if author_info = video_secondary_renderer.try &.dig?("owner", "videoOwnerRenderer")
    author_thumbnail = author_info.dig?("thumbnail", "thumbnails", 0, "url")
    params["authorThumbnail"] = JSON::Any.new(author_thumbnail.try &.as_s || "")

    author_verified = has_verified_badge?(author_info["badges"]?)
    params["authorVerified"] = JSON::Any.new(author_verified)

    subs_text = author_info["subscriberCountText"]?
      .try { |t| t["simpleText"]? || t.dig?("runs", 0, "text") }
      .try &.as_s.split(" ", 2)[0]

    params["subCountText"] = JSON::Any.new(subs_text || "-")
  end

  # Return data

  params = {
    "shortDescription" => JSON::Any.new(short_description.try &.as_s || nil),
    "relatedVideos"    => JSON::Any.new(related),
    "likes"            => JSON::Any.new(likes || 0_i64),
    "dislikes"         => JSON::Any.new(0_i64),
    "descriptionHtml"  => JSON::Any.new(description_html || "<p></p>"),
    "genre"            => JSON::Any.new(genre.try &.as_s || ""),
    "genreUrl"         => JSON::Any.new(nil),
    "genreUcid"        => JSON::Any.new(genre_ucid.try &.as_s || ""),
    "license"          => JSON::Any.new(license.try &.as_s || ""),
    "authorThumbnail"  => JSON::Any.new(author_thumbnail.try &.as_s || ""),
    "authorVerified"   => JSON::Any.new(author_verified),
    "subCountText"     => JSON::Any.new(subs_text || "-"),
  }

  return params
end
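Taken together, the refactor separates fetching (extract_video_info) from parsing (parse_video_info), so the new spec can feed recorded player/next responses straight into the parser without touching the network. A minimal sketch of that flow, mirroring the spec above and assuming the load_mock helper provided by spec/parsers_helper.cr; the require path is illustrative only.

# Sketch only: mirrors how the new spec drives parse_video_info.
require "./spec/parsers_helper.cr" # illustrative path

# load_mock returns the recorded API responses used by the spec.
player = load_mock("video/scheduled_live_nintendo.player")
next_data = load_mock("video/scheduled_live_nintendo.next")

# The parser only needs the merged JSON; no YoutubeAPI calls are made here.
info = parse_video_info("QMGibBzTu0g", player.merge!(next_data))

info["genre"].as_s        # => "Gaming"
info["subCountText"].as_s # => "8.5M"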