Add support for Crystal 0.27.0

This commit is contained in:
Omar Roth 2018-11-04 09:37:12 -06:00
parent c912e63fb5
commit 4f856dd898
8 changed files with 29 additions and 29 deletions

View File

@@ -13,6 +13,7 @@ dependencies:
     github: detectlanguage/detectlanguage-crystal
   kemal:
     github: kemalcr/kemal
+    commit: b389022
   pg:
     github: will/crystal-pg

View File

@@ -2096,7 +2096,7 @@ get "/api/v1/videos/:id" do |env|
 json.field "description", description
 json.field "descriptionHtml", video.description
-json.field "published", video.published.epoch
+json.field "published", video.published.to_unix
 json.field "publishedText", "#{recode_date(video.published)} ago"
 json.field "keywords", video.keywords
@@ -2290,7 +2290,7 @@ get "/api/v1/trending" do |env|
 json.field "authorId", video.ucid
 json.field "authorUrl", "/channel/#{video.ucid}"
-json.field "published", video.published.epoch
+json.field "published", video.published.to_unix
 json.field "publishedText", "#{recode_date(video.published)} ago"
 json.field "description", video.description
 json.field "descriptionHtml", video.description_html
@@ -2320,7 +2320,7 @@ get "/api/v1/top" do |env|
 json.field "author", video.author
 json.field "authorId", video.ucid
 json.field "authorUrl", "/channel/#{video.ucid}"
-json.field "published", video.published.epoch
+json.field "published", video.published.to_unix
 json.field "publishedText", "#{recode_date(video.published)} ago"
 description = video.description.gsub("<br>", "\n")
@@ -2370,7 +2370,7 @@ get "/api/v1/channels/:ucid" do |env|
 total_views = 0_i64
 sub_count = 0_i64
-joined = Time.epoch(0)
+joined = Time.unix(0)
 metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
 metadata.each do |item|
 case item.content
@@ -2426,7 +2426,7 @@ get "/api/v1/channels/:ucid" do |env|
 json.field "subCount", sub_count
 json.field "totalViews", total_views
-json.field "joined", joined.epoch
+json.field "joined", joined.to_unix
 json.field "paid", paid
 json.field "isFamilyFriendly", is_family_friendly
@@ -2460,7 +2460,7 @@ get "/api/v1/channels/:ucid" do |env|
 json.field "descriptionHtml", video.description_html
 json.field "viewCount", video.views
-json.field "published", video.published.epoch
+json.field "published", video.published.to_unix
 json.field "publishedText", "#{recode_date(video.published)} ago"
 json.field "lengthSeconds", video.length_seconds
 json.field "paid", video.paid
@@ -2517,7 +2517,7 @@ end
 json.field "descriptionHtml", video.description_html
 json.field "viewCount", video.views
-json.field "published", video.published.epoch
+json.field "published", video.published.to_unix
 json.field "publishedText", "#{recode_date(video.published)} ago"
 json.field "lengthSeconds", video.length_seconds
 json.field "paid", video.paid
@@ -2565,7 +2565,7 @@ get "/api/v1/channels/search/:ucid" do |env|
 json.field "descriptionHtml", item.description_html
 json.field "viewCount", item.views
-json.field "published", item.published.epoch
+json.field "published", item.published.to_unix
 json.field "publishedText", "#{recode_date(item.published)} ago"
 json.field "lengthSeconds", item.length_seconds
 json.field "liveNow", item.live_now
@@ -2688,7 +2688,7 @@ get "/api/v1/search" do |env|
 json.field "descriptionHtml", item.description_html
 json.field "viewCount", item.views
-json.field "published", item.published.epoch
+json.field "published", item.published.to_unix
 json.field "publishedText", "#{recode_date(item.published)} ago"
 json.field "lengthSeconds", item.length_seconds
 json.field "liveNow", item.live_now
@@ -2809,7 +2809,7 @@ get "/api/v1/playlists/:plid" do |env|
 json.field "videoCount", playlist.video_count
 json.field "viewCount", playlist.views
-json.field "updated", playlist.updated.epoch
+json.field "updated", playlist.updated.to_unix
 json.field "videos" do
 json.array do

View File

@@ -165,14 +165,14 @@ end
 def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
 if auto_generated
-seed = Time.epoch(1525757349)
+seed = Time.unix(1525757349)
 until seed >= Time.now
 seed += 1.month
 end
 timestamp = seed - (page - 1).months
-page = "#{timestamp.epoch}"
+page = "#{timestamp.to_unix}"
 switch = "\x36"
 else
 page = "#{page}"

View File

@@ -8,11 +8,11 @@ end
 class RedditComment
 module TimeConverter
 def self.from_json(value : JSON::PullParser) : Time
-Time.epoch(value.read_float.to_i)
+Time.unix(value.read_float.to_i)
 end
 def self.to_json(value : Time, json : JSON::Builder)
-json.number(value.epoch)
+json.number(value.to_unix)
 end
 end
@@ -58,7 +58,7 @@ end
 def fetch_youtube_comments(id, continuation, proxies, format)
 client = make_client(YT_URL)
-html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
 headers = HTTP::Headers.new
 headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
 body = html.body
@@ -83,7 +83,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
 proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
 proxy_client.set_proxy(proxy)
-response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
 proxy_headers = HTTP::Headers.new
 proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
 proxy_html = response.body
@@ -140,8 +140,8 @@ def fetch_youtube_comments(id, continuation, proxies, format)
 headers["content-type"] = "application/x-www-form-urlencoded"
 headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
-headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
+headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
+headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
 headers["x-youtube-client-name"] = "1"
 headers["x-youtube-client-version"] = "2.20180719"
@@ -229,7 +229,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
 json.field "content", content
 json.field "contentHtml", content_html
-json.field "published", published.epoch
+json.field "published", published.to_unix
 json.field "publishedText", "#{recode_date(published)} ago"
 json.field "likeCount", node_comment["likeCount"]
 json.field "commentId", node_comment["commentId"]
@@ -327,7 +327,7 @@ def template_youtube_comments(comments)
 <a href="#{child["authorUrl"]}">#{child["author"]}</a>
 </b>
 <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
-#{recode_date(Time.epoch(child["published"].as_i64))} ago
+#{recode_date(Time.unix(child["published"].as_i64))} ago
 |
 <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
 </p>

View File

@@ -329,7 +329,7 @@ def extract_items(nodeset, ucid = nil)
 rescue ex
 end
 begin
-published ||= Time.epoch(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
+published ||= Time.unix(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
 rescue ex
 end
 published ||= Time.now

View File

@@ -26,7 +26,7 @@ def fetch_mix(rdid, video_id, cookies = nil)
 if cookies
 headers = cookies.add_request_headers(headers)
 end
-response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en", headers)
+response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en", headers)
 yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
 if yt_data

View File

@@ -30,7 +30,7 @@ def fetch_playlist_videos(plid, page, video_count, continuation = nil)
 client = make_client(YT_URL)
 if continuation
-html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
 html = XML.parse_html(html.body)
 index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
@@ -167,11 +167,10 @@ def fetch_playlist(plid)
 raise "Invalid playlist."
 end
-body = response.body.gsub(<<-END_BUTTON
+body = response.body.gsub(%(
 <button class="yt-uix-button yt-uix-button-size-default yt-uix-button-link yt-uix-expander-head playlist-description-expander yt-uix-inlineedit-ignore-edit" type="button" onclick=";return false;"><span class="yt-uix-button-content"> less <img alt="" src="/yts/img/pixel-vfl3z5WfW.gif">
 </span></button>
-END_BUTTON
-, "")
+), "")
 document = XML.parse_html(body)
 title = document.xpath_node(%q(//h1[@class="pl-header-title"]))

View File

@@ -319,7 +319,7 @@ class Video
 clen = url.match(/clen\/(?<clen>\d+)/).try &.["clen"]
 clen ||= "0"
 lmt = url.match(/lmt\/(?<lmt>\d+)/).try &.["lmt"]
-lmt ||= "#{((Time.now + 1.hour).epoch_f.to_f64 * 1000000).to_i64}"
+lmt ||= "#{((Time.now + 1.hour).to_unix_f.to_f64 * 1000000).to_i64}"
 segment_list = representation.xpath_node(%q(.//segmentlist)).not_nil!
 init = segment_list.xpath_node(%q(.//initialization))
@@ -546,7 +546,7 @@ def fetch_video(id, proxies)
 spawn do
 client = make_client(YT_URL)
-html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
 if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
 next html_channel.send(md["id"])
@@ -620,7 +620,7 @@ def fetch_video(id, proxies)
 client.connect_timeout = 10.seconds
 client.set_proxy(proxy)
-html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1").body)
+html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1").body)
 info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
 if info["reason"]?