2018-03-07 15:58:33 -08:00
|
|
|
# Generates the boilerplate a DB-backed record type needs, from a
# NamedTuple-style mapping of column names to types:
#   - an initializer taking one positional argument per column, in mapping order
#   - #to_a, returning the field values in the same order (used to build
#     parameterized INSERT/UPDATE argument lists via arg_array)
#   - DB.mapping, so the type can be deserialized from a query result set
macro add_mapping(mapping)
  def initialize({{*mapping.keys.map { |id| "@#{id}".id }}})
  end

  def to_a
    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
  end

  DB.mapping({{mapping}})
end
|
|
|
|
|
|
|
|
# Renders the ECR template "src/views/<filename>.ecr" inside the shared
# layout template. `filename` is interpolated at macro-expansion time, so it
# must be known at compile time.
# (Fix: the template-name interpolation was garbled; the macro's `filename`
# parameter is what must be spliced into the path.)
macro templated(filename)
  render "src/views/#{{filename}}.ecr", "src/views/layout.ecr"
end
|
|
|
|
|
2018-03-09 10:42:23 -08:00
|
|
|
# Application configuration, deserialized from a YAML file.
class Config
  YAML.mapping({
    pool_size: Int32, # number of HTTP clients kept in the client pool
    threads: Int32,   # number of background worker fibers/threads
    db: NamedTuple(   # PostgreSQL connection settings
      user: String,
      password: String,
      host: String,
      port: Int32,
      dbname: String,
    ),
    # presumably the DetectLanguage API key used by rank_videos — optional,
    # language filtering is skipped without it (TODO confirm against caller)
    dl_api_key: String?,
  })
end
|
|
|
|
|
2018-01-27 18:09:27 -08:00
|
|
|
class Video
  # Converts the `info` column — stored in Postgres as a URL-encoded string —
  # back into an HTTP::Params when a Video is read from a result set.
  module HTTPParamConverter
    def self.from_rs(rs)
      HTTP::Params.parse(rs.read(String))
    end
  end

  add_mapping({
    id: String, # YouTube video ID
    info: {
      type: HTTP::Params,
      default: HTTP::Params.parse(""),
      converter: Video::HTTPParamConverter,
    },
    updated: Time, # when this record was last refreshed (see get_video)
    title: String,
    views: Int64,
    likes: Int32,
    dislikes: Int32,
    # lower bound of the like-ratio confidence interval (see ci_lower_bound)
    wilson_score: Float64,
    published: Time,
    description: String, # raw HTML scraped from the watch page
    language: String?,   # detected language code; nil until classified
  })
end
|
|
|
|
|
2018-03-24 20:38:35 -07:00
|
|
|
class InvidiousChannel
  # Parses an XML column stored as text back into a document when reading
  # from a result set.
  module XMLConverter
    def self.from_rs(rs)
      XML.parse_html(rs.read(String))
    end
  end

  add_mapping({
    id: String,     # channel UCID
    author: String, # channel display name
    updated: Time,  # when the channel record was last refreshed
  })
end
|
|
|
|
|
|
|
|
# Minimal video record scraped from a channel's RSS feed (see fetch_channel).
class ChannelVideo
  add_mapping({
    id: String, # video ID
    title: String,
    published: Time,
    updated: Time,
    ucid: String,   # owning channel's ID
    author: String, # owning channel's display name
  })
end
|
|
|
|
|
2018-03-29 19:41:05 -07:00
|
|
|
class User
  add_mapping({
    id: String, # session identifier (`sid` in get_user/fetch_user)
    updated: Time,
    notifications: Int32,
    subscriptions: Array(String), # subscribed channel IDs
  })
end
|
|
|
|
|
2018-03-03 13:06:14 -08:00
|
|
|
# Top-level wrapper of a Reddit /search.json response.
class RedditSubmit
  JSON.mapping({
    data: RedditSubmitData,
  })
end
|
|
|
|
|
|
|
|
# Listing body of a Reddit search response: the matched submissions.
class RedditSubmitData
  JSON.mapping({
    children: Array(RedditThread),
  })
end
|
|
|
|
|
|
|
|
# Wrapper around a single Reddit submission ("thread") object.
class RedditThread
  JSON.mapping({
    data: RedditThreadData,
  })
end
|
|
|
|
|
|
|
|
# Fields of a Reddit submission used to locate and link its comment page.
class RedditThreadData
  JSON.mapping({
    subreddit: String,
    id: String,
    num_comments: Int32,
    score: Int32,
    author: String,
    permalink: String,
    title: String,
  })
end
|
|
|
|
|
2018-01-20 16:19:12 -08:00
|
|
|
# Lower bound of the Wilson score confidence interval for a proportion:
# `pos` positive ratings out of `n` total. Returns 0.0 when there are no
# ratings. Used as a ranking score that penalizes small sample sizes.
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0 * pos / n

  numerator = phat + z * z / (2 * n) - z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)
  return numerator / (1 + z * z / n)
end
|
|
|
|
|
|
|
|
# Formats a duration (anything responding to #total_milliseconds) for
# humans: milliseconds rounded to 2 decimals when at least 1ms, otherwise
# microseconds rounded to 2 decimals.
def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds

  if millis >= 1
    "#{millis.round(2)}ms"
  else
    "#{(millis * 1000).round(2)}µs"
  end
end
|
|
|
|
|
2018-01-27 18:09:27 -08:00
|
|
|
# Blocks until a client is available in `pool`, then removes and returns the
# first one. Sleeps for a short randomized interval between checks to avoid
# busy-spinning and stampedes when the pool is exhausted.
def get_client(pool)
  loop do
    break unless pool.empty?
    sleep rand(0..10).milliseconds
  end

  return pool.shift
end
|
|
|
|
|
2018-01-27 18:09:27 -08:00
|
|
|
# Scrapes a single video's metadata from YouTube, combining the
# get_video_info endpoint (title, view count) with the public watch page
# (ratings, description, publish date). Raises when YouTube reports a
# failure reason or the publish date cannot be found/parsed.
def fetch_video(id, client)
  info = client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en").body
  # bpctr set past the current epoch bypasses the content-warning interstitial
  html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}").body

  html = XML.parse_html(html)
  info = HTTP::Params.parse(info)

  if info["reason"]?
    # el=detailpage fails for some videos; retry with the default context
    info = client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en").body
    info = HTTP::Params.parse(info)
    if info["reason"]?
      raise info["reason"]
    end
  end

  title = info["title"]

  views = info["view_count"].to_i64

  # Like/dislike counts only appear in the watch-page markup; default to 0
  # when the rating buttons are absent
  likes = html.xpath_node(%q(//button[@title="I like this"]/span))
  likes = likes.try &.content.delete(",").try &.to_i
  likes ||= 0

  dislikes = html.xpath_node(%q(//button[@title="I dislike this"]/span))
  dislikes = dislikes.try &.content.delete(",").try &.to_i
  dislikes ||= 0

  description = html.xpath_node(%q(//p[@id="eow-description"]))
  description = description ? description.to_xml : ""

  wilson_score = ci_lower_bound(likes, likes + dislikes)

  published = html.xpath_node(%q(//strong[contains(@class,"watch-time-text")]))
  if published
    published = published.content
  else
    raise "Could not find date published"
  end

  # Strip the various prefixes YouTube uses ("Published on ...",
  # "Streamed live ...", etc.) down to the bare date text
  published = published.lchop("Published ")
  published = published.lchop("Started streaming ")
  published = published.lchop("Streamed live ")
  published = published.lchop("Uploaded ")
  published = published.lchop("on ")
  published = published.lchop("Scheduled for ")
  if !published.includes?("ago")
    published = Time.parse(published, "%b %-d, %Y")
  else
    # Time matches format "20 hours ago", "40 minutes ago"...
    delta = published.split(" ")[0].to_i
    case published
    when .includes? "minute"
      published = Time.now - delta.minutes
    when .includes? "hour"
      published = Time.now - delta.hours
    else
      raise "Could not parse #{published}"
    end
  end

  # language is nil here; it is classified lazily by rank_videos
  video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description, nil)

  return video
end
|
|
|
|
|
2018-01-27 18:09:27 -08:00
|
|
|
# Returns the Video record for `id`, fetching from YouTube when the row is
# missing, and (when `refresh` is true) re-fetching rows older than one hour.
# If a refresh fails, the stale row is deleted and the previously-loaded
# record is still returned to the caller.
def get_video(id, client, db, refresh = true)
  if db.query_one?("SELECT EXISTS (SELECT true FROM videos WHERE id = $1)", id, as: Bool)
    video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)

    # If record was last updated over an hour ago, refresh (expire param in response lasts for 6 hours)
    if refresh && Time.now - video.updated > 1.hour
      begin
        video = fetch_video(id, client)
        video_array = video.to_a
        # Placeholders start at $2: $1 is reserved for id in the WHERE clause
        args = arg_array(video_array[1..-1], 2)

        db.exec("UPDATE videos SET (info,updated,title,views,likes,dislikes,wilson_score,published,description,language)\
        = (#{args}) WHERE id = $1", video_array)
      rescue ex
        # Fetch failed: drop the stale row so a later request retries cleanly
        db.exec("DELETE FROM videos * WHERE id = $1", id)
      end
    end
  else
    video = fetch_video(id, client)
    args = arg_array(video.to_a)
    db.exec("INSERT INTO videos VALUES (#{args})", video.to_a)
  end

  return video
end
|
2018-01-21 15:49:27 -08:00
|
|
|
|
2018-01-27 18:09:27 -08:00
|
|
|
# Runs a YouTube search for `query` (sp=EgIQAVAU narrows results to videos)
# and yields the video ID of each result found on the page.
def search(query, client)
  html = client.get("https://www.youtube.com/results?q=#{query}&sp=EgIQAVAU").body

  html = XML.parse_html(html)

  html.xpath_nodes(%q(//ol[@class="item-section"]/li)).each do |item|
    root = item.xpath_node(%q(div[contains(@class,"yt-lockup-video")]/div))
    if root
      link = root.xpath_node(%q(div[contains(@class,"yt-lockup-thumbnail")]/a/@href))
      if link
        # href has the shape "/watch?v=<id>"; take everything after the "="
        yield link.content.split("=")[1]
      end
    end
  end
end
|
2018-02-03 12:41:59 -08:00
|
|
|
|
2018-02-22 11:01:37 -08:00
|
|
|
# Swaps the first element of `a` with the element at index `b % a.size`,
# mutating `a` in place, and returns it. Mirrors the "splice" step of
# YouTube's signature-scrambling routine (see decrypt_signature).
def splice(a, b)
  index = b % a.size
  a[0], a[index] = a[index], a[0]
  return a
end
|
|
|
|
|
2018-02-22 11:01:37 -08:00
|
|
|
# Reverses YouTube's stream-signature scrambling. The exact sequence of
# operations (reverse / drop-prefix / splice-swap) mirrors the obfuscated
# transformations in YouTube's player JavaScript and must be updated in
# lockstep whenever YouTube rotates its player code.
def decrypt_signature(a)
  a = a.split("")

  a.reverse!
  a.delete_at(0..2)
  a.reverse!
  a.delete_at(0..2)
  a = splice(a, 38)
  a.delete_at(0..0)
  a = splice(a, 64)
  a.reverse!
  a.delete_at(0..1)

  return a.join("")
end
|
|
|
|
|
2018-03-16 17:36:49 -07:00
|
|
|
# Returns up to `n` video IDs ranked hottest-first: Wilson score with an
# exponential age decay. When `filter` is truthy, results are additionally
# restricted to English-language videos, classified once via DetectLanguage
# and cached in the `language` column.
# (Fix: on a failed get_video the HTTP client is now returned to the pool
# before skipping — previously `next` leaked one pooled client per failure.)
def rank_videos(db, n, pool, filter)
  top = [] of {Float64, String}

  db.query("SELECT id, wilson_score, published FROM videos WHERE views > 5000 ORDER BY published DESC LIMIT 1000") do |rs|
    rs.each do
      id = rs.read(String)
      wilson_score = rs.read(Float64)
      published = rs.read(Time)

      # Exponential decay, older videos tend to rank lower
      temperature = wilson_score * Math.exp(-0.000005*((Time.now - published).total_minutes))
      top << {temperature, id}
    end
  end

  top.sort!

  # Make hottest come first
  top.reverse!
  top = top.map { |a, b| b }

  if filter
    language_list = [] of String
    top.each do |id|
      if language_list.size == n
        break
      else
        client = get_client(pool)
        begin
          video = get_video(id, client, db)
        rescue ex
          # Return the client before skipping this video, otherwise the
          # pool permanently loses a client on every failed fetch
          pool << client
          next
        end

        pool << client

        if video.language
          language = video.language
        else
          # Classify once and cache the result on the video row
          description = XML.parse(video.description)
          content = [video.title, description.content].join(" ")
          # Truncate: keep the classification payload to a bounded size
          content = content[0, 10000]

          results = DetectLanguage.detect(content)
          language = results[0].language

          db.exec("UPDATE videos SET language = $1 WHERE id = $2", language, id)
        end

        if language == "en"
          language_list << id
        end
      end
    end
    return language_list
  else
    return top[0..n - 1]
  end
end
|
2018-02-05 17:07:49 -08:00
|
|
|
|
2018-03-04 20:25:03 -08:00
|
|
|
# Builds an HTTPS client for `url` with SSLv2/SSLv3 disabled and 10-second
# read/connect timeouts so a stalled remote host cannot hang a worker.
def make_client(url)
  context = OpenSSL::SSL::Context::Client.new
  context.add_options(
    OpenSSL::SSL::Options::ALL |
    OpenSSL::SSL::Options::NO_SSL_V2 |
    OpenSSL::SSL::Options::NO_SSL_V3
  )
  client = HTTP::Client.new(url, context)
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds
  return client
end
|
2018-03-03 13:06:14 -08:00
|
|
|
|
2018-03-04 08:59:03 -08:00
|
|
|
# Searches Reddit for submissions linking to video `id` and returns
# {comments, thread}: the top-comment listing (parsed JSON) and the
# highest-scoring submission it belongs to. Raises on unexpected HTTP codes.
def get_reddit_comments(id, client, headers)
  query = "(url:3D#{id}%20OR%20url:#{id})%20(site:youtube.com%20OR%20site:youtu.be)"
  search_results = client.get("/search.json?q=#{query}", headers)

  if search_results.status_code == 200
    search_results = RedditSubmit.from_json(search_results.body)

    # Pick the submission with the highest score
    thread = search_results.data.children.sort_by { |child| child.data.score }[-1]
    result = client.get("/r/#{thread.data.subreddit}/comments/#{thread.data.id}?limit=100&sort=top", headers).body
    result = JSON.parse(result)
  elsif search_results.status_code == 302
    # Reddit 302-redirects straight to the submission page in some cases;
    # follow it and extract the thread from the comment listing itself
    search_results = client.get(search_results.headers["Location"], headers).body

    result = JSON.parse(search_results)
    thread = RedditThread.from_json(result[0]["data"]["children"][0].to_json)
  else
    raise "Got error code #{search_results.status_code}"
  end

  comments = result[1]["data"]["children"]
  return comments, thread
end
|
|
|
|
|
|
|
|
# Recursively renders a Reddit comment listing (as returned by
# get_reddit_comments) into nested HTML. Replies are wrapped one grid
# column deeper than their parent comment.
def template_comments(root)
  html = ""
  root.each do |child|
    if child["data"]["body_html"]?
      author = child["data"]["author"]
      score = child["data"]["score"]
      body_html = HTML.unescape(child["data"]["body_html"].as_s)

      # Replace local links with links back to Reddit
      body_html = fill_links(body_html, "https", "www.reddit.com")

      replies_html = ""
      # "replies" is an empty string (not null) when a comment has none
      if child["data"]["replies"] != ""
        replies_html = template_comments(child["data"]["replies"]["data"]["children"])
      end

      content = <<-END_HTML
      <p>
      <a href="javascript:void(0)" onclick="toggle(this)">[ - ]</a> #{score} <b>#{author}</b>
      </p>
      <div>
      #{body_html}
      #{replies_html}
      </div>
      END_HTML

      if child["data"]["depth"].as_i > 0
        html += <<-END_HTML
        <div class="pure-g">
        <div class="pure-u-1-24"></div>
        <div class="pure-u-23-24">
        #{content}
        </div>
        </div>
        END_HTML
      else
        html += <<-END_HTML
        <div class="pure-g">
        <div class="pure-u-1">
        #{content}
        </div>
        </div>
        END_HTML
      end
    end
  end

  return html
end
|
2018-03-03 13:10:56 -08:00
|
|
|
|
|
|
|
# Formats a number with "," thousands separators, e.g. 1234567 -> "1,234,567".
# Works by reversing the digit string, inserting a comma after every group of
# three digits that is followed by another digit, then reversing back.
def number_with_separator(number)
  reversed = number.to_s.reverse
  grouped = reversed.gsub(/(\d{3})(?=\d)/, "\\1,")
  grouped.reverse
end
|
2018-03-04 06:54:19 -08:00
|
|
|
|
2018-03-29 17:03:00 -07:00
|
|
|
# Builds a comma-separated list of parenthesized Postgres placeholders for
# `array`, numbered from `start`: arg_array([a, b, c]) -> "($1),($2),($3)".
# `start` lets callers offset the numbering when earlier placeholders (such
# as an id in a WHERE clause) are already claimed.
def arg_array(array, start = 1)
  placeholders = (start...start + array.size).map { |i| "($#{i})" }
  return placeholders.join(",")
end
|
2018-03-06 20:00:35 -08:00
|
|
|
|
|
|
|
# Appends a small link icon after every anchor in `html` that points at a
# YouTube video, linking to the equivalent local /watch page. Returns the
# modified HTML string.
def add_alt_links(html)
  alt_links = [] of {Int32, String}

  # This is painful but is likely the only way to accomplish this in Crystal,
  # as Crystigiri and others are not able to insert XML Nodes into a document.
  # The goal here is to use as little regex as possible
  html.scan(/<a[^>]*>([^<]+)<\/a>/) do |match|
    anchor = XML.parse_html(match[0])
    anchor = anchor.xpath_node("//a").not_nil!
    url = URI.parse(HTML.unescape(anchor["href"]))

    if ["www.youtube.com", "m.youtube.com"].includes?(url.host) && url.path == "/watch"
      alt_link = <<-END_HTML
      <a href="#{url.full_path}">
      <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    elsif url.host == "youtu.be"
      # youtu.be short links carry the video ID as the path
      alt_link = <<-END_HTML
      <a href="/watch?v=#{url.full_path.lchop("/")}">
      <i class="fa fa-link" aria-hidden="true"></i>
      </a>
      END_HTML
    else
      alt_link = ""
    end

    # Remember where this anchor ends so the icon can be spliced in later
    alt_links << {match.end.not_nil!, alt_link}
  end

  # Insert back-to-front so earlier insertion offsets stay valid
  alt_links.reverse!
  alt_links.each do |position, alt_link|
    html = html.insert(position, alt_link)
  end

  return html
end
|
|
|
|
|
|
|
|
# Rewrites host-less hrefs in an HTML fragment to absolute URLs on the
# given scheme and host, returning the modified markup as a string.
def fill_links(html, scheme, host)
  html = XML.parse_html(html)

  html.xpath_nodes("//a").each do |match|
    url = URI.parse(match["href"])
    # Reddit links don't have host
    if !url.host
      url.scheme = scheme
      url.host = host
      match["href"] = url
    end
  end

  # Serialize back to markup; this is the method's return value
  html = html.to_xml
end
|
2018-03-16 09:40:29 -07:00
|
|
|
|
|
|
|
# Builds the URL-encoded body for Google's web sign-in request. Static
# GlifWebSignIn fields are merged with the hidden inputs scraped from the
# login form (`login_form`, which takes precedence on key collisions) and
# the challenge payload `f_req`.
def login_req(login_form, f_req)
  base = {
    "pstMsg"          => "1",
    "checkConnection" => "youtube",
    "checkedDomains"  => "youtube",
    "hl"              => "en",
    "deviceinfo"      => %q([null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]),
    "f.req"           => f_req,
    "flowName"        => "GlifWebSignIn",
    "flowEntry"       => "ServiceLogin",
  }

  return HTTP::Params.encode(base.merge(login_form))
end
|
2018-03-24 20:38:35 -07:00
|
|
|
|
|
|
|
# Returns the InvidiousChannel row for `id`, scraping the channel's RSS
# feed when the cached record is missing or more than a minute old.
def get_channel(id, client, db)
  if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
    channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)

    if Time.now - channel.updated > 1.minutes
      channel = fetch_channel(id, client, db)
      channel_array = channel.to_a
      args = arg_array(channel_array)

      # Upsert: on conflict only `updated` ($3) needs refreshing
      db.exec("INSERT INTO channels VALUES (#{args}) \
      ON CONFLICT (id) DO UPDATE SET updated = $3", channel_array)
    end
  else
    channel = fetch_channel(id, client, db)
    args = arg_array(channel.to_a)
    db.exec("INSERT INTO channels VALUES (#{args})", channel.to_a)
  end

  return channel
end
|
|
|
|
|
2018-03-28 20:29:54 -07:00
|
|
|
# Scrapes a channel's RSS feed, inserting each listed video into
# channel_videos (ignoring duplicates), and returns a fresh
# InvidiousChannel record for the channel itself.
def fetch_channel(id, client, db)
  rss = client.get("/feeds/videos.xml?channel_id=#{id}").body
  rss = XML.parse_html(rss)

  rss.xpath_nodes("//feed/entry").each do |entry|
    video_id = entry.xpath_node("videoid").not_nil!.content
    title = entry.xpath_node("title").not_nil!.content
    published = Time.parse(entry.xpath_node("published").not_nil!.content, "%FT%X%z")
    updated = Time.parse(entry.xpath_node("updated").not_nil!.content, "%FT%X%z")
    author = entry.xpath_node("author/name").not_nil!.content
    ucid = entry.xpath_node("channelid").not_nil!.content

    video = ChannelVideo.new(video_id, title, published, updated, ucid, author)

    video_array = video.to_a
    args = arg_array(video_array)

    # TODO: Update record on conflict
    db.exec("INSERT INTO channel_videos VALUES (#{args})\
    ON CONFLICT (id) DO NOTHING", video_array)
  end

  author = rss.xpath_node("//feed/author/name").not_nil!.content

  channel = InvidiousChannel.new(id, author, Time.now)

  return channel
end
|
2018-03-29 19:41:05 -07:00
|
|
|
|
|
|
|
# Returns the User row for session `sid`, re-scraping the subscription list
# when the cached record is missing or more than a minute old.
def get_user(sid, client, headers, db)
  if db.query_one?("SELECT EXISTS (SELECT true FROM users WHERE id = $1)", sid, as: Bool)
    user = db.query_one("SELECT * FROM users WHERE id = $1", sid, as: User)

    if Time.now - user.updated > 1.minutes
      user = fetch_user(sid, client, headers)
      user_array = user.to_a
      args = arg_array(user_array)

      # Upsert: refresh only `updated` ($2) and `subscriptions` ($4)
      db.exec("INSERT INTO users VALUES (#{args}) \
      ON CONFLICT (id) DO UPDATE SET updated = $2, subscriptions = $4", user_array)
    end
  else
    user = fetch_user(sid, client, headers)
    args = arg_array(user.to_a)
    db.exec("INSERT INTO users VALUES (#{args})", user.to_a)
  end

  return user
end
|
|
|
|
|
|
|
|
# Builds a User from the subscription takeout (OPML) of the signed-in
# session. Every subscribed channel is also fetched/refreshed as a side
# effect. The new User starts with 0 notifications.
def fetch_user(sid, client, headers)
  feed = client.get("/subscription_manager?action_takeout=1", headers).body

  channels = [] of String
  feed = XML.parse_html(feed)
  feed.xpath_nodes("//opml/outline/outline").each do |channel|
    # The channel ID is the trailing 24 characters of its feed URL
    id = channel["xmlurl"][-24..-1]
    # NOTE(review): uses the global PG_DB rather than a passed-in db handle,
    # unlike the other fetch_* helpers — consider threading db through.
    get_channel(id, client, PG_DB)

    channels << id
  end

  user = User.new(sid, Time.now, 0, channels)
  return user
end
|