Merge pull request #1600 from jksladjflkjsadflkjsadf/closeclients
Close http clients after use
Commit 82c8f3b556
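The pattern throughout this diff is to stop leaving HTTP clients open: either call `client.close` once the response has been read, or go through a new block-accepting `make_client` overload (added near the end of the diff) that yields a client and closes it in an `ensure`. A minimal, self-contained sketch of that pattern, using Crystal's `HTTP::Client` directly as a stand-in for Invidious' own `make_client(url, region)` builder (`example.com` is a placeholder):

require "http/client"

# Stand-in for the existing make_client(url : URI, region = nil) builder.
def make_client(url : URI)
  HTTP::Client.new(url)
end

# Block overload mirroring the one added in this diff: yield the client,
# then close it even if the block raises.
def make_client(url : URI, &block)
  client = make_client(url)
  begin
    yield client
  ensure
    client.close
  end
end

# `&.get("/")` is shorthand for a block that calls `get("/")` on the yielded client.
response = make_client(URI.parse("https://example.com"), &.get("/"))
puts response.status_code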
@@ -2133,14 +2133,13 @@ get "/api/v1/annotations/:id" do |env|
 
 file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")
 
-client = make_client(ARCHIVE_URL)
-location = client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
+location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))
 
 if !location.headers["Location"]?
   env.response.status_code = location.status_code
 end
 
-response = make_client(URI.parse(location.headers["Location"])).get(location.headers["Location"])
+response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))
 
 if response.body.empty?
   env.response.status_code = 404
@@ -3498,8 +3497,12 @@ get "/videoplayback" do |env|
 location = URI.parse(response.headers["Location"])
 env.response.headers["Access-Control-Allow-Origin"] = "*"
 
-host = "#{location.scheme}://#{location.host}"
-client = make_client(URI.parse(host), region)
+new_host = "#{location.scheme}://#{location.host}"
+if new_host != host
+  host = new_host
+  client.close
+  client = make_client(URI.parse(new_host), region)
+end
 
 url = "#{location.full_path}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
 else
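The hunk above also changes the reconnect logic: instead of unconditionally building a fresh client for the redirect target, the existing client is kept when the redirect stays on the same host, and it is explicitly closed before being replaced when the host changes. A rough standalone illustration of that check (plain `HTTP::Client`, placeholder hosts, no `region` handling):

require "http/client"

host = "https://r1.example.com"                 # origin the current client points at (placeholder)
client = HTTP::Client.new(URI.parse(host))

# Suppose a redirect response told us to continue somewhere else:
location = URI.parse("https://r2.example.net/videoplayback?itag=22")
new_host = "#{location.scheme}://#{location.host}"

if new_host != host
  host = new_host
  client.close                                  # drop the old connection before replacing it
  client = HTTP::Client.new(URI.parse(new_host))
end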
@@ -3530,7 +3533,6 @@ get "/videoplayback" do |env|
 end
 
 begin
-  client = make_client(URI.parse(host), region)
   client.get(url, headers) do |response|
     response.headers.each do |key, value|
       if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
@@ -3571,8 +3573,6 @@ get "/videoplayback" do |env|
 chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
 end
 
-client = make_client(URI.parse(host), region)
-
 # TODO: Record bytes written so we can restart after a chunk fails
 while true
   if !range_end && content_length
@@ -3636,6 +3636,7 @@ get "/videoplayback" do |env|
 if ex.message != "Error reading socket: Connection reset by peer"
   break
 else
+  client.close
   client = make_client(URI.parse(host), region)
 end
 end
@@ -3645,6 +3646,7 @@ get "/videoplayback" do |env|
 first_chunk = false
 end
 end
+client.close
 end
 
 get "/ggpht/*" do |env|
@@ -269,6 +269,8 @@ def fetch_reddit_comments(id, sort_by = "confidence")
 raise InfoException.new("Could not fetch comments")
 end
 
+client.close
+
 comments = result[1].data.as(RedditListing).children
 return comments, thread
 end
@@ -108,7 +108,9 @@ def filter_proxies(proxies)
 proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
 client.set_proxy(proxy)
 
-client.head("/").status_code == 200
+status_ok = client.head("/").status_code == 200
+client.close
+status_ok
 rescue ex
   false
 end
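Here the status check is captured in `status_ok` before the close so the method still returns the boolean. The block overload added in this diff could express the same thing, since the block's value is what the call returns; a hypothetical sketch, assuming the client were built through `make_client` (the real `filter_proxies` configures a proxy on the client first, which is presumably why it keeps the manual close):

# Hypothetical alternative built on the make_client block overload from this diff:
# the client is closed automatically and the block's boolean is returned.
def proxy_alive?(url : URI) : Bool
  make_client(url) { |client| client.head("/").status_code == 200 }
rescue
  false
end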
@@ -132,6 +134,7 @@ def get_nova_proxies(country_code = "US")
 headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"
 
 response = client.get("/proxy-server-list/country-#{country_code}/", headers)
+client.close
 document = XML.parse_html(response.body)
 
 proxies = [] of {ip: String, port: Int32, score: Float64}
@@ -177,6 +180,7 @@ def get_spys_proxies(country_code = "US")
 }
 
 response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
+client.close
 20.times do
   if response.status_code == 200
     break
@@ -101,6 +101,15 @@ def make_client(url : URI, region = nil)
 return client
 end
 
+def make_client(url : URI, region = nil, &block)
+  client = make_client(url, region)
+  begin
+    yield client
+  ensure
+    client.close
+  end
+end
+
 def decode_length_seconds(string)
 length_seconds = string.gsub(/[^0-9:]/, "").split(":").map &.to_i
 length_seconds = [0] * (3 - length_seconds.size) + length_seconds
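Because the overload wraps `yield` in `begin ... ensure ... end`, the client is closed even when the block raises, and the block's value still becomes the return value of the call; that is what lets one-liners elsewhere in this diff, such as `make_client(PUBSUB_URL, &.post("/subscribe", form: body))`, hand the response straight back. A small usage sketch (placeholder URL and path):

# The block's result is returned to the caller; the ensure closes the client
# whether the request succeeds or raises.
body = make_client(URI.parse("https://example.com")) do |client|
  client.get("/status.json").body
end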
@@ -361,7 +370,7 @@ def subscribe_pubsub(topic, key, config)
 "hub.secret" => key.to_s,
 }
 
-return make_client(PUBSUB_URL).post("/subscribe", form: body)
+return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
 end
 
 def parse_range(range)
@@ -91,6 +91,8 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
 },
 }.to_json).body)
 
+captcha_client.close
+
 raise response["error"].as_s if response["error"]?
 task_id = response["taskId"].as_i
 
@@ -427,7 +427,7 @@ def generate_captcha(key, db)
 end
 
 def generate_text_captcha(key, db)
-response = make_client(TEXTCAPTCHA_URL).get("/omarroth@protonmail.com.json").body
+response = make_client(TEXTCAPTCHA_URL, &.get("/omarroth@protonmail.com.json").body)
 response = JSON.parse(response)
 
 tokens = response["a"].as_a.map do |answer|