Chunk videoplayback response to avoid throttling
commit 818cd2454d
parent b31d1c06f5
3 changed files with 66 additions and 30 deletions
@@ -54,6 +54,7 @@ MAX_ITEMS_PER_PAGE = 1500

REQUEST_HEADERS_WHITELIST = {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "Content-Length", "If-None-Match", "Range"}
RESPONSE_HEADERS_BLACKLIST = {"Access-Control-Allow-Origin", "Alt-Svc", "Server"}
HTTP_CHUNK_SIZE = 10485760 # ~10MB

CURRENT_BRANCH = {{ "#{`git branch | sed -n '/\* /s///p'`.strip}" }}
CURRENT_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
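For orientation (editor's note, not part of the commit): 10485760 bytes is 10 MiB, so each upstream request asks for at most ten mebibytes of the stream, and per the commit title it is this splitting into smaller ranged requests that avoids the throttling. A standalone sketch of how a hypothetical 25 MiB file breaks down at that chunk size:

  chunk_size = 10_485_760            # 10 * 1024 * 1024, the same value as HTTP_CHUNK_SIZE
  total_size = 26_214_400            # hypothetical 25 MiB file, illustration only

  (0...total_size).step(chunk_size) do |first|
    last = Math.min(first + chunk_size - 1, total_size - 1)
    puts "bytes=#{first}-#{last}"    # bytes=0-10485759, bytes=10485760-20971519, bytes=20971520-26214399
  end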
@@ -4648,40 +4649,57 @@ get "/videoplayback" do |env|
    next
  end

  begin
    range_begin, range_end = parse_range(env.request.headers["Range"]?)

    client = make_client(URI.parse(host), proxies, region)
    (range_begin...range_end).each_slice(HTTP_CHUNK_SIZE) do |slice|
      headers["Range"] = "bytes=#{slice[0]}-#{slice[-1]}"
      begin
        client.get(url, headers) do |response|
          content_range = response.headers["Content-Range"].lchop("bytes ")
          content_size = content_range.split("/")[-1].to_i

          # Write headers for first chunk
          if content_range.split("-")[0].to_i == range_begin
            if !env.request.headers["Range"]? && response.status_code == 206
              env.response.status_code = 200
            else
              env.response.status_code = response.status_code
            end

            response.headers.each do |key, value|
-             if !RESPONSE_HEADERS_BLACKLIST.includes? key
+             if !RESPONSE_HEADERS_BLACKLIST.includes?(key) && key != "Content-Range"
                env.response.headers[key] = value
              end
            end

            reported_end = range_end ? range_end : content_size
            env.response.content_length = reported_end - range_begin

            if env.request.headers["Range"]?
              env.response.headers["Content-Range"] = "bytes #{range_begin}-#{reported_end - 1}/#{content_size}"
            end

            env.response.headers["Access-Control-Allow-Origin"] = "*"

            if response.headers["Location"]?
              url = URI.parse(response.headers["Location"])
              host = url.host

              url = url.full_path
              url += "&host=#{host}"

              if region
                url += "&region=#{region}"
              end

              next env.redirect url
            end

            if title = query_params["title"]?
              # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
              env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}"
            end
          end

          proxy_file(response, env)
        end
      rescue ex
        # FIXME: Potential bug in HTTP::Client
        if ex.message == "Error reading socket: Connection reset by peer"
          next
        else
          raise ex
        end
      end
    end
  rescue ex
  end
end
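A rough, self-contained sketch of the string handling the chunk loop above depends on; the header value here is made up, only the parsing pattern comes from the diff. The response headers are written once, on the chunk whose first byte equals range_begin, and the upstream Content-Range is excluded by the new blacklist condition so the handler can emit its own covering the full requested range:

  content_range = "bytes 0-10485759/146515712".lchop("bytes ")    # hypothetical upstream Content-Range value
  content_size  = content_range.split("/")[-1].to_i               # 146515712, total size reported upstream
  first_byte    = content_range.split("-")[0].to_i                # 0, compared against range_begin

  range_begin, reported_end = 0, content_size
  puts "bytes #{range_begin}-#{reported_end - 1}/#{content_size}" # shape of the Content-Range sent to the client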
@@ -656,7 +656,7 @@ def proxy_file(response, env)
end

# https://stackoverflow.com/a/44802810 <3
-def copy_in_chunks(input, output, chunk_size = 4096)
+def copy_in_chunks(input, output, chunk_size = 8192)
  size = 1
  while size > 0
    size = IO.copy(input, output, chunk_size)
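The loop above relies on IO.copy with a byte limit, which copies at most chunk_size bytes per call and returns the number of bytes actually copied (0 once the input is exhausted). A minimal sketch of that behaviour using in-memory IO objects, illustration only:

  input  = IO::Memory.new("x" * 20_000)   # stand-in for the upstream response body
  output = IO::Memory.new                 # stand-in for the client connection

  size = 1
  while size > 0
    size = IO.copy(input, output, 8192)   # at most 8192 bytes per call
  end

  puts output.size   # => 20000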
@@ -358,3 +358,21 @@ def subscribe_pubsub(topic, key, config)

  return client.post("/subscribe", form: body)
end

def parse_range(range)
  if !range
    return 0, nil
  end

  ranges = range.lchop("bytes=").split(',')
  ranges.each do |range|
    start_range, end_range = range.split('-')

    start_range = start_range.to_i? || 0
    end_range = end_range.to_i?.try &.+ 1

    return start_range, end_range
  end

  return 0, nil
end
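For reference, hypothetical Range header values and the tuples the new parse_range returns for them (a sketch; the end offset comes back exclusive because end_range gets + 1 applied, and only the first of several ranges is honoured since the loop returns on its first iteration):

  parse_range(nil)               # => {0, nil}    no Range header: proxy the whole file
  parse_range("bytes=0-1023")    # => {0, 1024}   1023 + 1, end offset is exclusive
  parse_range("bytes=500-")      # => {500, nil}  open-ended range
  parse_range("bytes=0-99,200-") # => {0, 100}    only the first range is used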