Merge pull request #2968 from mathiusD/description-link
Commit 6ff3a633f7
2 changed files with 58 additions and 4 deletions

@@ -6,6 +6,7 @@ record AboutChannel,
  author_url : String,
  author_thumbnail : String,
  banner : String?,
  description : String,
  description_html : String,
  total_views : Int64,
  sub_count : Int32,

@@ -52,8 +53,7 @@ def get_about_info(ucid, locale) : AboutChannel
    banners = initdata["header"]["interactiveTabbedHeaderRenderer"]?.try &.["banner"]?.try &.["thumbnails"]?
    banner = banners.try &.[-1]?.try &.["url"].as_s?

    description = initdata["header"]["interactiveTabbedHeaderRenderer"]["description"]["simpleText"].as_s
    description_html = HTML.escape(description)
    description_node = initdata["header"]["interactiveTabbedHeaderRenderer"]["description"]

    is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
    allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)

@@ -75,13 +75,24 @@ def get_about_info(ucid, locale) : AboutChannel
    author_verified_badge = initdata["header"].dig?("c4TabbedHeaderRenderer", "badges", 0, "metadataBadgeRenderer", "tooltip")
    author_verified = (author_verified_badge && author_verified_badge == "Verified")

    description = initdata["metadata"]["channelMetadataRenderer"]?.try &.["description"]?.try &.as_s? || ""
    description_html = HTML.escape(description)
    description_node = initdata["metadata"]["channelMetadataRenderer"]?.try &.["description"]?

    is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
    allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)
  end

  description = !description_node.nil? ? description_node.as_s : ""
  description_html = HTML.escape(description)
  if !description_node.nil?
    if description_node.as_h?.nil?
      description_node = text_to_parsed_content(description_node.as_s)
    end
    description_html = parse_content(description_node)
    if description_html == "" && description != ""
      description_html = HTML.escape(description)
    end
  end

  total_views = 0_i64
  joined = Time.unix(0)

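The block above is the heart of the change: when the channel metadata only yields a plain-text description, it is first lifted into a YouTube-style "runs" structure and then rendered to HTML so that bare URLs become links. A minimal sketch of that flow, assuming a plain-text description (the sample string is invented; text_to_parsed_content and parse_content are defined further down in this diff):

    # Hypothetical plain-text description
    description = "Support the channel at https://example.com\nThanks!"

    # Wrap it into a {"runs" => [...]} structure so URLs become separate nodes
    description_node = text_to_parsed_content(description)

    # Render the runs to HTML (the code above falls back to HTML.escape if this comes back empty)
    description_html = parse_content(description_node)
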
@@ -125,6 +136,7 @@ def get_about_info(ucid, locale) : AboutChannel
    author_url: author_url,
    author_thumbnail: author_thumbnail,
    banner: banner,
    description: description,
    description_html: description_html,
    total_views: total_views,
    sub_count: sub_count,

@@ -560,6 +560,48 @@ def fill_links(html, scheme, host)
  return html.to_xml(options: XML::SaveOptions::NO_DECL)
end

def text_to_parsed_content(text : String) : JSON::Any
  nodes = [] of JSON::Any
  # Convert each line of the text into an array of nodes
  text.split('\n').each do |line|
    # Start with the whole line as a single plain-text node, then split it
    # up as URL patterns are found:
    # { 'text': line }
    currentNodes = [] of JSON::Any
    initialNode = {"text" => line}
    currentNodes << (JSON.parse(initialNode.to_json))

    # For each URL match, trim the last node and insert a new node carrying
    # the URL information:
    # { 'text': match, 'navigationEndpoint': { 'urlEndpoint': { 'url': match } } }
    line.scan(/https?:\/\/[^ ]*/).each do |urlMatch|
      # Take the last node and keep only the text before the match
      lastNode = currentNodes[currentNodes.size - 1].as_h
      splittedLastNode = lastNode["text"].as_s.split(urlMatch[0])
      lastNode["text"] = JSON.parse(splittedLastNode[0].to_json)
      currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
      # Create a new node with the match and its navigation info
      currentNode = {"text" => urlMatch[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => urlMatch[0]}}}
      currentNodes << (JSON.parse(currentNode.to_json))
      # If text remains after the match, append it as a new plain node
      afterNode = {"text" => splittedLastNode.size > 0 ? splittedLastNode[1] : ""}
      currentNodes << (JSON.parse(afterNode.to_json))
    end

    # After all matches in the line are processed, append \n to the last
    # node to preserve the line break
    lastNode = currentNodes[currentNodes.size - 1].as_h
    lastNode["text"] = JSON.parse("#{currentNodes[currentNodes.size - 1]["text"]}\n".to_json)
    currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)

    # Finally, append this line's nodes to the result
    currentNodes.each do |node|
      nodes << (node)
    end
  end
  return JSON.parse({"runs" => nodes}.to_json)
end

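To make the resulting shape concrete, a line containing a single URL comes out roughly as follows (the input string is invented; the key names match the code above):

    text_to_parsed_content("See https://example.com for details")
    # => {"runs" => [
    #      {"text" => "See "},
    #      {"text" => "https://example.com",
    #       "navigationEndpoint" => {"urlEndpoint" => {"url" => "https://example.com"}}},
    #      {"text" => " for details\n"}
    #    ]}
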
def parse_content(content : JSON::Any, video_id : String? = "") : String
  content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
    content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
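parse_content handles both description shapes: a {"simpleText" => ...} node is escaped directly, while a {"runs" => [...]} structure is rendered through the existing content_to_comment_html helper with newlines turned into <br>. Roughly, for the invented example above (the exact markup depends on content_to_comment_html):

    parse_content(text_to_parsed_content("See https://example.com for details"))
    # => something like: "See <a href=\"https://example.com\">https://example.com</a> for details<br>"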