Commit

Merge from upstream repo
Highlights in this merge:
- Improvements to RSS feed contents: truncate long titles, render
  line breaks in tweets
- Support for twitter.com/i/user/[userid] URLs
- Improved fix: fall back to hardcoded defaults when replaceYouTube is
  missing or blank in the config file
- Add `rel canonical` headers
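The `rel canonical` change itself lives in a view template that is not among the diffs shown below. Purely as an illustration of the kind of tag involved (using the standard htmlgen module this merge also pulls into formatters.nim; the hostname and path are made up):

import htmlgen

let canonicalUrl = "https://nitter.example.com/jack/status/20"
# Tells crawlers which URL is the preferred address for this page
echo link(rel="canonical", href=canonicalUrl)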
acarasimon96 committed Dec 30, 2021
2 parents 2b26edc + 51c6605 commit 59b1b63
Showing 18 changed files with 124 additions and 73 deletions.
7 changes: 7 additions & 0 deletions src/api.nim
@@ -22,6 +22,7 @@ proc getGraphListById*(id: string): Future[List] {.async.} =
result = parseGraphList(js)

proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
+ if id.len == 0: return
let
ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
url = listTimeline ? ps
@@ -40,6 +41,12 @@ proc getProfile*(username: string): Future[Profile] {.async.} =
url = userShow ? ps
result = parseUserShow(await fetch(url, oldApi=true), username)

+ proc getProfileById*(userId: string): Future[Profile] {.async.} =
+   let
+     ps = genParams({"user_id": userId})
+     url = userShow ? ps
+   result = parseUserShowId(await fetch(url, oldApi=true), userId)

proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
let
ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
12 changes: 8 additions & 4 deletions src/apiutils.nim
@@ -49,11 +49,15 @@ proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
let headers = genHeaders(token)
try:
var resp: AsyncResponse
- let body = pool.use(headers):
+ var body = pool.use(headers):
resp = await c.get($url)
- let raw = await resp.body
- if raw.len == 0: ""
- else: uncompress(raw)
+ await resp.body

+ if body.len > 0:
+   if resp.headers.getOrDefault("content-encoding") == "gzip":
+     body = uncompress(body, dfGzip)
+   else:
+     echo "non-gzip body, url: ", url, ", body: ", body

if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
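For reference, `uncompress` and `dfGzip` above come from the zippy compression library. A minimal standalone sketch of the same decode-only-when-gzip check, with a made-up body and header value standing in for the HTTP response:

import zippy

let raw = compress("""{"ok": true}""", dataFormat = dfGzip)  # stand-in for a response body
let contentEncoding = "gzip"                                 # stand-in for the response header

var body = raw
if body.len > 0:
  if contentEncoding == "gzip":
    body = uncompress(body, dfGzip)
  else:
    echo "non-gzip body: ", body

doAssert body == """{"ok": true}"""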
4 changes: 2 additions & 2 deletions src/config.nim
@@ -2,8 +2,8 @@
import parsecfg except Config
import types, strutils

- proc get*[T](config: parseCfg.Config; s, v: string; default: T): T =
-   let val = config.getSectionValue(s, v)
+ proc get*[T](config: parseCfg.Config; section, key: string; default: T): T =
+   let val = config.getSectionValue(section, key)
if val.len == 0: return default

when T is int: parseInt(val)
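Usage sketch for the renamed parameters. Only the `when T is int` branch is visible above; the string fallback below, and the section and key names, are illustrative assumptions:

import parsecfg, strutils

proc get[T](config: parsecfg.Config; section, key: string; default: T): T =
  let val = config.getSectionValue(section, key)
  if val.len == 0: return default
  when T is int: parseInt(val)
  else: val            # assumed fallback for string values

var conf = newConfig()
conf.setSectionKey("Server", "port", "8080")

echo conf.get("Server", "port", 80)              # prints 8080 (parsed from the config)
echo conf.get("Server", "hostname", "0.0.0.0")   # prints 0.0.0.0 (missing key, default used)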
14 changes: 9 additions & 5 deletions src/formatters.nim
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
- import strutils, strformat, times, uri, tables, xmltree, htmlparser
+ import strutils, strformat, times, uri, tables, xmltree, htmlparser, htmlgen
import regex
import types, utils, query

@@ -15,6 +15,8 @@ const
# wasn't first displayed via a post on the Teddit instance.

twRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?twitter\.com"
+ twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""

cards = "cards.twitter.com/cards"
tco = "https://t.co"

@@ -28,7 +30,7 @@ const
twitter = parseUri("https://twitter.com")

proc getUrlPrefix*(cfg: Config): string =
- if cfg.useHttps: "https://" & cfg.hostname
+ if cfg.useHttps: https & cfg.hostname
else: "http://" & cfg.hostname

proc stripHtml*(text: string): string =
@@ -57,10 +59,12 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
result = result.replace("/c/", "/")

if prefs.replaceTwitter.len > 0 and
- (twRegex in result or tco in result):
- result = result.replace(tco, "https://" & prefs.replaceTwitter & "/t.co")
+ (twRegex in result or twLinkRegex in result or tco in result):
+ result = result.replace(tco, https & prefs.replaceTwitter & "/t.co")
result = result.replace(cards, prefs.replaceTwitter & "/cards")
result = result.replace(twRegex, prefs.replaceTwitter)
+ result = result.replace(twLinkRegex, a(
+   prefs.replaceTwitter & "$2", href = https & prefs.replaceTwitter & "$1"))

if prefs.replaceReddit.len > 0 and (rdRegex in result or "redd.it" in result):
result = result.replace(rdShortRegex, prefs.replaceReddit & "/comments/")
@@ -158,7 +162,7 @@ proc getTwitterLink*(path: string; params: Table[string, string]): string =
path = "/search"

if "/search" notin path and query.fromUser.len < 2:
- return $(twitter / path ? filterParams(params))
+ return $(twitter / path)

let p = {
"f": if query.kind == users: "user" else: "live",
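To illustrate what the new twLinkRegex substitution in replaceUrls does, here is a standalone sketch using the same regex module and htmlgen call shape as above (the hostname and tweet URL are made up):

import regex, htmlgen

const
  https = "https://"
  twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""

let
  body = """<p><a href="https://twitter.com/jack/status/20">twitter.com/jack/status/20</a></p>"""
  replaceTwitter = "nitter.example.com"

echo body.replace(twLinkRegex, a(
  replaceTwitter & "$2", href = https & replaceTwitter & "$1"))
# <p><a href="https://nitter.example.com/jack/status/20">nitter.example.com/jack/status/20</a></p>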
20 changes: 16 additions & 4 deletions src/parser.nim
@@ -38,6 +38,18 @@ proc parseUserShow*(js: JsonNode; username: string): Profile =

result = parseProfile(js)

+ proc parseUserShowId*(js: JsonNode; userId: string): Profile =
+   if js.isNull:
+     return Profile(id: userId)
+
+   with error, js{"errors"}:
+     result = Profile(id: userId)
+     if error.getError == suspended:
+       result.suspended = true
+     return
+
+   result = parseProfile(js)

proc parseGraphProfile*(js: JsonNode; username: string): Profile =
if js.isNull: return
with error, js{"errors"}:
@@ -271,17 +283,17 @@ proc parseTweet(js: JsonNode): Tweet =
else: discard

with jsWithheld, js{"withheld_in_countries"}:
- var withheldInCountries: seq[string]
-
- if jsWithheld.kind == JArray:
-   withheldInCountries = jsWithheld.to(seq[string])
+ let withheldInCountries: seq[string] =
+   if jsWithheld.kind != JArray: @[]
+   else: jsWithheld.to(seq[string])

# XX - Content is withheld in all countries
# XY - Content is withheld due to a DMCA request.
if js{"withheld_copyright"}.getBool or
withheldInCountries.len > 0 and ("XX" in withheldInCountries or
"XY" in withheldInCountries or
"withheld" in result.text):
result.text.removeSuffix(" Learn more.")
result.available = false

proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
6 changes: 3 additions & 3 deletions src/prefs_impl.nim
@@ -97,15 +97,15 @@ genPrefs:
"Autoplay gifs"

"Link replacements (blank to disable)":
- replaceTwitter(input, "nitter.net"):
+ replaceTwitter(input, ""):
"Twitter -> Nitter"
placeholder: "Nitter hostname"

- replaceYouTube(input, "piped.kavin.rocks"):
+ replaceYouTube(input, ""):
"YouTube -> Piped/Invidious"
placeholder: "Piped hostname"

- replaceReddit(input, "teddit.net"):
+ replaceReddit(input, ""):
"Reddit -> Teddit/Libreddit"
placeholder: "Teddit hostname"

10 changes: 10 additions & 0 deletions src/redis_cache.nim
@@ -78,6 +78,7 @@ proc cache*(data: Profile) {.async.} =
pool.withAcquire(r):
r.startPipelining()
discard await r.setex(name.profileKey, baseCacheTime, compress(toFlatty(data)))
discard await r.setex("i:" & data.id , baseCacheTime, data.username)
discard await r.hset(name.pidKey, name, data.id)
discard await r.flushPipeline()

@@ -110,6 +111,15 @@ proc getCachedProfile*(username: string; fetch=true): Future[Profile] {.async.}
elif fetch:
result = await getProfile(username)

+ proc getCachedProfileUsername*(userId: string): Future[string] {.async.} =
+   let username = await get("i:" & userId)
+   if username != redisNil:
+     result = username
+   else:
+     let profile = await getProfileById(userId)
+     result = profile.username
+     await cache(profile)

proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return
let rail = await get("pr:" & toLower(name))
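A hedged sketch of the new id-to-username lookup key, using the same redis calls as above (requires a reachable local Redis; the user id and name are made up):

import asyncdispatch, redis

proc demo() {.async.} =
  let r = await openAsync()                        # defaults to localhost:6379
  # cache*(data: Profile) now also stores the username under "i:<user id>"
  discard await r.setex("i:783214", 600, "Twitter")
  # getCachedProfileUsername checks that key before falling back to getProfileById
  echo await r.get("i:783214")                     # prints: Twitter

waitFor demo()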
5 changes: 2 additions & 3 deletions src/routes/search.nim
@@ -4,7 +4,7 @@ import strutils, uri
import jester

import router_utils
import ".."/[query, types, api]
import ".."/[query, types, api, formatters]
import ../views/[general, search]

include "../views/opensearch.nimf"
@@ -40,7 +40,6 @@ proc createSearchRouter*(cfg: Config) =
redirect("/search?q=" & encodeUrl("#" & @"hash"))

get "/opensearch":
- var url = if cfg.useHttps: "https://" else: "http://"
- url &= cfg.hostname & "/search?q="
+ let url = getUrlPrefix(cfg) & "/search?q="
resp Http200, {"Content-Type": "application/opensearchdescription+xml"},
generateOpenSearchXML(cfg.title, cfg.hostname, url)
5 changes: 4 additions & 1 deletion src/routes/status.nim
@@ -18,6 +18,7 @@ proc createStatusRouter*(cfg: Config) =
cond '.' notin @"name"
let prefs = cookiePrefs()

+ # used for the infinite scroll feature
if @"scroll".len > 0:
let replies = await getReplies(@"id", getCursor())
if replies.content.len == 0:
@@ -34,10 +35,12 @@
error = conv.tweet.tombstone
resp Http404, showError(error, cfg)

- var
+ let
title = pageTitle(conv.tweet)
ogTitle = pageTitle(conv.tweet.profile)
desc = conv.tweet.text

+ var
images = conv.tweet.photos
video = ""

30 changes: 22 additions & 8 deletions src/routes/timeline.nim
@@ -78,9 +78,6 @@ proc fetchSingleTimeline*(after: string; query: Query; skipRail=false):

return (profile, timeline, await rail)

- proc get*(req: Request; key: string): string =
-   params(req).getOrDefault(key)

proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
@@ -105,8 +102,22 @@ template respTimeline*(timeline: typed) =
resp Http404, showError("User \"" & @"name" & "\" not found", cfg)
resp t

+ template respUserId*() =
+   cond @"user_id".len > 0
+   let username = await getCachedProfileUsername(@"user_id")
+   if username.len > 0:
+     redirect("/" & username)
+   else:
+     resp Http404, showError("User not found", cfg)

proc createTimelineRouter*(cfg: Config) =
router timeline:
get "/i/user/@user_id":
respUserId()

get "/intent/user":
respUserId()

get "/@name/?@tab?/?":
cond '.' notin @"name"
cond @"name" notin ["pic", "gif", "video"]
@@ -120,6 +131,7 @@ proc createTimelineRouter*(cfg: Config) =
if names.len != 1:
query.fromUser = names

+ # used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
var timeline = await getSearch[Tweet](query, after)
@@ -132,10 +144,12 @@
timeline.beginning = true
resp $renderTimelineTweets(timeline, prefs, getPath())

var rss = "/$1/$2/rss" % [@"name", @"tab"]
if @"tab".len == 0:
rss = "/$1/rss" % @"name"
elif @"tab" == "search":
rss &= "?" & genQueryUrl(query)
let rss =
if @"tab".len == 0:
"/$1/rss" % @"name"
elif @"tab" == "search":
"/$1/search/rss?$2" % [@"name", genQueryUrl(query)]
else:
"/$1/$2/rss" % [@"name", @"tab"]

respTimeline(await showTimeline(request, query, cfg, prefs, rss, after))
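The rss block above was reworked into a single expression; a quick standalone check that the three cases still produce the expected paths (usernames and query strings are made up):

import strutils

proc rssPath(name, tab, queryUrl: string): string =
  if tab.len == 0: "/$1/rss" % name
  elif tab == "search": "/$1/search/rss?$2" % [name, queryUrl]
  else: "/$1/$2/rss" % [name, tab]

doAssert rssPath("jack", "", "") == "/jack/rss"
doAssert rssPath("jack", "media", "") == "/jack/media/rss"
doAssert rssPath("jack", "search", "q=nitter") == "/jack/search/rss?q=nitter"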
7 changes: 5 additions & 2 deletions src/routes/unsupported.nim
@@ -11,10 +11,13 @@ proc createUnsupportedRouter*(cfg: Config) =
resp renderMain(renderFeature(), request, cfg, themePrefs())

get "/about/feature": feature()
get "/intent/?@i?": feature()
get "/login/?@i?": feature()
get "/@name/lists/?": feature()

get "/intent/?@i?":
cond @"i" notin ["user"]
feature()

get "/i/@i?/?@j?":
cond @"i" notin ["status", "lists"]
cond @"i" notin ["status", "lists" , "user"]
feature()
4 changes: 0 additions & 4 deletions src/sass/timeline.scss
@@ -158,8 +158,4 @@
padding: .75em;
display: flex;
position: relative;

- &.unavailable {
-   flex-direction: column;
- }
}
1 change: 1 addition & 0 deletions src/sass/tweet/_base.scss
@@ -191,6 +191,7 @@
box-sizing: border-box;
border-radius: 10px;
background-color: var(--bg_color);
+ z-index: 2;
}

.tweet-link {
5 changes: 0 additions & 5 deletions src/types.nim
@@ -229,11 +229,6 @@ type
redisMaxConns*: int
redisPassword*: string

- replaceTwitter*: string
- replaceYouTube*: string
- replaceReddit*: string
- replaceInstagram*: string

Rss* = object
feed*, cursor*: string

6 changes: 4 additions & 2 deletions src/utils.nim
@@ -52,8 +52,10 @@ proc cleanFilename*(filename: string): string =
result &= ".png"

proc filterParams*(params: Table): seq[(string, string)] =
- const filter = ["name", "id", "list", "referer", "scroll"]
- toSeq(params.pairs()).filterIt(it[0] notin filter and it[1].len > 0)
+ const filter = ["name", "tab", "id", "list", "referer", "scroll"]
+ for p in params.pairs():
+   if p[1].len > 0 and p[0] notin filter:
+     result.add p

proc isTwitterUrl*(uri: Uri): bool =
uri.hostname in twitterDomains
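Behaviour check for the reworked filterParams: "tab" is now filtered out as well, and empty values are still dropped (the parameter values are made up):

import tables

proc filterParams(params: Table[string, string]): seq[(string, string)] =
  const filter = ["name", "tab", "id", "list", "referer", "scroll"]
  for p in params.pairs():
    if p[1].len > 0 and p[0] notin filter:
      result.add p

let sample = {"name": "jack", "tab": "media", "cursor": "ABC123", "empty": ""}.toTable
doAssert filterParams(sample) == @[("cursor", "ABC123")]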