// redditimages2telegram.own
// Posts fresh image submissions from configured subreddits
// to a Telegram chat as a single media group.
// At the start of a main script, include config
// include "config.own"
use ["std", "math", "http", "json", "functional", "files", "jdbc"]
// Telegram
// Serialises a params map into "key=value&" pairs. The trailing "&"
// is kept on purpose: createRawUrl appends access_token right after it.
def toParams(obj) {
  query = ""
  for key, value : obj {
    query += key + "=" + urlencode(value) + "&"
  }
  return query
}
// Assembles a Telegram Bot API endpoint URL from an already-encoded
// query string. The token appears both in the path and as an
// access_token parameter (params is expected to end with "&").
def createRawUrl(method, params, token = "") {
  base = "https://api.telegram.org/bot" + token + "/" + method
  return base + "?" + params + "access_token=" + token
}
// Same as createRawUrl, but first encodes a params map via toParams.
def createUrl(method, params, token = "") = createRawUrl(method, toParams(params), token)
// Calls a Telegram method asynchronously; callback receives the response
// already decoded from JSON.
def invokeJson(method, params, callback) = http(createUrl(method, params, config.token), combine(::jsondecode, callback))
// Calls a Telegram method asynchronously; callback receives the raw body.
def invoke(method, params, callback) = http(createUrl(method, params, config.token), callback)
// Sends a single photo (by URL) to the given chat.
def sendPhoto(chatId, url) {
  params = {
    "chat_id": chatId,
    "photo": url
  }
  invoke("sendPhoto", params, ::debug)
}
// Sends an album in one message; the Bot API expects the media array
// as a JSON-encoded string, not a nested object.
def sendMediaGroup(chatId, media) {
  params = {
    "chat_id": chatId,
    "media": jsonencode(media)
  }
  invoke("sendMediaGroup", params, ::debug)
}
// Reddit
// Downloads a subreddit's JSON listing and maps each post to the flat
// record used by the rest of the script.
// Returns [] when the request fails or the listing is empty.
def fetchSubreddit(subreddit) {
  url = "https://www.reddit.com/r/" + subreddit + ".json"
  // Synchronously wait for the async http call; default to [] on failure.
  data = sync(def(ret) = http(url, combine(::jsondecode, ret))).data ?? []
  if (!length(data)) return []

  return stream(data.children)
      .map(def(child) = child.data)
      // data.dist is reddit's item count for this page.
      .limit(min(config["items-in-top"], data.dist ?? 0))
      .map(def(post) = {
        "id": post.id,
        "sub": subreddit,
        "url": post.url,
        "time": post.created_utc,
        "title": post.title,
        "permalink": post.permalink,
        // Reddit sends "link_flair_text": null for posts without flair,
        // so coalesce to "" even when the key exists — otherwise
        // length(post.flair_text) breaks downstream in getCaption.
        "flair_text": arrayKeyExists("link_flair_text", post) ? (post.link_flair_text ?? "") : ""
      })
      .toArray()
}
// Fetches every configured subreddit and concatenates the post records.
def fetchAll(subreddits) {
  return stream(subreddits)
      .flatMap(::fetchSubreddit)
      .toArray()
}
// Database
// Database: local SQLite file remembering which posts were already sent.
conn = getConnection("jdbc:sqlite:redditimages.db")
st = conn.createStatement()
// "STRING" is not a recognised SQLite type name and receives NUMERIC
// affinity, which would coerce numeric-looking post ids; TEXT stores
// values verbatim. (Existing databases keep their old columns — the
// table is only created when absent.)
st.executeUpdate(
  "CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    post_id TEXT NOT NULL,
    subreddit TEXT NOT NULL,
    url TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    sent_at INTEGER NOT NULL
  )")
// Posts are deduplicated by image URL.
st.executeUpdate(
  "CREATE UNIQUE INDEX IF NOT EXISTS url_idx ON posts (url)")
st.close()
// Prepared statements reused by isPostUnique / addPost below.
stIsPostExists = conn.prepareStatement(
  "SELECT COUNT(*) FROM posts
  WHERE url = ?")
stAddPost = conn.prepareStatement(
  "INSERT INTO posts(post_id, subreddit, url, created_at, sent_at)
  VALUES(?, ?, ?, ?, ?)")
// True when the post's URL has never been stored (and therefore never sent).
def isPostUnique(post) {
  stIsPostExists.setString(1, post.url)
  rs = stIsPostExists.executeQuery()
  // JDBC cursors start BEFORE the first row: advance with next() before
  // reading the COUNT(*), and close the ResultSet so it doesn't leak.
  count = rs.next() ? rs.getInt(1) : 0
  rs.close()
  return count == 0
}
// Records a post as sent so later runs skip it (url_idx enforces uniqueness).
def addPost(post) {
stAddPost.setString(1, post.id)
stAddPost.setString(2, post.sub)
stAddPost.setString(3, post.url)
// created_utc from reddit, epoch seconds
stAddPost.setLong(4, post.time)
// presumably time() is milliseconds and /1000 stores epoch seconds to
// match created_at — TODO confirm OwnLang time() units
stAddPost.setLong(5, time() / 1000)
stAddPost.executeUpdate()
}
// Helpers
// Converts free text to a Telegram-safe hashtag body:
// lowercase, strip everything but [a-z_0-9] and spaces, spaces -> "_".
def strToHashtag(str) {
  lowered = str.toLowerCase()
  cleaned = lowered.replaceAll("[^a-z_0-9\s]", "")
  return cleaned.replaceAll("\s+", "_")
}
// Applies each [from, to] pair to str, in the order given.
def multireplace(str, replacements) {
  for pair : replacements {
    str = str.replace(pair[0], pair[1])
  }
  return str
}
// Escapes the characters Telegram's HTML parse_mode treats specially.
// The original `replace("&", "&")` was a no-op (the "&amp;" entity was
// mangled); "&" must be escaped, and escaped FIRST so it doesn't
// re-escape the entities produced by the later replacements.
def safe(str) = str.replace("&", "&amp;")
    .replace("<", "&lt;").replace(">", "&gt;")
// Builds the HTML caption: linked title, a comments link, and an
// optional " #subreddit_flair" hashtag when the post has flair.
def getCaption(post) {
  // Guard against a null/absent flair_text so length() cannot fail.
  flair = post.flair_text ?? ""
  tag = ""
  if (length(flair) > 0) {
    tag = " #" + strToHashtag(post.sub + "_" + flair)
  }
  return sprintf(
    "<a href=\"%s\">%s</a>\n" +
    "<a href=\"https://reddit.com%s\">🗨 Comments</a>%s",
    safe(post.url),
    safe(post.title),
    safe(post.permalink),
    tag
  )
}
// Build and send the media group: fresh image posts, at most 10 items
// (Telegram's sendMediaGroup limit).
media = stream(fetchAll(config.subreddits))
    // keep only posts whose URL contains a known image extension
    .filter(def(p) = reduce([".jpg", ".png"], false, def(acc, ext) = acc || indexOf(p.url, ext) > 0))
    .filter(::isPostUnique)
    // Apply the 10-item cap BEFORE recording posts as sent: with the cap
    // after peek(::addPost), posts trimmed by the limit were marked as
    // sent in the database but never actually delivered.
    .limit(10)
    // .peek(def(p) = sendPhoto(config.peer, p.url))
    .peek(::addPost)
    .map(def(p) = {
      "type": "photo",
      "media": p.url,
      "caption": getCaption(p),
      "parse_mode": "html"
    })
    .toArray()
debug(jsonencode(media))
if (length(media) > 0) {
  sendMediaGroup(config.peer, media)
}
// Release database resources before shutting down.
stIsPostExists.close()
stAddPost.close()
conn.close()
// No-op sink for async HTTP responses; uncomment echo(r) to trace them.
def debug(r) {
// echo(r)
}
// Give pending async HTTP callbacks a moment to complete.
// NOTE(review): presumably milliseconds — confirm OwnLang sleep units;
// 10 ms may be too short to let network callbacks finish.
sleep(10)
use "java"
// Force JVM termination so lingering HTTP threads don't keep the
// process alive.
System = newClass("java.lang.System")
System.exit(0)