-- TODO: Add the other routes here
add_route("eurogamer", "/eurogamer")

function eurogamer.route(args)
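    -- Fetch the Eurogamer news feed, scrape the full article body for each new item,
    -- and return the result as a rebuilt RSS feed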
    -- TODO: This should not be handled by Lua;
    -- the Go side needs to bind the database to Lua
    local rssFeed = rss:get("https://www.eurogamer.net/feed/news")
    local entries = parse_xml_feed(rssFeed)

    local newEntries = {}
    local scraped = {}
    local to_scrape = rss.limit(entries, 25)

    -- Check if the selected entries have already been scraped
    scraped, to_scrape = db:check(to_scrape)
    for _, entry in ipairs(to_scrape) do
        log.debug("Scraping: " .. entry:link())
        local article = get(entry:link())
        local post = html.new(article)
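
        -- Strip navigation, scripts, polls and ad markup from the article page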
post:remove("header")
|
|
post:remove("script")
|
|
post:remove(".poll_wrapper")
|
|
post:remove(".poll_container")
|
|
post:remove(".poll")
|
|
post:remove(".poll_leaderboard")
|
|
post:remove(".advert_container")
|
|
post:remove(".article_footer")
|
|
post:remove(".adsbygoogle")
|
|
post:remove(".google-news-link")
|
|
|
|
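        -- Keep only the article body and use it as the entry's description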
        local content = post:select(".article_body")
        entry:description(content)

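        -- Store the full entry so later requests can serve it from the database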
        db:insert(entry)
        table.insert(newEntries, entry)

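        -- Throttle between requests; os.execute shells out, so this assumes a POSIX sleep command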
        os.execute("sleep 0.25")
    end

    -- Fetch the already-scraped entries from the database
    local localEntries = db:getRss(scraped)

    -- Merge the freshly scraped entries with the ones loaded from the database
    newEntries = rss.merge(newEntries, localEntries)

    -- Build a new RSS feed from the merged list
    local image = RssImage.new("Eurogamer News", "https://eurogamer.net/favicon.ico", "https://eurogamer.net")
    local feed = create_rss_feed("Eurogamer News", "https://eurogamer.net", image, newEntries)

    return feed
end