local WEBSITE_NAME = "9to5mac"
local WEBSITE_HOME = "https://9to5mac.com"

add_route("Nineto5mac", "/9to5mac")

function Nineto5mac.route(args)
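    -- Fetch the upstream feed and parse it into individual entries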
    local rssFeed = rss:get("https://9to5mac.com/feed/")
    local entries = parse_xml_feed(rssFeed)
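    -- Working lists: freshly scraped entries, entries still to scrape, and entries already in the database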
    local newEntries = {}
    local to_scrape = {}
    local scraped = {}
    -- NOTE: 25 limit for right now
    to_scrape = rss.limit(entries, 25)

    -- Check if the selected entries have already been scraped
    scraped, to_scrape = db:check(to_scrape)

    for _, entry in ipairs(to_scrape) do
        log.debug("Scraping: " .. entry:link())
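        -- Download the article page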
        local article = get(entry:link())
        local post = html.new(article)
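        -- Strip navigation, scripts and ad-related elements from the page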
post:remove("header")
|
|
post:remove("script")
|
|
post:remove(".ad-disclaimer-container")
|
|
post:remove("#after_disclaimer_placement")
|
|
post:remove(".adsbygoogle")
|
|
post:remove(".google-news-link")
|
|
|
|
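        -- Use the cleaned article body as the entry's description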
        local content = post:select(".post-content")
        entry:description(content)
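        -- Cache the entry in the database and add it to the list of fresh entries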
        db:insert(entry)
        table.insert(newEntries, entry)
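        -- Short pause between requests so the site isn't hammered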
        os.execute("sleep 0.25")
    end
    -- Fetch the scraped entries from the database
    local localEntries = db:getRss(scraped)

    -- Merge the two lists
    newEntries = rss.merge(localEntries, newEntries)

    -- Create a new rss feed from the merged list
    local image = RssImage.new(WEBSITE_NAME, "https://9to5mac.com/favicon.ico", WEBSITE_HOME)
    local feed = create_rss_feed(WEBSITE_NAME, WEBSITE_HOME, image, newEntries)

    return feed
end