|
|
|
import os from 'os'
|
|
|
|
import fs from 'fs'
|
|
|
|
import path from 'path'
|
|
|
|
|
|
|
|
import Parser from 'rss-parser'
|
|
|
|
|
|
|
|
// Optional override for the on-disk cache file location used by
// getDiskCachePath (only consulted when NODE_ENV is 'test' or 'development').
const CHANGELOG_CACHE_FILE_PATH = process.env.CHANGELOG_CACHE_FILE_PATH
|
|
|
|
// This is useful to set when doing things like sync search.
|
|
|
|
// Parsed as JSON from the CHANGELOG_DISABLED env var (defaults to 'false'),
// then coerced to a boolean — so "true" or "1" disables changelog downloads.
const CHANGELOG_DISABLED = Boolean(JSON.parse(process.env.CHANGELOG_DISABLED || 'false'))
|
|
|
|
|
|
|
|
// Download and parse the RSS feed at `${url}/feed`.
// Returns the parsed feed object, or undefined when the request or the
// parse fails (the failure is logged to stderr, never thrown).
async function getRssFeed(url) {
  const feedUrl = `${url}/feed`
  const parser = new Parser({ timeout: 5000 })
  try {
    return await parser.parseURL(feedUrl)
  } catch (err) {
    // Deliberately best-effort: a broken/unreachable feed should not crash
    // the caller, which treats undefined as "no changelog available".
    console.error(`cannot get ${feedUrl}: ${err.message}`)
  }
}
|
|
|
|
|
2022-03-03 23:32:34 +03:00
|
|
|
// Fetch, normalize, and cache the latest changelog items from an RSS feed.
//
// prefix - optional title prefix to strip (Ex: 'GitHub Actions: '), where the
//   colon and expected whitespace should be hardcoded in the prefix.
// feedUrl - base URL of the changelog ('/feed' is appended by getRssFeed).
// ignoreCache - when true, bypass the in-memory/disk caches and refetch.
//
// Returns an array of { title, date, href } objects, or undefined when the
// changelog is disabled or the feed could not be fetched / has no items.
export async function getChangelogItems(prefix, feedUrl, ignoreCache = false) {
  if (CHANGELOG_DISABLED) {
    if (process.env.NODE_ENV === 'development') {
      console.warn(`Downloading changelog (${feedUrl}) items is disabled.`)
    }
    return
  }

  if (!ignoreCache) {
    const fromCache = getChangelogItemsFromCache(prefix, feedUrl)
    if (fromCache) return fromCache
  }

  const feed = await getRssFeed(feedUrl)

  if (!feed || !feed.items) {
    console.log(feed)
    console.error('feed is not valid or has no items')
    return
  }

  // only show the first 3 posts
  const changelog = feed.items.slice(0, 3).map((item) => {
    // remove the prefix if it exists (Ex: 'GitHub Actions: '), where the
    // colon and expected whitespace should be hardcoded.
    const rawTitle = prefix ? item.title.replace(new RegExp(`^${prefix}`), '') : item.title
    // Trim once, THEN capitalize the first letter. The previous code trimmed
    // only for charAt(0) but sliced the untrimmed string, so a title with
    // leading whitespace got its first letter duplicated
    // (' something' -> 'Ssomething').
    const title = rawTitle.trim()
    return {
      title: title.charAt(0).toUpperCase() + title.slice(1),
      date: item.isoDate,
      href: item.link,
    }
  })

  // We don't cache the raw payload we'd get from the network request
  // because it would waste memory. Instead we store the "serialized"
  // object that's created from the raw payload.
  setChangelogItemsCache(prefix, feedUrl, changelog)

  return changelog
}
|
|
|
|
|
|
|
|
// Per-Node-process in-memory cache of serialized changelog payloads,
// keyed by getChangelogCacheKey(prefix, feedUrl).
const globalCache = new Map()
|
|
|
|
|
|
|
|
// Derive a cache key from the prefix + feed URL.
// Only ASCII letters survive, so the key is also safe to embed in a
// filename when caching to disk.
function getChangelogCacheKey(prefix, feedUrl) {
  const combined = `${prefix || ''}${feedUrl}`
  return combined.replace(/[^a-z]+/gi, '')
}
|
|
|
|
|
|
|
|
// Compute the on-disk cache file path for this feed.
// Returns undefined outside of local development and tests, which is how
// callers know to skip disk caching (i.e. in production).
function getDiskCachePath(prefix, feedUrl) {
  const env = process.env.NODE_ENV
  // When in local development or in tests, use disk caching
  if (env !== 'test' && env !== 'development') return

  // An explicit env override wins over the derived temp-dir path.
  if (CHANGELOG_CACHE_FILE_PATH) return CHANGELOG_CACHE_FILE_PATH

  // Day-granular filename: the cache naturally expires once per day.
  const cacheKey = getChangelogCacheKey(prefix, feedUrl)
  const date = new Date().toISOString().split('T')[0]
  return path.join(os.tmpdir(), `changelogcache-${cacheKey}-${date}.json`)
}
|
|
|
|
|
|
|
|
// Look up previously cached changelog items for this feed.
// Checks the per-process Map first, then (dev/test only) the disk cache.
// Returns the cached payload, or undefined on a miss.
function getChangelogItemsFromCache(prefix, feedUrl) {
  const cacheKey = getChangelogCacheKey(prefix, feedUrl)

  const inMemory = globalCache.get(cacheKey)
  if (inMemory) return inMemory

  const diskCachePath = getDiskCachePath(prefix, feedUrl)
  if (!diskCachePath) return

  try {
    const payload = JSON.parse(fs.readFileSync(diskCachePath, 'utf-8'))
    if (process.env.NODE_ENV === 'development')
      console.log(`Changelog disk-cache HIT on ${diskCachePath}`)
    // Also, for next time, within this Node process, put it into
    // the global cache so we don't need to read from disk again.
    globalCache.set(cacheKey, payload)
    return payload
  } catch (err) {
    // If it wasn't on disk, that's fine.
    if (err.code === 'ENOENT') return
    // The JSON.parse() most likely failed. Ignore the error
    // but delete the file so it won't be attempted again.
    if (err instanceof SyntaxError) {
      fs.unlinkSync(diskCachePath)
      return
    }
    // Anything else (permissions, I/O) is unexpected — surface it.
    throw err
  }
}
|
|
|
|
|
|
|
|
// Store serialized changelog items in the per-process cache and, when disk
// caching is enabled (dev/test only), persist them to disk as well.
function setChangelogItemsCache(prefix, feedUrl, payload) {
  const cacheKey = getChangelogCacheKey(prefix, feedUrl)
  globalCache.set(cacheKey, payload)

  const diskCachePath = getDiskCachePath(prefix, feedUrl)
  // `diskCachePath` is falsy when NODE_ENV==production, which
  // means we're not writing to disk in production.
  if (!diskCachePath) return

  fs.writeFileSync(diskCachePath, JSON.stringify(payload), 'utf-8')
  if (process.env.NODE_ENV === 'development')
    console.log(`Wrote changelog cache to disk ${diskCachePath}`)
}
|