updates to support configurable intervals, feeds and better hashtag support
parent 8e99a2b491
commit 2c219e1d9e

.env.sample (16 lines changed)
```diff
@@ -1,7 +1,17 @@
 # Mastodon Info
 MASTODON_ACCESS_KEY=
-MASTODON_API_URL=
 MASTODON_ACCOUNT_ID=
+MASTODON_API_URL=https://example.com/api/v1/
+
+# Prefix for the post
+POST_PREFIX=
+# Hashtags for the post
+POST_HASHTAG=gaming,videogames,news
+
+# Number of posts to create
+MAX_POST_PER_SCAN=2
+
+# Feeds to scan
+# 3600000 means every hour: 60 * 60 * 1000
+FEED_INTERVAL=3600000
+
+# isNews = true will post a contentSnippet. If that is too long, set isNews to false to post the shorter summary.
+FEEDS='{"feeds": [{"url": "https://example.com/feeds/feed.atom", "isNews": true, "tag": "Post Tag"}]}'
```
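For quick reference, this is how the two new settings get consumed at runtime (a minimal sketch assuming `dotenv`-style loading; the variable names match the sample above, everything else is illustrative):

```js
require("dotenv").config();

// FEEDS holds a JSON document; each entry carries its own url, isNews flag, and tag.
const { feeds } = JSON.parse(process.env.FEEDS);
console.log(feeds[0].url, feeds[0].isNews, feeds[0].tag);

// Environment values are strings, so coerce FEED_INTERVAL before arithmetic.
const intervalMs = Number(process.env.FEED_INTERVAL); // 3600000 = 60 * 60 * 1000
```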
bot.js (83 lines changed)
```diff
@@ -8,7 +8,11 @@ const axios = require("axios");
 const urlMetadata = require("url-metadata");
 let Mastodon = require("mastodon-api");
 let Parser = require("rss-parser");
-let parser = new Parser();
+let parser = new Parser({
+  headers: {
+    Accept: "application/rss+xml, application/xml",
+  },
+});
+let maxPostPerScan = process.env.MAX_POST_PER_SCAN;
 
 const M = new Mastodon({
```
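rss-parser forwards the `headers` option to the HTTP request it makes, so the explicit `Accept` header should help with feed hosts that reject generic clients. A quick way to sanity-check the configured parser (the URL is a placeholder, not part of this commit):

```js
// Manual smoke test for the configured parser.
parser
  .parseURL("https://example.com/feeds/feed.atom")
  .then((feed) => console.log(feed.title, feed.items.length))
  .catch((err) => console.error("feed fetch failed:", err.message));
```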
```diff
@@ -19,15 +23,18 @@ const M = new Mastodon({
 const download_image = async (url, image_path) => {
   let response = await axios({
     method: "get",
     url,
     responseType: "stream",
   });
 
+  console.log(response);
+
   return new Promise((resolve, reject) =>
     response.data
       .pipe(fs.createWriteStream(image_path))
       .on("finish", () => {
-        console.log("---- Image Written Successfully");
+        console.log("---- Image Written Successfully", url);
         resolve(true);
       })
       .on("error", (e) => {
```
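For context, `download_image` streams the HTTP body straight to disk and resolves once the write completes; a typical call site would look like this (the URL and filename are placeholders, and the call must sit inside an async function):

```js
// Hypothetical call site; URL and filename are illustrative only.
const Path = require("path");
const imagePath = Path.resolve(__dirname, "images", "post-image-0");
await download_image("https://example.com/cover.jpg", imagePath);
```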
```diff
@@ -42,7 +49,7 @@ const download_image = async (url, image_path) => {
 
   setInterval(async () => {
     await runBot();
-  }, 60 * 60 * 1000);
+  }, process.env.FEED_INTERVAL);
 })();
 
 async function getLastPostDate() {
```
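`setInterval` coerces the string it receives from `process.env` to a number, so this works as written; an explicit conversion with a fallback is slightly more defensive (a sketch, not part of the commit):

```js
// Defensive variant: coerce the env string and fall back to hourly.
const intervalMs = Number(process.env.FEED_INTERVAL) || 60 * 60 * 1000;
setInterval(runBot, intervalMs);
```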
```diff
@@ -58,49 +65,59 @@ async function getLastPostDate() {
 
 async function readFeeds() {
   console.log("Processing Feeds: readFeeds()");
-  let feed = await parser.parseURL("http://feeds.feedburner.com/ign/games-all");
-  return feed;
+  let feeds = JSON.parse(process.env.FEEDS || {});
+  let feed = await parser.parseURL(feeds.feeds[0].url);
+  return { feed, isNews: feeds.feeds[0] };
 }
 
-async function processFeed(feed, postDate) {
+async function processFeed(feed, postDate, feedOptions) {
   let count = 0;
   let validFeeds = feed.items
     .filter((item) => {
       let pubDate = new Date(item.pubDate);
-      return pubDate > postDate;
+      return pubDate < postDate;
     })
     .slice(0, maxPostPerScan);
 
   return Promise.all(
     validFeeds.map(async (item) => {
-      let currentCount = count++;
+      let path;
 
-      let metadata = await urlMetadata(item.link);
+      if (feedOptions.isNews) {
+        let currentCount = count++;
 
-      // Download feed item image
-      let path = Path.resolve(
-        __dirname,
-        "images",
-        `post-image-${currentCount}`
-      );
-      await download_image(metadata.image, path);
+        let metadata = await urlMetadata(item.link);
+
+        // Download feed item image
+        path = Path.resolve(__dirname, "images", `post-image-${currentCount}`);
+        await download_image(metadata.image, path);
+      }
 
-      return postFeedItem(path, item);
+      return postFeedItem(path, item, feedOptions);
     })
   );
 }
 
-async function postFeedItem(path, item) {
-  let mediaup = await M.post("media", {
-    file: fs.createReadStream(path),
-  });
-
-  return M.post("statuses", {
-    status: `${item.title}\n\n${
-      item.contentSnippet ? "\n\n" + item.contentSnippet : ""
-    }\n\n#NeoVibe #${process.env.POST_HASHTAG}\n\n${item.link}`,
-    media_ids: [mediaup.data.id],
-  });
+async function postFeedItem(path, item, feedOptions) {
+  if (feedOptions.isNews) {
+    let mediaup = await M.post("media", {
+      file: fs.createReadStream(path),
+    });
+
+    return M.post("statuses", {
+      status: `${feedOptions.tag}: ${item.title}\n\n${
+        item.contentSnippet ? "\n\n" + item.contentSnippet : ""
+      }\n\n#NeoVibe ${getHashTags()}\n\n${item.link}`,
+      media_ids: [mediaup.data.id],
+    });
+  } else {
+    return M.post("statuses", {
+      status: `${feedOptions.tag}: ${
+        item.title
+      }\n\n#NeoVibe ${getHashTags()}\n\n${item.link}`,
+      media_ids: [],
+    });
+  }
 }
 
 async function runBot() {
```
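One edge worth noting in `readFeeds`: `JSON.parse(process.env.FEEDS || {})` uses an object literal as the fallback, which `JSON.parse` stringifies to `"[object Object]"` and then fails to parse, so the default only masks the missing variable. A string default survives parsing (a sketch, not what the commit ships):

```js
// Safer fallback: an empty feed list parses cleanly when FEEDS is unset.
let feeds = JSON.parse(process.env.FEEDS || '{"feeds": []}');
```

Also note that the second property returned by `readFeeds` is named `isNews` but actually carries the entire first feed entry, which is why both `feedOptions.isNews` and `feedOptions.tag` resolve downstream.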
```diff
@@ -110,13 +127,23 @@ async function runBot() {
 
   let postDate = await getLastPostDate();
 
-  let processedFeed = await processFeed(feed, postDate);
+  let processedFeed = await processFeed(feed.feed, postDate, feed.isNews);
 
   console.log("Completed Running Bot: runBot()");
 
   return processedFeed;
 }
 
+function getHashTags() {
+  let hashTags = process.env.POST_HASHTAG.split(",")
+    .map((hashtag) => {
+      return `#${hashtag}`;
+    })
+    .join(" ");
+
+  return hashTags;
+}
+
 const requestListener = function (req, res) {
   res.writeHead(200);
   res.end("Hello, World!");
```
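With the sample value from `.env.sample`, `getHashTags` turns the comma-separated list into space-separated hashtags:

```js
// With POST_HASHTAG=gaming,videogames,news:
getHashTags(); // => "#gaming #videogames #news"
```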