Skip to content
This repository has been archived by the owner on Mar 17, 2022. It is now read-only.

Commit

Permalink
Add winston logger
Browse files Browse the repository at this point in the history
  • Loading branch information
wasdennnoch committed Dec 15, 2017
1 parent 8a1fe2f commit 7b6ea1f
Show file tree
Hide file tree
Showing 8 changed files with 127 additions and 74 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@
node_modules/
src/test.js
src/json/linkCache.json
src/json/stats.json
src/json/stats.json
main.log
38 changes: 38 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
"gfycat-sdk": "^1.4.10",
"request": "^2.83.0",
"request-promise-native": "^1.0.5",
"snoowrap": "^1.15.1"
"snoowrap": "^1.15.1",
"winston": "^2.4.0"
}
}
101 changes: 66 additions & 35 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,39 @@ const URL = require('url');
const snoowrap = require('snoowrap');
const Gfycat = require('gfycat-sdk');
const request = require('request-promise-native');
const winston = require('winston');
const Config = require('./utils/config');
const log = require('./utils/log');

const PROD = process.env.PROD || false;

// Global winston logger (winston 2.x API). Exposed on `global` so other
// modules (e.g. utils/config, utils/link-cache) can log without importing it.
global.logger = new winston.Logger({
    level: 'silly',
    transports: [new winston.transports.File({
        name: 'main',
        filename: 'main.log',
        handleExceptions: true,
        humanReadableUnhandledException: true,
        level: 'info' // the file only records 'info' and above
    })],
    exitOnError: false // keep the bot alive after handled exceptions
});
if (!PROD) {
    // During development also mirror everything (down to 'silly') to the console.
    logger.add(winston.transports.Console, {
        handleExceptions: true,
        humanReadableUnhandledException: true,
        json: false,
        level: 'silly'
    });
}

// Last-resort handlers so failures are logged instead of crashing silently.
process.on('uncaughtException', e => {
    logger.error('Uncaught exception', e);
});
process.on('unhandledRejection', e => {
    // Fixed: this message previously said 'Uncaught exception' (copy-paste error),
    // which made rejections indistinguishable from exceptions in main.log.
    logger.error('Unhandled rejection', e);
});

const c = new Config();
const PROD = c.PROD;

const reddit = new snoowrap({
userAgent: c.userAgent,
Expand All @@ -34,18 +62,19 @@ let lastPost = undefined;
let loops = 0; // keep track of loops here to regularly persist stats/cache
let deferredPosts = [];

log('[anti-gif-bot] Ready.');
logger.info(`Production: ${PROD}`);
logger.info('Set up.');

module.exports.start = () => {
if (!loopInterval) {
log('[anti-gif-bot] Started.');
logger.info('Started.');
loopInterval = setInterval(update, c.updateInterval);
update();
}
};
module.exports.stop = () => {
if (loopInterval) {
log('[anti-gif-bot] Stopped.');
logger.info('Stopped.');
clearInterval(loopInterval);
loopInterval = null;
}
Expand Down Expand Up @@ -87,10 +116,10 @@ async function update() {
I will move the stats to a DB since they're just way too much for some JSON.
Firebase looks good for that since it's not *that* much data (and it's free).
*/
// TODO new gfycat link types:
// https://thumbs.gfycat.com/UnfinishedContentCoyote-small.gif
// https://thumbs.gfycat.com/UnfinishedContentCoyote-max-14mb.gif

// TODO new gfycat link types:
// https://thumbs.gfycat.com/UnfinishedContentCoyote-small.gif
// https://thumbs.gfycat.com/UnfinishedContentCoyote-max-14mb.gif

try {

Expand All @@ -106,7 +135,8 @@ async function update() {
if (submissions.length > 0) {
lastPost = submissions[0].name;
} else {
throw new Error('No items returned by Reddit, skipping loop');
logger.warn('No items returned by Reddit, skipping loop');
return;
}
const sorted = [];
submissions.forEach(post => {
Expand Down Expand Up @@ -160,7 +190,7 @@ async function update() {
webmSave: undefined
});
} else {
if (!PROD) log(`Ignoring gif; ignored domain: ${ignoredDomain} (${domain}); ignored subreddit: ${ignoredSubreddit} (${subreddit})`);
logger.debug(`Ignoring gif; ignored domain: ${ignoredDomain} (${domain}); ignored sureddit: ${ignoredSubreddit} (${subreddit})`);
}
}
}
Expand Down Expand Up @@ -188,7 +218,7 @@ async function update() {
const mp4Bigger = post.mp4Save < 0;
const canBeBigger = !post.uploaded && includesPartial(c.mp4CanBeBiggerDomains, post.domain);
if (mp4Bigger && !canBeBigger) {
if (!PROD) log("mp4 is bigger but can't have a better quality!");
logger.debug("mp4 is bigger but can't have a better quality!");
continue;
}
const templates = c.replyTemplates;
Expand Down Expand Up @@ -226,7 +256,7 @@ async function update() {
if (PROD) {
await post.submission.reply(reply);
} else {
log(reply);
logger.debug(reply);
}
} catch (e) {
if (e.toString().includes('403'))
Expand Down Expand Up @@ -255,20 +285,20 @@ async function parsePost(post) {
const domain = post.domain;
const gif = post.gif;
if (!PROD) {
log();
log(`Got post by: ${post.author} in ${post.subreddit}`);
log(`Url: ${post.url}`);
log(`Gif: ${post.gif}`);
log(`mp4: ${post.mp4}`);
log(`Deferred: ${post.deferred}`);
log(`Defer count: ${post.deferCount}`);
log(`Uploading: ${post.uploading}`);
log(`Uploaded: ${post.uploaded}`);
const tempSubmission = post.submission;
post.submission = undefined; // Don't log the huge submission object
log(JSON.stringify(post));
logger.debug(`
Got post by: ${post.author} in ${post.subreddit}
Url: ${post.url}
Gif: ${post.gif}
mp4: ${post.mp4}
Deferred: ${post.deferred}
Defer count: ${post.deferCount}
Uploading: ${post.uploading}
Uploaded: ${post.uploaded}
${JSON.stringify(post)}
`);
post.submission = tempSubmission;
log();
}
let link = post.mp4;
let skipToEnd = false;
Expand All @@ -292,7 +322,7 @@ async function parsePost(post) {
}
if (post.author === '[deleted]') {
post.deferred = false;
if (!PROD) log('Ignoring post since it got deleted');
logger.debug('Ignoring post since it got deleted');
return; // If post got deleted during deferral just ignore it
}

Expand All @@ -305,14 +335,14 @@ async function parsePost(post) {
if (!gifCheck.success) {
if (gifCheck.statusCodeOk) { // Ignore if not found at all
if (!gifCheck.rightType) {
if (!PROD) log(`Not a gif link: ${post.url}`);
logger.debug(`Not a gif link: ${post.url}`);
} else if (gifCheck.size === -1) {
prepareAndUploadPost(post); // Size unknown; it's an unknown hoster anyways since others send a content-length
} else {
if (!PROD) log(`Gif too small (and not deferred), skipping (size: ${gifCheck.size})`);
logger.debug(`Gif too small (and not deferred), skipping (size: ${gifCheck.size})`);
}
} else {
if (!PROD) log(`Not a working link, got status code ${gifCheck.statusCode}: ${post.url}`);
logger.warn(`Not a working link, got status code ${gifCheck.statusCode}: ${post.url} ${post.url !== post.gif ? post.gif : ""}`);
}
return;
}
Expand All @@ -337,7 +367,7 @@ async function parsePost(post) {
}

if (!link) {
if (!PROD) log(`No link gotten for ${gif}`);
logger.debug(`No link gotten for ${gif}`);
return;
}

Expand Down Expand Up @@ -452,8 +482,8 @@ async function checkUrl(url, filetype, checksize) {
result.success = result.rightType && result.aboveSizeThreshold;
}
if (!PROD) {
log(`Checked ${url}`);
log(JSON.stringify(result));
logger.debug(`Checked ${url}
${JSON.stringify(result)}`);
}
} catch (e) {
c.stats.onLoopError(e);
Expand All @@ -464,9 +494,9 @@ async function checkUrl(url, filetype, checksize) {
async function uploadPost(post) {
if (!PROD) {
const time = 5000 + Math.random() * 40000;
log(`Waiting with fake upload for ${time}ms`);
logger.debug(`Waiting with fake upload for ${time}ms`);
await delay(time);
log(`Not uploading ${post.url}`);
logger.debug(`Not uploading ${post.url}`);
post.mp4 = 'https://gfycat.com/UncomfortablePleasedAnemoneshrimp';
post.uploading = false;
post.uploaded = true;
Expand Down Expand Up @@ -544,9 +574,10 @@ function getReadableFileSize(bytes) {

// Computes the percentage size savings of the mp4 (and webm, when one exists)
// relative to the original gif, storing the results on `post` in-place.
// A positive save means the converted file is smaller than the gif.
// Note: this paste contained diff residue (the pre-commit brace-less `if` and
// old `log(...)` line alongside the new ones); only the post-commit version is kept.
function calculateSaves(post) {
    post.mp4Save = (post.gifSize - post.mp4Size) / post.gifSize * 100;
    if (post.webmSize) {
        post.webmSave = (post.gifSize - post.webmSize) / post.gifSize * 100;
    }
    logger.debug(`Link stats: mp4 size: ${post.mp4Size} (webm: ${post.webmSize}); mp4save ${post.mp4Save}% (webmsave: ${post.webmSave}%)`);
}

// Since Number.toFixed doesn't work like you would expect it to...
Expand Down
13 changes: 5 additions & 8 deletions src/utils/config.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ const fs = require('fs');
const pkgReader = require('./package-reader');
const Stats = require('./stats');
const LinkCache = require('./link-cache');
const log = require('./log');

class Config {

Expand All @@ -16,7 +15,6 @@ class Config {
this.statsPath = path.join(__dirname, '..', 'json', 'stats.json');
this.secretPath = path.join(__dirname, '..', '..', '.secret');
this.prod = process.env.PROD || false;
log(`Production: ${this.PROD}`);
this.load();
setInterval(this.checkForUpdates.bind(this), 1000 * 60);
}
Expand All @@ -28,13 +26,13 @@ class Config {

save() {
fs.writeFile(this.configPath, JSON.stringify(this.config, null, 2), (e) => {
if (e) log(`[!]-- Error saving config: ${e.toString()}`);
if (e) logger.error('Error saving config', e);
});
}

checkForUpdates() {
if (fs.existsSync(this.newConfigPath)) {
log('New config detected, reloading.');
logger.info('New config detected, reloading.');
try {
this.config = JSON.parse(fs.readFileSync(this.newConfigPath, 'utf8'));
this.save();
Expand All @@ -45,8 +43,7 @@ class Config {
this.cache; // Reinit link cache in case the sizes changed
}
} catch (e) {
log('An error occurred while loading the new config');
log(e);
logger.error('An error occurred while loading the new config', e);
}
}
}
Expand All @@ -61,7 +58,7 @@ class Config {

get stats() {
if (!this.sts) {
this.sts = new Stats(this.statsPath, this.PROD);
this.sts = new Stats(this.statsPath);
}
return this.sts;
}
Expand Down Expand Up @@ -132,7 +129,7 @@ class Config {


get userAgent() {
return `bot:anti-gif-bot:${this.botVersion}`;
return `bot:anti-gif-bot:${this.botVersion}:/u/MrWasdennnoch`;
}

get botVersion() {
Expand Down
4 changes: 2 additions & 2 deletions src/utils/link-cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@ class LinkCache {
Object.assign(json, JSON.parse(fs.readFileSync(this.path, {encoding: 'utf8'}) || '{}'));
this.imageCache = json.imageCache;
if (json.version < this.version) {
console.log(`[LinkCache] Version difference detected (cache: ${json.version}, current ${this.version}), upgrading cache...`);
logger.info(`[LinkCache] Version difference detected (cache: ${json.version}, current ${this.version}), upgrading cache...`);
this.imageCache.forEach((item, index, arr) => {
if (item.mp4Size === null) {
arr.splice(index, 1);
}
});
this.save();
console.log('Upgrade done.');
logger.info('Upgrade done.');
}
}

Expand Down
Loading

0 comments on commit 7b6ea1f

Please sign in to comment.