fix(sarjana/getDatas): fix main functionality

108kb committed Apr 20, 2019
1 parent 52ca370 commit c4320cae677a816f56bef80e3199d3e5b1dd162b
Showing with 16 additions and 23 deletions.
  1. +16 −23 sarjana/getDatas.js
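
The hunks below reference getDataQueue, db, job, and siteData, which are declared earlier in sarjana/getDatas.js and are not part of this diff. The API calls in the hunks (getDataQueue.add, .on('failed'), db('datas'), db.from('sites')) suggest a bull queue and a knex connection, but the actual wiring is not shown, so the following is only a rough sketch of the assumed setup; the connection settings and the processor body are placeholders, not taken from the commit.

// Assumed wiring for the diff below (not part of the commit): bull for the
// queue, knex for `db`; connection settings here are placeholders.
const Queue = require('bull')
const knex = require('knex')
const consola = require('consola')

const db = knex({ client: 'pg', connection: process.env.DATABASE_URL })
const getDataQueue = new Queue('getData', process.env.REDIS_URL)

getDataQueue
  .process('get-data', async job => {
    // Hypothetical lookup that would produce the siteData rows used below.
    const siteData = await db('sites').select('id').where('url', job.data.url)
    consola.info(`[job:getData:${job.id}] Working on ${job.data.url}`)
    // ... scrape and store, then flush the cache as shown in the second hunk ...
  })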
@@ -1,3 +1,4 @@
+const { CronJob } = require('cron')
 const cuid = require('cuid')
 const consola = require('consola')
 const fetch = require('node-fetch')
@@ -52,36 +53,28 @@ getDataQueue
     fetch(`https://stalker.108kb.io/api/getData/${siteData[0].id}?deleteCache=true`, {
       headers: { 'x-secret-word': process.env.SECRET_WORD }
     })
-    .then(res => res.json())
-    .then(() => consola.info(`Finished flushing cache for site_id ${siteData[0].id}`))
-    .catch(err => consola.error(err))
+      .then(res => res.json())
+      .then(() => consola.info(`Finished flushing cache for site_id ${siteData[0].id}`))
+      .catch(err => consola.error(err))
 
     consola.success(`[job:getData:${job.id}] Done working on job #${job.id}`)
   })
   .on('failed', messages => consola.error(messages))
 
-async function main () {
-  let isTodayJobDone = null
-  const getLatestData = await db('datas').select('created_at').limit(1)
-
-  if (getLatestData.length) {
-    const today = new Date().toDateString()
-    isTodayJobDone = new Date(getLatestData[0].created_at + '00').toDateString() === today
-  }
-
-  if (isTodayJobDone) {
-    consola.info('Jobs for today is done!')
-    return false
-  }
-
-  const urls = await db.from('sites').select('url')
+async function startWorker () {
+  const urls = await db.from('sites').select('url', 'updated_at')
 
   return Promise.all(
-    urls.map(async data => getDataQueue.add('get-data',
-      { url: data.url },
-      { repeat: { cron: '00 11 * * *' } }
-    ))
+    urls
+      .filter(data => new Date(data.updated_at).toDateString() < new Date().toDateString())
+      .map(async data => getDataQueue.add('get-data', { url: data.url }))
   )
 }
 
-main()
+const job = new CronJob({
+  cronTime: '00 04 * * *',
+  onTick: startWorker,
+  timeZone: 'Asia/Jakarta'
+})
+
+job.start()
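
After this change, scheduling moves out of bull's per-job repeat option ('00 11 * * *') and into a single cron entry: the CronJob fires startWorker() at 04:00 Asia/Jakarta, which enqueues a get-data job for each site whose updated_at is not from today (as the filter appears intended to do). One thing worth noting about that filter: it compares the strings returned by toDateString() with <, which orders lexicographically (weekday name first) rather than chronologically. A sketch of a timestamp-based variant, with startOfDay() as a hypothetical helper not present in the commit:

// Compare calendar days numerically instead of comparing toDateString() strings.
function startOfDay (value) {
  const d = new Date(value)
  d.setHours(0, 0, 0, 0) // truncate to local midnight
  return d
}

const notUpdatedToday = data =>
  startOfDay(data.updated_at).getTime() < startOfDay(new Date()).getTime()

// e.g. urls.filter(notUpdatedToday).map(data => getDataQueue.add('get-data', { url: data.url }))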
