diff --git a/.env.example b/.env.example
index 57e5fd5..b8d7b9a 100644
--- a/.env.example
+++ b/.env.example
@@ -1 +1,2 @@
-EXCEL_FILENAME=somefile.xlsx
\ No newline at end of file
+EXCEL_FILENAME=somefile.xlsx
+CSV_FILENAME=somefile.csv
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 689d49e..0025161 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,3 +6,4 @@ node_modules/
*.zip
*.json
*.env
+*.~lock.*
diff --git a/README.md b/README.md
index 8c04aa7..a6e6535 100644
--- a/README.md
+++ b/README.md
@@ -104,7 +104,8 @@ Sub item is in the same line as the unordered main item.
| Variable Name | Description |
| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
- | EXCEL_FILENAME | Excel file name relative to to the `/src/01_recommendations` directory |
+ | EXCEL_FILENAME | Excel file name relative to the `/src/01_recommendations` scripts directory |
+ | CSV_FILENAME        | CSV file name relative to the `/src/02_crop_calendar` scripts directory |
| FIREBASE_SERVICE_ACC | The project's private key file contents, condensed into a single line with all whitespace removed. The service account JSON file is generated from the Firebase project's **Project Settings** -> **Service accounts** -> **Generate new private key** |
| FIREBASE_PRIVATE_KEY | The `private_key` entry from the service account JSON file. **NOTE:** Make sure the value starts and ends with a double-quote on a Windows localhost; other systems (e.g., Ubuntu) may not require the double-quotes. |
@@ -120,9 +121,15 @@ Fix lint errors.
### `npm run process:recommendations`
-Normalize an unconventional, complex excel file into an array of simple JS objects with columns containing messy (ordered and unordered) bullet lists converted into organized HTML list tags.
+Normalize an excel file into an array of simple JS objects, converting columns that contain messy (ordered and unordered) bullet lists into HTML list tags. It also writes the unique crop stages and farm operations to CSV files.
-> **WARNING:** The script does not support `-` (dash) symbols.
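+
+The extracted shape per excel tab looks roughly like this (field names come from the extraction code; the values are illustrative):
+
+```js
+{
+  recommendations: { type: '<tab type>', data: [/* one object per normalized row */] },
+  cropstages: ['<crop stage>' /* ... */],
+  farmoperations: ['<farm operation>' /* ... */]
+}
+```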
+### `npm run process:calendar`
+
+Normalize and upload the new cropping calendar data.
+
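+Each Firestore document under `n_cropping_calendar_merged` is keyed by province and holds rows shaped roughly like the sketch below (values are illustrative); every month field lists the crop stage codes for the first and second half of that month, with `none` for empty cells:
+
+```js
+{ municipality: '<municipality>', crop: '<crop>', jan: 'lprep,plant/trans', /* ... */ dec: 'mat,none' }
+```
+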
+### `npm run process:details`
+
+Normalize and upload the new municipality-level details (crop, crop stage, and farm operation) attached to the crop recommendations.
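+
+Each Firestore document under `n_recommendations_details` is keyed by province and holds rows shaped roughly like this (values are illustrative):
+
+```js
+{ municipality: '<municipality>', crop: '<crop>', crop_stage: 'Maturing', farm_operation: '<farm operation>', stage: 'mat' }
+```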
@ciatph
20221205
diff --git a/package.json b/package.json
index abf14e7..0326ac2 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,9 @@
"main": "index.js",
"scripts": {
"start": "node src/scripts/main.js",
- "process:recommendations": "node src/01_recommendations/index.js",
+ "process:recommendations": "node src/01_recommendations",
+ "process:calendar": "node src/02_crop_calendar",
+ "process:details": "node src/03_recommendations_detail",
"lint": "eslint src",
"lint:fix": "eslint src --fix"
},
diff --git a/src/01_recommendations/index.js b/src/01_recommendations/index.js
index c3872ec..fede17b 100644
--- a/src/01_recommendations/index.js
+++ b/src/01_recommendations/index.js
@@ -5,10 +5,13 @@ const TendayTab = require('./src/classes/tendaytab')
const SpecialTab = require('./src/classes/specialtab')
const { uploadToFirestore } = require('../lib/uploadtofirestore')
const { extractExcelData } = require('./src/extract')
+const { dataToCsv } = require('./src/tocsv')
+// Path: /n_list_crop_recommendations/{type}.data[]
const main = async () => {
const data = []
const query = []
+  const upload = false // set to true to enable the Firestore upload step
// Excel file path
const filePath = path.join(__dirname, process.env.EXCEL_FILENAME)
@@ -26,27 +29,51 @@ const main = async () => {
excelTabs.forEach((item, index) => {
data.push(extractExcelData(item, filePath))
- query.push(uploadToFirestore('n_list_crop_recommendations', item.type, data[index].recommendations))
})
} catch (err) {
console.log(`[ERROR]: ${err.message}`)
process.exit(1)
}
- try {
- // Upload data to Firestore
- let logs = 'Extracted data:\n'
- data.forEach(item => {
- logs += `${item.recommendations.type}: ${item.recommendations.data.length} rows\n`
- })
+  // Collect the unique crop stages from all excel tabs (written to CSV below)
+ const uniqueStages = [...data[0].cropstages, ...data[1].cropstages, ...data[2].cropstages]
+ .filter((x, i, a) => a.indexOf(x) === i)
+ .reduce((list, item, index) => {
+ list.push({ id: index + 1, name: item })
+ return list
+ }, [])
- console.log(`${logs}\nUploading data to Firestore...`)
- await Promise.all(query)
- console.log('Data upload success!')
- process.exit(0)
- } catch (err) {
- console.log(`[ERROR]: ${err.message}`)
- process.exit(1)
+  // Collect the unique farm operations from all excel tabs (written to CSV below)
+ const uniqueActivities = [...data[0].farmoperations, ...data[1].farmoperations, ...data[2].farmoperations]
+ .filter((x, i, a) => a.indexOf(x) === i)
+ .reduce((list, item, index) => {
+ list.push({ id: index + 1, name: item })
+ return list
+ }, [])
+
+ dataToCsv(uniqueStages, path.join(__dirname, 'crop_stages.csv'))
+ dataToCsv(uniqueActivities, path.join(__dirname, 'farm_operations.csv'))
+
+ if (upload) {
+ try {
+ data.forEach((item, index) => {
+ query.push(uploadToFirestore('n_list_crop_recommendations', item.recommendations.type, item.recommendations))
+ })
+
+ // Upload data to Firestore
+ let logs = 'Extracted data:\n'
+ data.forEach(item => {
+ logs += `${item.recommendations.type}: ${item.recommendations.data.length} rows\n`
+ })
+
+ console.log(`${logs}\nUploading data to Firestore...`)
+ await Promise.all(query)
+ console.log('Data upload success!')
+ process.exit(0)
+ } catch (err) {
+ console.log(`[ERROR]: ${err.message}`)
+ process.exit(1)
+ }
}
}
diff --git a/src/01_recommendations/src/tocsv.js b/src/01_recommendations/src/tocsv.js
new file mode 100644
index 0000000..84c04dc
--- /dev/null
+++ b/src/01_recommendations/src/tocsv.js
@@ -0,0 +1,12 @@
+const { CsvToFireStore } = require('csv-firestore')
+const csvHandler = new CsvToFireStore()
+
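+/**
+ * Write an array of objects to a CSV file using csv-firestore's write()
+ * @param {Object[]} data - Rows to write
+ * @param {String} filePath - Full file path of the target CSV file
+ */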
+module.exports.dataToCsv = (data, filePath) => {
+ try {
+ csvHandler.write(data, filePath)
+ console.log(`Created ${filePath}\n`)
+ return true
+ } catch (err) {
+ throw new Error(err.message)
+ }
+}
diff --git a/src/02_crop_calendar/constants.js b/src/02_crop_calendar/constants.js
new file mode 100644
index 0000000..1ce24ce
--- /dev/null
+++ b/src/02_crop_calendar/constants.js
@@ -0,0 +1,10 @@
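+// Crop stage codes used in the cropping calendar CSV, mapped to human-readable labels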
+const CROP_STAGE_LABELS = {
+ 'plant/trans': 'Newly Planted',
+ 'veg/repro': 'Vegetative/Reproductive',
+ mat: 'Maturing',
+ lprep: 'Preparation Stage'
+}
+
+module.exports = {
+ CROP_STAGE_LABELS
+}
diff --git a/src/02_crop_calendar/cropping_calendar.js b/src/02_crop_calendar/cropping_calendar.js
new file mode 100644
index 0000000..5df64e4
--- /dev/null
+++ b/src/02_crop_calendar/cropping_calendar.js
@@ -0,0 +1,159 @@
+const { CsvToFireStore } = require('csv-firestore')
+const { CROP_STAGE_LABELS } = require('./constants')
+
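+/**
+ * Parses the cropping calendar CSV: collects unique provinces, municipalities,
+ * crops and crop stages, and merges the per-month crop stage columns for each row.
+ */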
+class CroppingCalendar extends CsvToFireStore {
+ constructor (csvFilePath) {
+ super(csvFilePath)
+
+ /** Province { id, name } */
+ this.provinces = []
+
+ /** Municipality {id, name, province } */
+ this.municipalities = []
+
+ /** Crop { id, name } */
+ this.crops = []
+
+ /** Crop stages from months */
+ this.crop_stages = []
+
+ this.count = 0
+ }
+
+ /**
+ * Check if a value exists in a specified Object[] array
+ * @param {String} param - Array name to check
+ * @param {*} value - Value to find in the Object[] array
+ */
+ itemExists (param, value) {
+ let exists = false
+
+ switch (param) {
+ case 'province':
+ exists = Object.values(this.provinces).map(x => x.name).includes(value)
+ break
+ case 'municipality':
+ exists = Object.values(this.municipalities).map(x => x.unique).includes(value)
+ break
+ case 'crop':
+ exists = Object.values(this.crops).map(x => x.name).includes(value)
+ break
+ case 'crop_stage':
+ exists = Object.values(this.crop_stages).map(x => x.name).includes(value)
+ break
+ default: break
+ }
+
+ return exists
+ }
+
+ /**
+ * Remove whitespace on start and end of string
+ * @param {String} value - String text
+ */
+ removeSpecialChars (value) {
+ if (value === undefined) {
+ return ''
+ }
+
+ return value.trim()
+ }
+
+ /**
+   * Override CsvToFireStore's read() method to parse the cropping calendar CSV file
+   * @param {Object} row - A row read from the CSV file, with keys taken from the CSV headers
+ */
+ read (row) {
+ this.count += 1
+ const headers = Object.keys(row)
+ const obj = { id: this.count }
+
+ headers.forEach(item => {
+ const include = item.length > 0
+
+ if (!include) {
+ return
+ }
+
+ let key = item.toLowerCase()
+
+ if (key === 'prov') {
+ key = 'province'
+ } else if (key === 'muni') {
+ key = 'municipality'
+ }
+
+ const value = row[item].trim()
+
+ // Extract unique provinces
+ if (key === 'province' && !this.itemExists('province', value) && value !== '') {
+ this.provinces.push({
+ id: this.provinces.length + 1,
+ name: value
+ })
+ }
+
+ // Extract unique municipalities
+ if (key === 'municipality' && value !== '') {
+ const combo = `${row.prov.trim()}|${value}`
+
+ if (!this.itemExists('municipality', combo)) {
+ this.municipalities.push({
+ id: this.municipalities.length + 1,
+ province: row.prov.trim(),
+ name: value,
+ unique: combo
+ })
+ }
+ }
+
+ // Extract unique crop names
+ if (key === 'crop' && !this.itemExists('crop', value) && value !== '') {
+ this.crops.push({
+ id: this.crops.length + 1,
+ name: value
+ })
+ }
+
+ // Extract unique crop stages
+ if (!['province', 'municipality', 'crop'].includes(key)) {
+        const cleanStage = value.substring(0, value.indexOf('_'))
+
+ if (!this.itemExists('crop_stage', cleanStage) && cleanStage !== '') {
+ this.crop_stages.push({
+ id: this.crop_stages.length + 1,
+ name: cleanStage,
+ label: CROP_STAGE_LABELS[cleanStage]
+ })
+ }
+ }
+
+ if (include && ['province', 'municipality', 'crop'].includes(key)) {
+ obj[key] = value
+ }
+ })
+
+ // Extract and merge crop stages per month
+ const months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
+
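+    // Month columns follow the `<MM>_15_CAL` / `<MM>_30_CAL` naming pattern
+    // for the first and second half of each month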
+ for (let i = 1; i <= 12; i += 1) {
+ const mdata = []
+ const index = (i < 10) ? `0${i}` : i
+ const m1 = row[`${index}_15_CAL`].slice(0, row[`${index}_15_CAL`].indexOf('_'))
+ const m2 = row[`${index}_30_CAL`].slice(0, row[`${index}_30_CAL`].indexOf('_'))
+
+ const firstHalf = (m1 === '') ? 'none' : m1
+ mdata.push(firstHalf)
+
+ const secondHalf = (m2 === '') ? 'none' : m2
+ mdata.push(secondHalf)
+
+ obj[months[i - 1]] = mdata.toString()
+ }
+
+ this.csv_rows.push(obj)
+ }
+}
+
+module.exports = CroppingCalendar
diff --git a/src/02_crop_calendar/index.js b/src/02_crop_calendar/index.js
new file mode 100644
index 0000000..c2c3b0e
--- /dev/null
+++ b/src/02_crop_calendar/index.js
@@ -0,0 +1,102 @@
+require('dotenv').config()
+const path = require('path')
+const CroppingCalendar = require('./cropping_calendar')
+const { uploadToFirestore } = require('../lib/uploadtofirestore')
+
+// Path: /n_cropping_calendar_merged/{province}.data[]
+const main = async () => {
+ const handler = new CroppingCalendar(path.resolve(__dirname, process.env.CSV_FILENAME))
+  const upload = false // set to true to enable the Firestore upload step
+  const write = true // set to false to skip writing the processed CSV files
+
+ // Cropping Calendar-specific tables and firestore collection names
+ const newTables = {
+ provinces: 'n_provinces',
+ municipalities: 'n_municipalities',
+ crops: 'n_crops',
+ crop_stages: 'n_crop_stages'
+ }
+
+ try {
+ console.log('Reading CSV...')
+ await handler.readCSV()
+
+ if (upload) {
+ // Group data by province
+ const data = handler.data().reduce((group, row, index) => {
+ const province = row.province.trim()
+
+ if (group[province] === undefined) {
+ group[province] = []
+ }
+
+ const obj = {}
+ for (const key in row) {
+ if (!['id', 'province'].includes(key)) {
+ obj[key] = row[key].trim()
+ }
+ }
+
+ group[province].push(obj)
+ return { ...group }
+ }, {})
+
+      console.log('\nPreparing Firestore upload queries...')
+ const query = []
+
+ // Upload full collections
+ for (const collection in newTables) {
+ query.push(handler.firestoreUpload(
+ newTables[collection],
+ true,
+ (collection === 'municipalities')
+ ? handler[collection].map(x => ({ id: x.id, province: x.province, name: x.name }))
+ : handler[collection]
+ ))
+ }
+
+ // Upload calendar documents
+ let logs = ''
+
+ for (const province in data) {
+ // Logs
+ logs += `${province}: ${data[province].length} items\n`
+
+ // Upload query
+ query.push(uploadToFirestore('n_cropping_calendar_merged', province, { data: data[province] }))
+ }
+
+ console.log(logs)
+ console.log('Uploading data to Firestore...')
+ await Promise.all(query)
+ console.log('Upload success!')
+ }
+
+ if (write) {
+ console.log('\nWriting data to CSV...')
+ handler.write(handler.data(), path.resolve(__dirname, 'data.csv'))
+
+ for (const collection in newTables) {
+ handler.write(
+ (collection === 'municipalities')
+ ? handler[collection].map(x => ({ id: x.id, province: x.province, name: x.name }))
+ : handler[collection],
+        path.resolve(__dirname, `${newTables[collection]}.csv`)
+      )
+
+ console.log(`${collection}: ${handler[collection].length}`)
+ }
+ }
+
+ console.log('\n------------------------------\nProcessing finished. Stats:')
+ console.log(`cropping calendar: ${handler.data().length}`)
+
+ console.log('\n')
+ } catch (err) {
+ console.log(err)
+ }
+}
+
+(async () => {
+ await main()
+})()
diff --git a/src/03_recommendations_detail/constants.js b/src/03_recommendations_detail/constants.js
new file mode 100644
index 0000000..6803f32
--- /dev/null
+++ b/src/03_recommendations_detail/constants.js
@@ -0,0 +1,20 @@
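+// Excel column headers mapped to normalized, lowercase field names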
+const NORMAL_COLUMN_NAMES = {
+ Crop: 'crop',
+ 'Crop Stage': 'crop_stage',
+ 'Farm Operation': 'farm_operation',
+ Municipality: 'municipality',
+ Province: 'province'
+}
+
+const EXCEL_COLUMN_NAMES = {
+ CROP: 'Crop',
+ CROP_STAGE: 'Crop Stage',
+ FARM_OPERATION: 'Farm Operation',
+ MUNICIPALITY: 'Municipality',
+ PROVINCE: 'Province'
+}
+
+module.exports = {
+ NORMAL_COLUMN_NAMES,
+ EXCEL_COLUMN_NAMES
+}
diff --git a/src/03_recommendations_detail/index.js b/src/03_recommendations_detail/index.js
new file mode 100644
index 0000000..f7e1a21
--- /dev/null
+++ b/src/03_recommendations_detail/index.js
@@ -0,0 +1,67 @@
+require('dotenv').config()
+const path = require('path')
+const { uploadToFirestore } = require('../lib/uploadtofirestore')
+const XLSXWrapper = require('../lib/xlsxwrapper')
+const { NORMAL_COLUMN_NAMES, EXCEL_COLUMN_NAMES } = require('./constants')
+const { CROP_STAGE_LABELS } = require('../02_crop_calendar/constants')
+
+// Path: /n_recommendations_details/{province}.data[]
+const main = async () => {
+ const filePath = path.join(__dirname, '..', '01_recommendations', process.env.EXCEL_FILENAME)
+
+ // Read the excel file
+ const excel = new XLSXWrapper(filePath)
+
+ // Read sheet data from excel file
+ const excelData = excel.getDataSheet(3)
+
+ // Crop stage codes and full labels
+ const cropStageCodes = Object.keys(CROP_STAGE_LABELS)
+
+ // Group data by province
+ const data = excelData.reduce((group, row, index) => {
+ const province = row[EXCEL_COLUMN_NAMES.PROVINCE].trim()
+
+ if (group[province] === undefined) {
+ group[province] = []
+ }
+
+ const obj = {}
+ for (const key in row) {
+ if (key !== EXCEL_COLUMN_NAMES.PROVINCE) {
+ obj[NORMAL_COLUMN_NAMES[key]] = row[key].trim()
+
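+        // Map the full crop stage label (e.g., 'Maturing') back to its short code (e.g., 'mat')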
+ if (key === EXCEL_COLUMN_NAMES.CROP_STAGE) {
+ obj.stage = cropStageCodes.find(x => CROP_STAGE_LABELS[x] === row[key].trim()) || ''
+ }
+ }
+ }
+
+ group[province].push(obj)
+ return { ...group }
+ }, {})
+
+ // Upload data to Firestore
+ try {
+ const query = []
+ let logs = ''
+
+ for (const province in data) {
+ // Logs
+ logs += `${province}: ${data[province].length} items\n`
+
+ // Upload query
+ query.push(uploadToFirestore('n_recommendations_details', province, { data: data[province] }))
+ }
+
+ console.log(logs)
+ console.log('Uploading data to Firestore...')
+ await Promise.all(query)
+ console.log('Upload success!')
+ } catch (err) {
+ console.log(`[ERROR]: ${err.message}`)
+ process.exit(1)
+ }
+}
+
+main()
diff --git a/src/lib/uploadtofirestore/index.js b/src/lib/uploadtofirestore/index.js
index 482ee4a..216f147 100644
--- a/src/lib/uploadtofirestore/index.js
+++ b/src/lib/uploadtofirestore/index.js
@@ -22,7 +22,6 @@ module.exports.uploadToFirestore = async (collectionName, docName, jsonData) =>
.doc(docName)
.set(jsonData)
- console.log(docRef)
return docRef
} catch (err) {
throw new Error(err.message)