In this guide substitute projectname
, dbname
, tablename
, and colnameN
for your own values.
- New repo on GitHub, choose .gitignore: Node, a license, and clone it to your machine
- sh:
cd projectname && atom .
- sh:
express --git --view ejs
- sh:
npm i && nodemon
- All good if
http://localhost:3000
loads - Ensure .gitignore has:
node_modules/
.DS_Store
.env
yarn.lock
package-lock.json
- Create a
.eslintrc
in project root if you wish. If so, also run sh:npm i --save-dev eslint
- git add, commit, push
- Ensure you have knex and postgreSQL installed globally on your machine. Test by typing
knex
and psql
respectively, into your shell. - sh:
createdb dbname-dev
to create the dev database in psql - sh:
npm i --save knex pg
to install knex + postgres modules for your project - Create a
knex.js
in your project root with the following:
// Pick the Knex config that matches the current environment and export a
// ready-to-use Knex instance for the rest of the app to require.
const env = process.env.NODE_ENV || 'development'
const config = require('./knexfile')[env]
module.exports = require('knex')(config)
- Create a
knexfile.js
in your project root with the following:
// Define DB connections for different environments.
// Each key below is selected by NODE_ENV (see knex.js).
const development = {
  client: 'pg',
  connection: 'postgres://localhost/dbname-dev'
}

// Left empty until a test database is needed
const test = {}

// Heroku-style deployments supply DATABASE_URL
const production = {
  client: 'pg',
  connection: process.env.DATABASE_URL
}

module.exports = { development, test, production }
- git add, commit, push
- sh:
knex migrate:make tablename
to make migration file for a db table - Find the migration file it just created and make it look like:
exports.up = function(knex, Promise) {
return knex.schema.createTable('tablename', function(table) {
// TABLE COLUMN DEFINITIONS HERE
table.increments()
table.string('colname1', 255).notNullable().defaultTo('')
table.string('colname2', 255).notNullable().defaultTo('')
table.string('colname3', 255).notNullable().defaultTo('')
table.timestamps(true, true)
// OR
// table.dateTime('created_at').notNullable().defaultTo(knex.raw('now()'))
// table.dateTime('updated_at').notNullable().defaultTo(knex.raw('now()'))
})
}
exports.down = function(knex, Promise) {
return knex.schema.dropTableIfExists('tablename')
}
- Repeat the last 2 steps for each table in your ERD
- sh:
knex migrate:latest
to run all migrations, creating the tables in your psql db - If
knex migrate:latest
messed something up you can always run knex migrate:rollback
- sh:
psql dbname-dev
to verify it created your tables correctly - In psql:
\d tablename
- git add, commit, push
- sh:
knex seed:make 001_tablename
- Edit the seed file it created for your table to look similar to this:
exports.seed = function(knex, Promise) {
// Deletes ALL existing entries
return knex('tablename').del()
.then(function() {
// Inserts seed entries
return knex('tablename').insert([
{id: 1, colname1: '', colname2: '', colname3: ''},
{id: 2, colname1: '', colname2: '', colname3: ''},
{id: 3, colname1: '', colname2: '', colname3: ''}
])
.then(function() {
// Moves id column (PK) auto-incrementer to correct value after inserts
return knex.raw("SELECT setval('tablename_id_seq', (SELECT MAX(id) FROM tablename))")
})
})
}
- Repeat the last 2 steps for each table in your ERD. Give each seed file a unique 3 digit prefix so you can control the order in which seeding occurs.
- sh:
knex seed:run
to insert all seed data into your database - sh:
psql dbname-dev
to verify it seeded correctly - In psql:
SELECT * FROM tablename
- You can always change your seed files and rerun:
knex seed:run
- git add, commit, push
- In your project root under
routes/
, create a new file tablename.js
- Setup your basic routes for that file:
const express = require('express')
const router = express.Router()
const knex = require('../knex')

// Placeholder CRUD endpoints — each handler is wired to Knex later in the guide.

// READ ALL records for this table
router.get('/', function(req, res, next) {
  res.send('ALL RECORDS')
})

// READ ONE record for this table
router.get('/:id', function(req, res, next) {
  res.send('ONE RECORD')
})

// CREATE ONE record for this table
router.post('/', function(req, res, next) {
  res.send('CREATED RECORD')
})

// UPDATE ONE record for this table
router.put('/:id', function(req, res, next) {
  res.send('UPDATED RECORD')
})

// DELETE ONE record for this table
router.delete('/:id', function(req, res, next) {
  res.send('DELETED RECORD')
})

module.exports = router
- In your
app.js
make sure to require the route file:
var tablenameRouter = require('./routes/tablename')
and to use it:
app.use('/tablename', tablenameRouter)
- sh:
nodemon
and test our GET/POST/PUT/DELETE routes with HTTPie - If all is well git add, commit, push
- Go back to our route file. Make the GET routes work with Knex:
// READ ALL records for this table (no .where() = SELECT * FROM tablename)
router.get('/', (req, res, next) => {
  knex('tablename')
    .then((rows) => res.json(rows))
    .catch(next) // forward any DB error to the Express error handler
})

// READ ONE record for this table (responds with an array of 0 or 1 rows)
router.get('/:id', (req, res, next) => {
  knex('tablename')
    .where('id', req.params.id)
    .then((rows) => res.json(rows))
    .catch(next)
})
- Make the POST route work with Knex:
// CREATE ONE record for this table
router.post('/', (req, res, next) => {
  // Build the row explicitly from the request body (whitelists columns)
  const record = {
    colname1: req.body.colname1,
    colname2: req.body.colname2,
    colname3: req.body.colname3
  }
  knex('tablename')
    .insert(record)
    .returning('*') // Postgres: get the inserted row back
    .then((data) => res.json(data[0]))
    .catch(next)
})
- Make the PUT route work with Knex:
// UPDATE ONE record for this table.
// Fixes vs. the naive version: the existence check is actually used (404
// fall-through via next(), matching the DELETE route), the inner promise is
// RETURNED so update errors reach .catch, the shadowed `data` variable is
// gone, and the invalid-on-Postgres `.limit(1)` on UPDATE is removed (the
// primary-key WHERE already guarantees at most one row).
router.put('/:id', (req, res, next) => {
  knex('tablename')
    .where('id', req.params.id)
    .first()
    .then((row) => {
      // No such record: fall through to the 404 handler
      if (!row) return next()
      return knex('tablename')
        .where('id', req.params.id)
        .update({
          "colname1": req.body.colname1,
          "colname2": req.body.colname2,
          "colname3": req.body.colname3
        })
        .returning('*')
        .then((data) => {
          res.json(data[0])
        })
    })
    .catch((err) => {
      next(err)
    })
})
- Make the DELETE route work with Knex:
// DELETE ONE record for this table.
// Fix: the inner delete promise is RETURNED so a failed DELETE propagates to
// .catch (previously a rejection there was unhandled).
router.delete('/:id', function(req, res, next) {
  knex('tablename')
    .where('id', req.params.id)
    .first()
    .then((row) => {
      // No such record: fall through to the 404 handler
      if (!row) return next()
      return knex('tablename')
        .del()
        .where('id', req.params.id)
        .then(() => {
          res.send(`ID ${req.params.id} Deleted`)
        })
    })
    .catch((err) => {
      next(err)
    })
})
- These routes do not have any real input validation or defensive coding going on. A real app will need this to prevent shenanigans from users.
- Test these routes with HTTPie and your database tool
psql
- If all is well, git add, commit, push
- Only one view needed:
views/index.ejs
- Put frontend JS in
public/javascripts/scripts.js
- Put CSS in
public/stylesheets/styles.css
- Reference both of these from
views/index.ejs
with script/link tags - In
public/javascripts/scripts.js
add AJAX code to request JSON from our backend routes.
// Wire up the Go button once the DOM is ready, then fetch all records
// from the backend API and drop them into the page.
$(document).ready(function() {
  $('#btn-go').click((event) => {
    const handleSuccess = (data) => {
      console.log(data)
      // UPDATE DOM!
      $('doohickey').append(data)
    }
    const handleError = function(jqXhr, textStatus, errorThrown) {
      console.log('OOPS:', errorThrown)
    }
    $.ajax({
      url: '/tablename',
      type: 'GET',
      success: handleSuccess,
      error: handleError
    })
  })
})
- We write frontend AJAX like this against our backend routes (API) in various ways and update our DOM accordingly.
- Create templates under
views/
to render HTML, and change all res.json()
/res.send()
calls in routes to res.render()
calls - Duplicate
views/index.ejs
and rename to tablename.ejs
- Utilize template tags and
res.render()
to push data to your templates