Skip to content

Commit

Permalink
resizeImage.js tests (#674)
Browse files Browse the repository at this point in the history
* starter tests for resizeimage

* lint

* tests!!!!

* resolving merge conflicts

* test setting env var

* source bash_env

* changing ipfsPort to 5001 in default-config

* reverting config.yml changes and updating storagepath

* updating env vars for tests

* test fixes

* lint

* moving ipfsPort env var to elif block

* adding tests to cover checking fs for contents

* changing test image dir

* hardcoding values in tests

* more descriptive error msgs

* adding echo statements to run-tests.sh

* updating env vars in config.yml

* removing echo and pipe and source

* refactoring tests

* assert.ok() on error messages

* fix test

* rebase

* Revert "updating docker exec command with updated cn db name"

This reverts commit ceb2e06.

* Revert "Revert "updating docker exec command with updated cn db name""

This reverts commit 26cfc51.

* removing echos

* updating storagePath prefix logic

Co-authored-by: Dheeraj Manjunath <dheerajmanju1@gmail.com>
  • Loading branch information
vicky-g and dmanjunath committed Aug 4, 2020
1 parent ec938de commit 88a4e99
Show file tree
Hide file tree
Showing 11 changed files with 351 additions and 11 deletions.
3 changes: 0 additions & 3 deletions creator-node/scripts/run-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,6 @@ export logLevel='info'
# So, if tests are run locally, run docker exec command. Else, run the psql command in the job.
if [ -z "${isCIBuild}" ]; then
docker exec -i cn1_creator-node-db_1 /bin/sh -c "psql -U postgres -tc \"SELECT 1 FROM pg_database WHERE datname = 'audius_creator_node_test'\" | grep -q 1 || psql -U postgres -c \"CREATE DATABASE audius_creator_node_test\""
elif [ -x "$(command -v psql)" ]; then
# taken from https://stackoverflow.com/a/36591842
psql -U postgres -h localhost -p $PG_PORT -tc "SELECT 1 FROM pg_database WHERE datname = 'audius_creator_node_test'" | grep -q 1 || psql -U postgres -h localhost -p $PG_PORT -c "CREATE DATABASE audius_creator_node_test"
fi

mkdir -p $storagePath
Expand Down
15 changes: 7 additions & 8 deletions creator-node/src/resizeImage.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
const Jimp = require('jimp')
const ExifParser = require('exif-parser')
const { logger: genericLogger } = require('./logging')
const config = require('./config')
const { ipfs } = require('./ipfsClient')
const fs = require('fs')
const path = require('path')
const { promisify } = require('util')
const ipfsClient = require('ipfs-http-client')
const writeFile = promisify(fs.writeFile)
const mkdir = promisify(fs.mkdir)

Expand Down Expand Up @@ -112,15 +111,15 @@ module.exports = async (job) => {
square,
logContext
} = job.data
const ipfs = ipfsClient(
config.get('ipfsHost'),
config.get('ipfsPort')
)

const logger = genericLogger.child(logContext)

// Read the image once, clone it later on
let img = await Jimp.read(file)
let img
try {
img = await Jimp.read(file)
} catch (e) {
throw new Error(`Could not generate image buffer during image resize: ${e}`)
}

// Resize all the images
const resizes = await Promise.all(
Expand Down
344 changes: 344 additions & 0 deletions creator-node/test/resizeImage.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,344 @@
const { ipfs } = require('../src/ipfsClient')
const resizeImageJob = require('../src/resizeImage')
const config = require('../src/config')

const fs = require('fs')
const path = require('path')
const sinon = require('sinon')
const assert = require('assert')

// Fixture: raw buffer for the audiusDj.png test image
const imageTestDir = 'resizeImageAssets'
const imageBuffer = fs.readFileSync(path.join(__dirname, imageTestDir, 'audiusDj.png'))

// Normalize the configured storage path to a relative one: a leading '/'
// resolves against the filesystem root, so prefix '.' to stay in the cwd
let storagePath = config.get('storagePath')
storagePath = storagePath.startsWith('/') ? `.${storagePath}` : storagePath

// Expected content-addressed identifiers (CIDs) for audiusDj.png resizes
const DIR_CID_SQUARE = 'QmNfiyESzN4rNQikeHUiF4HBfAEKF38DTo1JtiDMukqwE9'
const CID_1000 = 'QmZg29dJohTJdNodaiLrKcdTBhRhnbHcCijt3i88juyKzh'
const CID_480 = 'QmcThUoKmADpRZmQCNa8W88tcBCHjhSpU8qCWRETks7bAR'
const CID_150 = 'QmSFGj6Hos2RPjGnogeZ1AgNa8tAsdLFYg4tfzMxyLi4Mh'
const DIR_CID_NOT_SQUARE = 'QmNWhyJ7UrWUjpnFhxVSefAsr2etbuYYLfAb3TSC4DujjY'
const CID_640 = 'QmQmsktPHnTvneXpYYLCMmbE8xVp7wRGvmw1nX4zX2dS3v'
const CID_2000 = 'QmdjTLFVyGyzG3pVRsQLSGQq9bK2T2bdth65wrW8xMQZDg'
const CID_ORIGINAL = 'QmWAMpnZo2TC45mnENxXsPPdCDk5osDKEt7vY1FEU3x28L'

describe('test resizeImage', () => {
// Restore any sinon stubs/spies created by a test before the next one runs
afterEach(() => {
  sinon.restore()
})
/**
 * Given: the param image buffer is bad
 * When: Jimp reads a bad image buffer
 * Then: an error is thrown
 */
it('should throw error if Jimp reads a bad image buffer', async () => {
  // A number is not a valid image buffer, so Jimp.read must reject
  const badBuffer = 123
  const sizes = {
    '150x150.jpg': 150,
    '480x480.jpg': 480,
    '1000x1000.jpg': 1000
  }
  const job = {
    data: {
      file: badBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes,
      square: true,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
    assert.fail('Should not have passed if Jimp reads bad image buffer')
  } catch (err) {
    console.error(err)
    // The assert.fail message above cannot contain this substring, so a
    // wrongly-succeeding job still fails this check
    assert.ok(err.message.includes('Could not generate image buffer during image resize'))
  }
})

/**
 * Given: we are adding the successfully resized images to ipfs
 * When: adding to ipfs fails
 * Then: an error is thrown
 */
it('should throw error if ipfs is down', async () => {
  // Force every ipfs.add call to fail
  sinon.stub(ipfs, 'add').throws(new Error('ipfs is down!'))

  const sizes = {
    '150x150.jpg': 150,
    '480x480.jpg': 480,
    '1000x1000.jpg': 1000
  }
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes,
      square: true,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
    assert.fail('Should not have passed if ipfs is down')
  } catch (err) {
    console.error(err)
    // The assert.fail message above differs from the stubbed error, so a
    // wrongly-succeeding job still fails this equality check
    assert.deepStrictEqual(err.message, 'ipfs is down!')
  }
})

/**
 * Given: we are creating a directory at the destination path
 * When: a bad path is passed in
 * Then: an error is thrown
 *
 * NOTE(fix): the original wrapped `assert.fail` in the same try/catch and then
 * only asserted `e.message` was truthy — the AssertionError from assert.fail
 * has a truthy message, so the test could never fail even if the job
 * succeeded. `assert.rejects` removes that self-swallowing trap.
 */
it('should throw error if making a directory with new dest path fails', async () => {
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      // Parent directories do not exist, so mkdir must fail
      storagePath: 'some/storage/path',
      sizes: {
        '150x150.jpg': 150,
        '480x480.jpg': 480,
        '1000x1000.jpg': 1000
      },
      square: true,
      logContext: {}
    }
  }

  await assert.rejects(
    resizeImageJob(job),
    (e) => {
      console.error(e)
      return Boolean(e.message)
    },
    'Should not have passed if making new directory at path fails'
  )
})

/**
 * Given: we have successfully resized the images (square)
 * When: the images are added to the filesystem
 * Then: the images should:
 * - be added in the proper file system path
 * - correctly resized (150x150, 480x480, 1000x1000, original)
 * - is not corrupted
 *
 * NOTE(fix): the original used callback-style `fs.readdir`; its assertions ran
 * after the async test had already resolved, so they could never fail the
 * test. `fs.readdirSync` keeps all assertions inside the test's lifetime.
 */
it('should pass with proper contents added to filesystem (square)', async () => {
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes: {
        '150x150.jpg': 150,
        '480x480.jpg': 480,
        '1000x1000.jpg': 1000
      },
      square: true,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
  } catch (e) {
    console.error(e)
    assert.fail(e)
  }

  // Check fs contains the dir for square cids
  const dirPath = path.join(storagePath, DIR_CID_SQUARE)
  assert.ok(fs.existsSync(dirPath))

  const dirContentCIDs = new Set([CID_150, CID_480, CID_1000, CID_ORIGINAL])

  let files
  try {
    files = fs.readdirSync(dirPath)
  } catch (e) {
    assert.fail(`Could not read directory at ${dirPath}`)
  }

  // Check that 4 files (150x150, 480x480, 1000x1000, original) are present
  assert.deepStrictEqual(files.length, 4)

  files.forEach(file => {
    // Check that (150x150, 480x480, 1000x1000, original) files exist
    assert.ok(dirContentCIDs.has(file))

    // Check file contents are proper by comparing the buffers;
    // Buffer.compare returns 0 iff the buffers are byte-identical
    const fsBuf = fs.readFileSync(path.join(dirPath, file))
    const expectedBuf = fs.readFileSync(path.join(__dirname, imageTestDir, DIR_CID_SQUARE, file))
    assert.deepStrictEqual(fsBuf.compare(expectedBuf), 0)

    // Remove from set so a duplicate filename would fail the check above
    dirContentCIDs.delete(file)
  })

  // Every expected CID was seen exactly once
  assert.deepStrictEqual(dirContentCIDs.size, 0)
})

/**
 * Given: we have successfully created the resized images to add to ipfs
 * When: we add the resized images to ipfs
 * Then: we ensure that what is added to fs is the same as what is added to ipfs
 *
 * NOTE(fix): replaced side-effect `.map` with `.forEach` and the opaque
 * `assert.ok(a === b)` with `assert.strictEqual` so a mismatch reports both
 * values.
 */
it('should be properly added to ipfs (square)', async () => {
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes: {
        '150x150.jpg': 150,
        '480x480.jpg': 480,
        '1000x1000.jpg': 1000
      },
      square: true,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
  } catch (e) {
    console.error(e)
    assert.fail(e)
  }

  // check what is in file_storage matches what is in ipfs
  let ipfsDirContents
  try {
    ipfsDirContents = await ipfs.ls(DIR_CID_SQUARE)
  } catch (e) {
    console.error(e)
    assert.fail('Directory not found in ipfs.')
  }

  // Ensure that there are the same number of files uploaded to ipfs and to disk
  assert.strictEqual(ipfsDirContents.length, 4)

  // If hash found in ipfs is not found in file_storage, fail
  ipfsDirContents.forEach(ipfsFile => {
    const fsPathForIpfsFile = path.join(storagePath, DIR_CID_SQUARE, ipfsFile.hash)
    if (!fs.existsSync(fsPathForIpfsFile)) {
      assert.fail(`File in ipfs not found in file_storage for size ${ipfsFile.name}`)
    }
  })
})

/**
 * Given: we have successfully resized the images (not square)
 * When: the images are added to the filesystem
 * Then: the images should:
 * - be added in the proper file system path
 * - correctly resized (640x, 2000x, original)
 * - is not corrupted
 *
 * NOTE(fix): the original used callback-style `fs.readdir`; its assertions ran
 * after the async test had already resolved, so they could never fail the
 * test. `fs.readdirSync` keeps all assertions inside the test's lifetime.
 */
it('should pass with proper contents added to filesystem (not square)', async () => {
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes: {
        '640x.jpg': 640,
        '2000x.jpg': 2000
      },
      square: false,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
  } catch (e) {
    console.error(e)
    assert.fail(e)
  }

  // Check fs contains the dir for the non-square cids
  const dirPath = path.join(storagePath, DIR_CID_NOT_SQUARE)
  assert.ok(fs.existsSync(dirPath))

  const dirContentCIDs = new Set([CID_640, CID_2000, CID_ORIGINAL])

  let files
  try {
    files = fs.readdirSync(dirPath)
  } catch (e) {
    assert.fail(`Could not read directory at ${dirPath}`)
  }

  // Check that 3 files (640x, 2000x, original) are present
  assert.deepStrictEqual(files.length, 3)

  files.forEach(file => {
    // Check that (640x, 2000x, original) files exist
    assert.ok(dirContentCIDs.has(file))

    // Check file contents are proper by comparing the buffers;
    // Buffer.compare returns 0 iff the buffers are byte-identical
    const fsBuf = fs.readFileSync(path.join(dirPath, file))
    const expectedBuf = fs.readFileSync(path.join(__dirname, imageTestDir, DIR_CID_NOT_SQUARE, file))
    assert.deepStrictEqual(expectedBuf.compare(fsBuf), 0)

    // Remove from set so a duplicate filename would fail the check above
    dirContentCIDs.delete(file)
  })

  // Every expected CID was seen exactly once
  assert.deepStrictEqual(dirContentCIDs.size, 0)
})

/**
 * Given: we have successfully created the resized images to add to ipfs
 * When: we add the resized images to ipfs
 * Then: we ensure that what is added to fs is the same as what is added to ipfs
 *
 * NOTE(fix): replaced side-effect `.map` with `.forEach` and the opaque
 * `assert.ok(a === b)` with `assert.strictEqual` so a mismatch reports both
 * values.
 */
it('should pass with happy path (not square)', async () => {
  const job = {
    data: {
      file: imageBuffer,
      fileName: 'audiusDj',
      storagePath,
      sizes: {
        '640x.jpg': 640,
        '2000x.jpg': 2000
      },
      square: false,
      logContext: {}
    }
  }

  try {
    await resizeImageJob(job)
  } catch (e) {
    console.error(e)
    assert.fail(e)
  }

  // check what is in file_storage matches what is in ipfs
  let ipfsDirContents
  try {
    ipfsDirContents = await ipfs.ls(DIR_CID_NOT_SQUARE)
  } catch (e) {
    console.error(e)
    assert.fail('Directory not found in ipfs.')
  }

  // Ensure that there are the same number of files uploaded to ipfs and to disk
  assert.strictEqual(ipfsDirContents.length, 3)

  // If hash found in ipfs is not found in file_storage, fail
  ipfsDirContents.forEach(ipfsFile => {
    const fsPathForIpfsFile = path.join(storagePath, DIR_CID_NOT_SQUARE, ipfsFile.hash)
    if (!fs.existsSync(fsPathForIpfsFile)) {
      assert.fail(`File in ipfs not found in file_storage for size ${ipfsFile.name}`)
    }
  })
})
})
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added creator-node/test/resizeImageAssets/audiusDj.png
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 88a4e99

Please sign in to comment.