Merge pull request #322 from XiaoningLiu/memoryOpti

Azure Storage Node.js Client Library v2.2.1

vinjiang committed Jul 4, 2017
2 parents 341f76a + ed7f6e2 commit 7b6a50f
Showing 13 changed files with 138 additions and 13 deletions.
6 changes: 6 additions & 0 deletions ChangeLog.md
@@ -1,6 +1,12 @@
Note: This is an Azure Storage only package. The all-up Azure Node.js SDK still contains the old storage bits; in a future release those bits will be removed and replaced with an npm dependency on this storage package. This is a GA release, and the changes described below are relative to the Azure Node.js SDK 0.9.8 available here - https://github.com/Azure/azure-sdk-for-node.

2017.07 Version 2.2.1

BLOB

* Optimized memory usage, especially when uploading blobs with a large block size.

2017.06 Version 2.2.0

ALL
4 changes: 4 additions & 0 deletions browser/ChangeLog.md
@@ -1,5 +1,9 @@
Note: This is the change log file for Azure Storage JavaScript Client Library.

2017.07 Version 0.2.2-preview.6

* Generated browser compatible JavaScript files based on Microsoft Azure Storage SDK for Node.js 2.2.1.

2017.06 Version 0.2.2-preview.5

* Generated browser compatible JavaScript files based on Microsoft Azure Storage SDK for Node.js 2.2.0.
1 change: 1 addition & 0 deletions lib/common/common.core.js
@@ -54,6 +54,7 @@ exports.ChunkAllocator = require('./streams/chunkallocator');
exports.ChunkStream = require('./streams/chunkstream');
exports.ChunkStreamWithStream = require('./streams/chunkstreamwithstream');
exports.SpeedSummary = require('./streams/speedsummary');
exports.BufferStream = require('./streams/bufferstream');

// Utilities
exports.Constants = require('./util/constants');
9 changes: 8 additions & 1 deletion lib/common/services/storageserviceclient.js
@@ -32,6 +32,7 @@ var azureutil = require('../util/util');
var validate = require('../util/validate');
var SR = require('../util/sr');
var WebResource = require('../http/webresource');
var BufferStream = require('../streams/bufferstream.js');

var ServiceSettings = require('./servicesettings');
var StorageServiceSettings = require('./storageservicesettings');
@@ -191,7 +192,13 @@ StorageServiceClient.prototype.setHost = function (host) {
* @param {function} callback The response callback function.
*/
StorageServiceClient.prototype.performRequest = function (webResource, outputData, options, callback) {
this._performRequest(webResource, { outputData: outputData }, options, callback);
if (!azureutil.isBrowser() && Buffer.isBuffer(outputData)) {
// The request module can take roughly 200MB of additional memory when a 100MB buffer is passed directly as the request body.
// Wrapping the buffer in a stream greatly reduces the memory used by the request module.
this._performRequest(webResource, { outputData: new BufferStream(outputData) }, options, callback);
} else {
this._performRequest(webResource, { outputData: outputData }, options, callback);
}
};

/**
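For context, here is a minimal sketch of the buffer-to-stream hand-off that performRequest now makes, assuming the BufferStream class added later in this commit; the helper name toRequestBody is hypothetical and only illustrates the branch above.

var BufferStream = require('./lib/common/streams/bufferstream');

// Hypothetical helper mirroring the branch in performRequest: in Node.js, a large
// Buffer body is wrapped in a readable stream so the request module does not copy it.
function toRequestBody(outputData, isBrowser) {
  if (!isBrowser && Buffer.isBuffer(outputData)) {
    return new BufferStream(outputData);
  }
  return outputData;
}

var body = toRequestBody(Buffer.alloc(100 * 1024 * 1024), false); // yields a BufferStream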
20 changes: 12 additions & 8 deletions lib/common/streams/batchoperation.js
@@ -27,6 +27,8 @@ var errors = require('../errors/errors');
var ArgumentError = errors.ArgumentError;

var DEFAULT_OPERATION_MEMORY_USAGE = Constants.BlobConstants.DEFAULT_WRITE_BLOCK_SIZE_IN_BYTES;
var DEFAULT_CRITICAL_MEMORY_LIMITATION_32_IN_BYTES = Constants.BlobConstants.DEFAULT_CRITICAL_MEMORY_LIMITATION_32_IN_BYTES;
var DEFAULT_CRITICAL_MEMORY_LIMITATION_BROWSER_IN_BYTES = Constants.BlobConstants.DEFAULT_CRITICAL_MEMORY_LIMITATION_BROWSER_IN_BYTES;
var DEFAULT_GLOBAL_CONCURRENCY = 5; //Default http connection limitation for nodejs

var SystemTotalMemory = os.totalmem();
@@ -112,25 +114,27 @@ BatchOperation.prototype.IsWorkloadHeavy = function() {
if(this.enableReuseSocket && !this.callInOrder) {
sharedRequest = 2;
}
return this._activeOperation >= sharedRequest * this.concurrency ||
this._isLowMemory() ||
(this._activeOperation >= this.concurrency && this._getApproximateMemoryUsage() > 0.5 * SystemTotalMemory);
return this._activeOperation >= sharedRequest * this.concurrency || this._isLowMemory();
};

/**
* get the approximate memory usage for batch operation
* Get the approximate memory usage for batch operation.
*/
BatchOperation.prototype._getApproximateMemoryUsage = function() {
var currentUsage = process.memoryUsage().rss;
var currentUsage = azureutil.isBrowser() ? 0 : process.memoryUsage().rss; // Currently, we cannot get memory usage in browsers
var futureUsage = this._queuedOperation * this.operationMemoryUsage;
return currentUsage + futureUsage;
};

/**
* get the approximate free memory
* Returns whether the process is in a low-memory situation.
*/
BatchOperation.prototype._isLowMemory = function() {
return os.freemem() < CriticalFreeMemory;
BatchOperation.prototype._isLowMemory = function() {
var approximateMemoryUsage = this._getApproximateMemoryUsage();
return os.freemem() < CriticalFreeMemory ||
(this._activeOperation >= this.concurrency && approximateMemoryUsage > 0.5 * SystemTotalMemory) ||
(azureutil.is32() && approximateMemoryUsage > DEFAULT_CRITICAL_MEMORY_LIMITATION_32_IN_BYTES) ||
(azureutil.isBrowser() && approximateMemoryUsage > DEFAULT_CRITICAL_MEMORY_LIMITATION_BROWSER_IN_BYTES);
};

/**
50 changes: 50 additions & 0 deletions lib/common/streams/bufferstream.js
@@ -0,0 +1,50 @@
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

var stream = require('stream');
var util = require('util');

function BufferStream(buffer, options) {
stream.Readable.call(this, options);

this._buffer = buffer;
this._offset = 0;
this._chunkSize = 4 * 1024 * 1024;
this._bufferSize = buffer.length;
}

util.inherits(BufferStream, stream.Readable);

BufferStream.prototype._read = function () {
while (this.push(this._readNextChunk())) {
continue;
}
};

BufferStream.prototype._readNextChunk = function () {
var data = null;

if (this._offset < this._bufferSize) {
var end = this._offset + this._chunkSize;
end = end > this._bufferSize ? this._bufferSize : end;
data = this._buffer.slice(this._offset, end);
this._offset = end;
}

return data;
};

module.exports = BufferStream;
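A small usage sketch of the new BufferStream, based only on the code above; the output file name is a placeholder.

var fs = require('fs');
var BufferStream = require('./lib/common/streams/bufferstream');

// Wrap a 10MB in-memory buffer and stream it out in chunks of at most 4MB.
var payload = Buffer.alloc(10 * 1024 * 1024, 0x61);
var source = new BufferStream(payload);

source.on('data', function (chunk) {
  // Each chunk is a slice of the original buffer (a view, not a copy).
  console.log('read chunk of', chunk.length, 'bytes');
});

source.pipe(fs.createWriteStream('payload.bin'));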
8 changes: 8 additions & 0 deletions lib/common/streams/chunkallocator.js
@@ -117,4 +117,12 @@ ChunkAllocator.prototype.releaseBuffer = function(buffer) {
}
};

/**
* Destroy ChunkAllocator.
*/
ChunkAllocator.prototype.destroy = function() {
this._pool = [];
this._inuse = 0;
};

module.exports = ChunkAllocator;
12 changes: 11 additions & 1 deletion lib/common/streams/chunkstream.js
@@ -115,6 +115,11 @@ ChunkStream.prototype.error = function () {

ChunkStream.prototype.destroy = function () {
this.writable = this.readable = false;

if (this._allocator && this._allocator.destroy) {
this._allocator.destroy();
}

this.emit('close');
};

@@ -155,7 +160,8 @@ ChunkStream.prototype._buildChunk = function (data) {
this._copyToInternalBuffer(data, dataOffset, data.length);
return;
} else if (targetSize == this._highWaterMark){
if(this._internalBufferSize === 0 && data.length === this._highWaterMark) {
var canReleaseInnerStreamBuffer = this._stream && this._stream._allocator && this._stream._allocator.releaseBuffer;
if(this._internalBufferSize === 0 && data.length === this._highWaterMark && !canReleaseInnerStreamBuffer) {
// set the buffer to the data passed in to avoid creating a new buffer
buffer = data;
} else {
@@ -223,6 +229,10 @@ ChunkStream.prototype._copyToInternalBuffer = function(data, start, end) {
var copied = data.copy(this._buffer, this._internalBufferSize, start, end);
this._internalBufferSize += copied;

if (this._stream && this._stream._allocator && this._stream._allocator.releaseBuffer) {
this._stream._allocator.releaseBuffer(data);
}

if(copied != (end - start)) {
throw new Error('Can not copy entire data to buffer');
}
7 changes: 7 additions & 0 deletions lib/common/streams/filereadstream.js
@@ -91,6 +91,13 @@ FileReadStream.prototype.on = function(event, listener) {
return EventEmitter.prototype.on.call(this, event, listener);
};

/**
* Set memory allocator
*/
FileReadStream.prototype.setMemoryAllocator = function(allocator) {
this._allocator = allocator;
};

/**
* Get buffer
*/
16 changes: 16 additions & 0 deletions lib/common/util/constants.js
@@ -518,6 +518,22 @@ var Constants = {
*/
DEFAULT_WRITE_BLOCK_SIZE_IN_BYTES: 4 * 1024 * 1024,

/**
* The default critical memory limitation in a 32-bit Node.js environment, in bytes.
*
* @const
* @type {int}
*/
DEFAULT_CRITICAL_MEMORY_LIMITATION_32_IN_BYTES: 800 * 1024 * 1024,

/**
* The default critical memory limitation in a browser environment, in bytes.
*
* @const
* @type {int}
*/
DEFAULT_CRITICAL_MEMORY_LIMITATION_BROWSER_IN_BYTES: 1 * 1024 * 1024 * 1024,

/**
* The maximum size of a single block of block blob.
*
9 changes: 9 additions & 0 deletions lib/common/util/util.js
@@ -60,6 +60,15 @@ exports.isBrowser = function () {
return typeof window !== 'undefined';
};

/**
* Checks whether the code is running in a 32-bit Node.js environment.
*
* @return {bool} True if running in a 32-bit Node.js environment, false otherwise.
*/
exports.is32 = function () {
return !exports.isBrowser() && process.arch === 'ia32';
};

/**
* Checks if a value is null or undefined.
*
7 changes: 5 additions & 2 deletions lib/services/blob/blobservice.core.js
@@ -4767,19 +4767,22 @@ BlobService.prototype._uploadContentFromChunkStream = function (container, blob,

// initialize chunk allocator
var allocator = new ChunkAllocator(sizeLimitation, parallelOperationThreadCount, { logger: this.logger });
chunkStream.setMemoryAllocator(allocator);
chunkStream.setOutputLength(streamLength);

// if this is a FileReadStream, give it its own allocator sized to its high-water mark
if (chunkStream._stream && chunkStream._stream.setMemoryAllocator) {
chunkStream._stream.setMemoryAllocator(allocator);
var fileReadStreamAllocator = new ChunkAllocator(chunkStream._stream._highWaterMark, parallelOperationThreadCount, { logger: this.logger });
chunkStream._stream.setMemoryAllocator(fileReadStreamAllocator);
}

// initialize batch operations
var batchOperations = new BatchOperation(apiName, {
callInOrder: isAppendBlobUpload,
callbackInOrder: isAppendBlobUpload,
logger: this.logger,
enableReuseSocket: this.defaultEnableReuseSocket
enableReuseSocket: this.defaultEnableReuseSocket,
operationMemoryUsage: sizeLimitation
});
batchOperations.setConcurrency(parallelOperationThreadCount);

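For reference, a hedged sketch of the caller-side upload path this change targets, using the public createBlockBlobFromLocalFile API; the container, blob, and file names are placeholders, and the connection string is assumed to come from the AZURE_STORAGE_CONNECTION_STRING environment variable.

var azure = require('azure-storage');

var blobService = azure.createBlobService();

var options = {
  // Each queued operation is now accounted for at roughly one block of memory
  // (operationMemoryUsage), so peak memory tracks block size and concurrency more closely.
  parallelOperationThreadCount: 5
};

blobService.createBlockBlobFromLocalFile('mycontainer', 'myblob', 'large-file.bin', options,
  function (error, result, response) {
    if (error) {
      console.error('Upload failed:', error);
    } else {
      console.log('Uploaded block blob:', result.name);
    }
  });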
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
{
"name": "azure-storage",
"author": "Microsoft Corporation",
"version": "2.2.0",
"version": "2.2.1",
"description": "Microsoft Azure Storage Client Library for Node.js",
"typings": "typings/azure-storage/azure-storage.d.ts",
"tags": [
