Commit

chore(lint): ESLint auto-fixes
zcei committed Feb 5, 2018
1 parent 502b734 commit bb2d866
Showing 66 changed files with 2,331 additions and 2,346 deletions.
222 changes: 111 additions & 111 deletions lib/adapters/http.js
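
Every changed pair of lines below is mechanical: trailing semicolons are dropped, var becomes const or let, a space is added before function parameter lists, and the switch case labels are re-emitted (apparently re-indented). The lint configuration itself is not part of this commit; a StandardJS-style ESLint setup along the following lines could produce these auto-fixes when run with eslint --fix (a sketch under that assumption — the rule list is illustrative, not taken from the repository):

// .eslintrc.js — hypothetical config that would yield the fixes in this diff.
// Assumed for illustration; not included in commit bb2d866.
module.exports = {
  env: { node: true },
  rules: {
    'semi': ['error', 'never'], // drop trailing semicolons
    'no-var': 'error', // rewrite var declarations as let/const
    'prefer-const': 'error', // use const where a binding is never reassigned
    'space-before-function-paren': ['error', 'always'], // function handleResponse (res)
    'indent': ['error', 2, { SwitchCase: 1 }] // indent case labels one level inside switch
  }
}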
@@ -1,228 +1,228 @@
-'use strict';
+'use strict'

-var utils = require('./../utils');
-var settle = require('./../core/settle');
-var buildURL = require('./../helpers/buildURL');
-var http = require('http');
-var https = require('https');
-var httpFollow = require('follow-redirects').http;
-var httpsFollow = require('follow-redirects').https;
-var url = require('url');
-var zlib = require('zlib');
-var createError = require('../core/createError');
-var enhanceError = require('../core/enhanceError');
-var pkginfo = require('pkginfo')(module);
+const utils = require('./../utils')
+const settle = require('./../core/settle')
+const buildURL = require('./../helpers/buildURL')
+const http = require('http')
+const https = require('https')
+const httpFollow = require('follow-redirects').http
+const httpsFollow = require('follow-redirects').https
+const url = require('url')
+const zlib = require('zlib')
+const createError = require('../core/createError')
+const enhanceError = require('../core/enhanceError')
+const pkginfo = require('pkginfo')(module)

-/*eslint consistent-return:0*/
-module.exports = function httpAdapter(config) {
-  return new Promise(function dispatchHttpRequest(resolve, reject) {
-    var data = config.data;
-    var headers = config.headers;
-    var timer;
+/* eslint consistent-return:0 */
+module.exports = function httpAdapter (config) {
+  return new Promise(function dispatchHttpRequest (resolve, reject) {
+    let data = config.data
+    const headers = config.headers
+    let timer

    // Set User-Agent (required by some servers)
    // Only set header if it hasn't been set in config
    // See https://github.com/axios/axios/issues/69
    if (!headers['User-Agent'] && !headers['user-agent']) {
-      headers['User-Agent'] = 'axios/' + pkginfo.version;
+      headers['User-Agent'] = 'axios/' + pkginfo.version
    }

    if (data && !utils.isStream(data)) {
      if (Buffer.isBuffer(data)) {
        // Nothing to do...
      } else if (utils.isArrayBuffer(data)) {
-        data = new Buffer(new Uint8Array(data));
+        data = new Buffer(new Uint8Array(data))
      } else if (utils.isString(data)) {
-        data = new Buffer(data, 'utf-8');
+        data = new Buffer(data, 'utf-8')
      } else {
        return reject(createError(
          'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream',
          config
-        ));
+        ))
      }

      // Add Content-Length header if data exists
-      headers['Content-Length'] = data.length;
+      headers['Content-Length'] = data.length
    }

    // HTTP basic authentication
-    var auth = undefined;
+    let auth
    if (config.auth) {
-      var username = config.auth.username || '';
-      var password = config.auth.password || '';
-      auth = username + ':' + password;
+      const username = config.auth.username || ''
+      const password = config.auth.password || ''
+      auth = username + ':' + password
    }

    // Parse url
-    var parsed = url.parse(config.url);
-    var protocol = parsed.protocol || 'http:';
+    const parsed = url.parse(config.url)
+    const protocol = parsed.protocol || 'http:'

    if (!auth && parsed.auth) {
-      var urlAuth = parsed.auth.split(':');
-      var urlUsername = urlAuth[0] || '';
-      var urlPassword = urlAuth[1] || '';
-      auth = urlUsername + ':' + urlPassword;
+      const urlAuth = parsed.auth.split(':')
+      const urlUsername = urlAuth[0] || ''
+      const urlPassword = urlAuth[1] || ''
+      auth = urlUsername + ':' + urlPassword
    }

    if (auth) {
-      delete headers.Authorization;
+      delete headers.Authorization
    }

-    var isHttps = protocol === 'https:';
-    var agent = isHttps ? config.httpsAgent : config.httpAgent;
+    const isHttps = protocol === 'https:'
+    const agent = isHttps ? config.httpsAgent : config.httpAgent

-    var options = {
+    const options = {
      hostname: parsed.hostname,
      port: parsed.port,
      path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''),
      method: config.method,
      headers: headers,
      agent: agent,
      auth: auth
-    };
+    }

-    var proxy = config.proxy;
+    let proxy = config.proxy
    if (!proxy && proxy !== false) {
-      var proxyEnv = protocol.slice(0, -1) + '_proxy';
-      var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()];
+      const proxyEnv = protocol.slice(0, -1) + '_proxy'
+      const proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()]
      if (proxyUrl) {
-        var parsedProxyUrl = url.parse(proxyUrl);
+        const parsedProxyUrl = url.parse(proxyUrl)
        proxy = {
          host: parsedProxyUrl.hostname,
          port: parsedProxyUrl.port
-        };
+        }

        if (parsedProxyUrl.auth) {
-          var proxyUrlAuth = parsedProxyUrl.auth.split(':');
+          const proxyUrlAuth = parsedProxyUrl.auth.split(':')
          proxy.auth = {
            username: proxyUrlAuth[0],
            password: proxyUrlAuth[1]
-          };
+          }
        }
      }
    }

    if (proxy) {
-      options.hostname = proxy.host;
-      options.host = proxy.host;
-      options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : '');
-      options.port = proxy.port;
-      options.path = protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path;
+      options.hostname = proxy.host
+      options.host = proxy.host
+      options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : '')
+      options.port = proxy.port
+      options.path = protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path

      // Basic proxy authorization
      if (proxy.auth) {
-        var base64 = new Buffer(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64');
-        options.headers['Proxy-Authorization'] = 'Basic ' + base64;
+        const base64 = new Buffer(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64')
+        options.headers['Proxy-Authorization'] = 'Basic ' + base64
      }
    }

-    var transport;
+    let transport
    if (config.transport) {
-      transport = config.transport;
+      transport = config.transport
    } else if (config.maxRedirects === 0) {
-      transport = isHttps ? https : http;
+      transport = isHttps ? https : http
    } else {
      if (config.maxRedirects) {
-        options.maxRedirects = config.maxRedirects;
+        options.maxRedirects = config.maxRedirects
      }
-      transport = isHttps ? httpsFollow : httpFollow;
+      transport = isHttps ? httpsFollow : httpFollow
    }

    // Create the request
-    var req = transport.request(options, function handleResponse(res) {
-      if (req.aborted) return;
+    var req = transport.request(options, function handleResponse (res) {
+      if (req.aborted) return

      // Response has been received so kill timer that handles request timeout
-      clearTimeout(timer);
-      timer = null;
+      clearTimeout(timer)
+      timer = null

      // uncompress the response body transparently if required
-      var stream = res;
+      let stream = res
      switch (res.headers['content-encoding']) {
-      /*eslint default-case:0*/
-      case 'gzip':
-      case 'compress':
-      case 'deflate':
+        /* eslint default-case:0 */
+        case 'gzip':
+        case 'compress':
+        case 'deflate':
        // add the unzipper to the body stream processing pipeline
-        stream = stream.pipe(zlib.createUnzip());
+          stream = stream.pipe(zlib.createUnzip())

-        // remove the content-encoding in order to not confuse downstream operations
-        delete res.headers['content-encoding'];
-        break;
+          // remove the content-encoding in order to not confuse downstream operations
+          delete res.headers['content-encoding']
+          break
      }

      // return the last request in case of redirects
-      var lastRequest = res.req || req;
+      const lastRequest = res.req || req

-      var response = {
+      const response = {
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: res.headers,
        config: config,
        request: lastRequest
-      };
+      }

      if (config.responseType === 'stream') {
-        response.data = stream;
-        settle(resolve, reject, response);
+        response.data = stream
+        settle(resolve, reject, response)
      } else {
-        var responseBuffer = [];
-        stream.on('data', function handleStreamData(chunk) {
-          responseBuffer.push(chunk);
+        const responseBuffer = []
+        stream.on('data', function handleStreamData (chunk) {
+          responseBuffer.push(chunk)

          // make sure the content length is not over the maxContentLength if specified
          if (config.maxContentLength > -1 && Buffer.concat(responseBuffer).length > config.maxContentLength) {
            reject(createError('maxContentLength size of ' + config.maxContentLength + ' exceeded',
-              config, null, lastRequest));
+              config, null, lastRequest))
          }
-        });
+        })

-        stream.on('error', function handleStreamError(err) {
-          if (req.aborted) return;
-          reject(enhanceError(err, config, null, lastRequest));
-        });
+        stream.on('error', function handleStreamError (err) {
+          if (req.aborted) return
+          reject(enhanceError(err, config, null, lastRequest))
+        })

-        stream.on('end', function handleStreamEnd() {
-          var responseData = Buffer.concat(responseBuffer);
+        stream.on('end', function handleStreamEnd () {
+          let responseData = Buffer.concat(responseBuffer)
          if (config.responseType !== 'arraybuffer') {
-            responseData = responseData.toString('utf8');
+            responseData = responseData.toString('utf8')
          }

-          response.data = responseData;
-          settle(resolve, reject, response);
-        });
+          response.data = responseData
+          settle(resolve, reject, response)
+        })
      }
-    });
+    })

    // Handle errors
-    req.on('error', function handleRequestError(err) {
-      if (req.aborted) return;
-      reject(enhanceError(err, config, null, req));
-    });
+    req.on('error', function handleRequestError (err) {
+      if (req.aborted) return
+      reject(enhanceError(err, config, null, req))
+    })

    // Handle request timeout
    if (config.timeout && !timer) {
-      timer = setTimeout(function handleRequestTimeout() {
-        req.abort();
-        reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req));
-      }, config.timeout);
+      timer = setTimeout(function handleRequestTimeout () {
+        req.abort()
+        reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req))
+      }, config.timeout)
    }

    if (config.cancelToken) {
      // Handle cancellation
-      config.cancelToken.promise.then(function onCanceled(cancel) {
-        if (req.aborted) return;
+      config.cancelToken.promise.then(function onCanceled (cancel) {
+        if (req.aborted) return

-        req.abort();
-        reject(cancel);
-      });
+        req.abort()
+        reject(cancel)
+      })
    }

    // Send the request
    if (utils.isStream(data)) {
-      data.pipe(req);
+      data.pipe(req)
    } else {
-      req.end(data);
+      req.end(data)
    }
-  });
-};
+  })
+}
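
The adapter's contract is unchanged by this lint pass: it takes an axios config object and returns a Promise that settle() resolves or rejects with a response of the shape { status, statusText, headers, config, request, data }. A minimal sketch of exercising the adapter directly, assuming a hand-built config (axios normally merges in defaults before calling it; the URL and options here are illustrative only):

// Assumed usage, not part of this commit: calling lib/adapters/http.js directly.
// headers must be a plain object because the adapter reads and mutates it.
const httpAdapter = require('./lib/adapters/http')

httpAdapter({
  url: 'http://example.com/',
  method: 'get',
  headers: {},
  maxRedirects: 5
})
  .then(function handleSuccess (response) {
    console.log(response.status, response.headers['content-type'])
  })
  .catch(function handleFailure (error) {
    console.error(error.message)
  })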
