Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.
Download ZIP
Browse files

Fix jquery and proxy issues

  • Loading branch information...
commit 60147773e8f48465f8f0d77df2150f206bf3f6c3 1 parent 228d58a
@chriso authored
Showing with 17 additions and 15 deletions.
  1. +3 −7 lib/node.io/dom.js
  2. +11 −6 lib/node.io/request.js
  3. +3 −2 package.json
View
10 lib/node.io/dom.js
@@ -50,13 +50,9 @@ Job.prototype.parseHtml = function (data, callback, response) {
ProcessExternalResources: this.options.external_resources,
QuerySelector: false
};
- try {
- var $, window = require('jsdom').jsdom(data, null, {features:features}).createWindow(),
- jquery = require('jquery'),
- default_$ = jquery.create(window);
- } catch (e) {
- utils.fatal('jQuery is not installed. Run `npm install jquery`');
- }
+ var $, window = require('jsdom').jsdom(data, null, {features:features}).createWindow(),
+     jquery = require('jquery'),
+ default_$ = jquery.create(window);
$ = function (selector, context) {
return context ? jquery.create(context) : default_$(selector);
};
View
17 lib/node.io/request.js
@@ -179,7 +179,7 @@ Job.prototype.encodeBody = function (body, use_json) {
* @api public
*/
Job.prototype.doRequest = function (method, resource, body, headers, callback, parse, redirects) {
- var self = this, port, url, path, rid, secure, request, cleanup, h,
+ var self = this, host, port, url, path, rid, secure, request, cleanup, h,
request_response, options, on_complete;
//Give each a request a unique ID for debugging
@@ -297,8 +297,10 @@ Job.prototype.doRequest = function (method, resource, body, headers, callback, p
this.debug(' | ' + h[0].toUpperCase() + h.substr(1) + ': ' + headers[h]);
}
+ host = url.hostname ? url.hostname : headers.host;
+
options = {
- host: headers.host,
+ host: host,
port: port,
path: path,
method: method,
@@ -582,11 +584,14 @@ Proxy.prototype.proxify = function (job) {
* @api public
*/
var HttpProxy = function (host) {
- var proxy = urlparse(host);
+ var proxy = urlparse(host), urlhost;
var url_callback = function (url) {
var u = urlparse(url);
- this.url_host = u.host;
- url = proxy.protocol + '//' + proxy.host + u.pathname;
+ urlhost = u.host;
+ url = (proxy.protocol || 'http:')
+ + '//' + proxy.host
+ + (u.pathname || '/');
+
if (u.search) {
url += u.search;
}
@@ -594,7 +599,7 @@ var HttpProxy = function (host) {
};
var header_callback = function (headers) {
headers = headers || {};
- headers.host = this.url_host;
+ headers.host = urlhost;
return headers;
};
return new Proxy(url_callback, header_callback);
View
5 package.json
@@ -1,6 +1,6 @@
{ "name" : "node.io",
"description" : "A distributed data scraping and processing framework",
- "version" : "0.3.3",
+ "version" : "0.3.4",
"homepage" : "http://github.com/chriso/node.io",
"keywords" : ["data","mapreduce","map","reduce","scraping","html","parsing","parse","scrape","process","processing","data"],
"author" : "Chris O'Hara <cohara87@gmail.com>",
@@ -19,7 +19,8 @@
"dependencies": {
"htmlparser": ">= 1.7.3",
"coffee-script": ">= 0.9.5",
- "jquery": ">= 1.4.4"
+ "jquery": ">= 1.4.4",
+ "jsdom": ">= 0.2.0"
},
"devDependencies": { "expresso": "*" },
"bin": {
Please sign in to comment.
Something went wrong with that request. Please try again.