Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP

Loading…

scrape #1

Closed
wants to merge 1 commit into from

2 participants

@lxfontes

Added /scrape handler.

Added an additional scrape.js handler (JSON output), allowing torrent info to be
presented in a web page or elsewhere.

Following this same idea, I will add a tracker.js handler displaying
overall tracker metrics.

@lxfontes lxfontes Added scrape handler.
Added additional scrape.js handler, allowing torrent info to be
presented in a webpage or something.

following this same idea, will add a tracker.js handler displaying
overall tracker metrics.
0335b9a
@pleax pleax closed this
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Commits on May 26, 2011
  1. @lxfontes

    Added scrape handler.

    lxfontes authored
    Added additional scrape.js handler, allowing torrent info to be
    presented in a webpage or something.
    
    following this same idea, will add a tracker.js handler displaying
    overall tracker metrics.
This page is out of date. Refresh to see the latest.
View
9 lib/formatters.js
@@ -11,6 +11,15 @@ exports.announce = function(torrentInfo, wantedPeers, compact) {
});
};
+exports.scrape = function(infoHash,torrentInfo) {
+ return b.encode({
+ file: infoHash,
+ complete: torrentInfo.complete,
+ incomplete: torrentInfo.incomplete,
+ downloaded: torrentInfo.downloaded
+ });
+};
+
var peersDictionary = function(peers) {
return peers.map(function(peer) {
return {
View
34 lib/handlers.js
@@ -1,6 +1,40 @@
var utils = require('./utils.js');
var formatters = require('./formatters.js');
// Handles GET /scrape: replies with a bencoded per-torrent summary
// (complete / incomplete / downloaded) for the requested info_hash.
// The peer pool is read off the announce handler, where it is attached
// at startup (see tracker.js wiring).
var scrape = exports.scrape = function(ctx) {
  console.log("Scrape", JSON.stringify(ctx.url));

  var infoHash = ctx.params['info_hash'];
  var torrentInfo = announce.pool.getInfo(infoHash);
  console.log("TorrentInfo:", JSON.stringify(torrentInfo));

  ctx.response.writeHead(200, { 'Content-Type': 'text/plain' });
  ctx.response.end(formatters.scrape(infoHash, torrentInfo), 'ascii');
};
+
+
// Handles GET /scrape.js: like /scrape, but answers with a JSON body so
// the torrent stats can be consumed directly from a web page.
var scrape_js = exports.scrape_js = function(ctx) {
  console.log("ScrapeJS", JSON.stringify(ctx.url));

  var infoHash = ctx.params['info_hash'];
  // tracker.js hangs the shared pool off the announce handler.
  var pool = announce.pool;
  var torrentInfo = pool.getInfo(infoHash);

  console.log("TorrentInfo:", JSON.stringify(torrentInfo));

  // Unknown info_hash: answer 404 instead of emitting undefined fields.
  if (torrentInfo == null) {
    ctx.response.writeHead(404, { 'Content-Type': 'application/json' });
    ctx.response.end(JSON.stringify({ error: 'unknown info_hash' }), 'ascii');
    return;
  }

  var responseText = JSON.stringify({
    complete: torrentInfo.complete,
    incomplete: torrentInfo.incomplete,
    downloaded: torrentInfo.downloaded
  });

  // The payload is JSON, so advertise it as such rather than text/plain.
  ctx.response.writeHead(200, { 'Content-Type': 'application/json' });
  ctx.response.end(responseText, 'ascii');
};
+
+
var announce = exports.announce = function(ctx) {
console.log("Announce", JSON.stringify(ctx.url));
View
50 lib/pool_dummy.js
// Returns aggregate stats for one torrent: seeders (complete),
// leechers (peer_count - complete) and total snatch count (downloaded).
//
// The removed per-peer loop guarded against an unknown infoHash
// (`torrent && torrent.peers || {}`); the counter-based rewrite dropped
// that guard and would throw a TypeError on `torrent.complete`.
// Restore it by returning all-zero counters for untracked torrents.
Pool.prototype.getInfo = function(infoHash) {
  var torrent = this.torrents[infoHash];
  if (torrent == null) {
    return { complete: 0, incomplete: 0, downloaded: 0 };
  }
  return {
    complete: torrent.complete,
    incomplete: torrent.peer_count - torrent.complete,
    downloaded: torrent.downloaded
  };
};
Pool.prototype.getPeers = function(infoHash, peer, numWant) {
@@ -36,15 +30,45 @@ Pool.prototype.getPeers = function(infoHash, peer, numWant) {
Pool.prototype.update = function(infoHash, peerInfo, metricsInfo, event) {
var torrent = this.torrents[infoHash];
if (torrent == null) {
- torrent = this.torrents[infoHash] = { peers: {} };
+ torrent = this.torrents[infoHash] = { peers: {},
+ downloaded: 0,
+ complete: 0,
+ peer_count: 0};
}
+ var peer = torrent.peers[peerInfo.id];
+
if (event == 'stopped') {
- delete torrent.peers[peerInfo.id];
+
+ if (peer != null){
+ delete torrent.peers[peerInfo.id];
+ if (metricsInfo.left == 0) {
+ //user was a seeder
+ if (torrent.complete > 0) {
+ torrent.complete--;
+ }
+ }
+
+ if ( torrent.peer_count > 0 && torrent.peer_count > torrent.complete ) {
+ torrent.peer_count--;
+ }
+ }
+
} else {
- var peer = torrent.peers[peerInfo.id];
+ //start and updates
+ if (event == 'completed') {
+ torrent.downloaded++;
+ torrent.complete++;
+ }
+ if (metricsInfo.left == 0 && peer == null) {
+ //new seeder. this will cover 2 cases:
+ //started + left = 0
+ //update ( due to tracker reset ) + left = 0
+ torrent.complete++;
+ }
if (peer == null) {
peer = torrent.peers[peerInfo.id] = { metrics: {} };
+ torrent.peer_count++;
}
peer.ip = peerInfo.ip;
peer.port = peerInfo.port;
View
4 lib/tracker.js
@@ -8,7 +8,9 @@ var pool = new (require('./pool_dummy.js').Pool)();
// Share the peer pool with the handlers by hanging it off announce;
// the scrape handlers read it back via announce.pool.
handlers.announce.pool = pool;

// Route table: announce plus the two scrape flavours (bencoded and JSON).
var dispatch = dispatcher.forHandlers({
  "/announce": handlers.announce,
  "/scrape": handlers.scrape,
  "/scrape.js": handlers.scrape_js
});
http.createServer(function(request, response) {
Something went wrong with that request. Please try again.