Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
0 parents
commit 2de309b
Showing
21 changed files
with
4,877 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
node_modules/ |
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
# CloudFlare uploader with custom posting | ||
|
||
## Disclaimer | ||
|
||
Not finished. Do not use in production workflows without re-checking everything. | ||
|
||
## Short description | ||
|
||
This is a little example of how to use the currently-in-beta Cloudflare Stream, which lets you easily publish your videos to the web without thinking about storage and nasty video compression, encoding, etc. It was written in a relatively short period of time, so there could be some bugs. Please report them as issues, or fix them and submit a pull request. That said, complexity is a problem right now and there aren't many comments either, so keep that in mind while going through the code.
|
||
## What it does | ||
|
||
It comes with a little GUI that let users select a description, title, thumbnail etc and uploads a video through a proxy server to the cloudflare servers to not expose your CF API key. Videos are chunked through flow.js (as cloudflare accepts up to 100MB per file only) and rewritten on the server which acts as a proxy. Those videos are uploaded to Cloudflare which then will be encoded etc. | ||
It also tries to check whether videos already exist in your Cloudflare account by running a SHA-1 check with a custom hashing algorithm on the client side (browser) and then the same check when the file is uploaded to the server.
|
||
## Workflow | ||
|
||
This example could help people who want to create their own video platforms handle all that video processing. An uploader for thumbnail images is included, though it uploads all images to Imgur via their API. The GUI currently uses three colors for the different stages: blue for uploading to your server, green for uploading to Cloudflare, and yellow for encoding on Cloudflare's side.
|
||
## Starting | ||
|
||
Running the index.js file with node should create a custom node webserver on port 80 (customize if you want). | ||
Add your cloudflare certificates (for strict ssl) in certs under `server.crt` and `server.key`. You don't need to do that but in that case remove it from the code. | ||
|
||
```sh
git clone https://github.com/bostrot/cloudflare-stream-uploader.git | ||
cd cloudflare-stream-uploader | ||
node index.js | ||
``` | ||
|
||
![thumbnail](https://i.imgur.com/0H8MKUw.png) | ||
|
||
## Help | ||
|
||
You are welcome to contribute with pull requests, bug reports, ideas and donations. | ||
|
||
Bitcoin: [1ECPWeTCq93F68BmgYjUgGSV11XuzSPSeM](https://www.blockchain.com/btc/payment_request?address=1ECPWeTCq93F68BmgYjUgGSV11XuzSPSeM&currency=USD&nosavecurrency=true&message=Bostrot)
|
||
PayPal: [paypal.me/bostrot](https://paypal.me/bostrot) | ||
|
||
Hosting: [2.50$ VPS at VULTR](https://www.vultr.com/?ref=7505919) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,210 @@ | ||
var fs = require('fs'), | ||
path = require('path'), | ||
util = require('util'), | ||
Stream = require('stream').Stream; | ||
|
||
/**
 * Server-side companion for flow.js chunked uploads.
 *
 * Stores each uploaded chunk as its own file, named
 * "flow-<identifier>.<chunkNumber>", inside `temporaryFolder`, and can
 * later verify, reassemble (stream back in order) or delete them.
 *
 * @param {string} temporaryFolder - directory used for chunk storage;
 *     created on construction if missing.
 * @returns {Object} handler exposing `get`, `post`, `write` and `clean`
 *     plus the tunables `maxFileSize` (bytes; null = unlimited) and
 *     `fileParameterName` (multipart field name, default 'file').
 */
function flow(temporaryFolder) {
    // Build the handler on a fresh object. The factory is normally
    // called without `new`, where `this` is the global object (sloppy
    // mode) or undefined (strict mode) -- the old `var $ = this`
    // polluted the global scope, and `flow = function(...)` also
    // leaked a global `flow` binding.
    var $ = {};
    $.temporaryFolder = temporaryFolder;
    $.maxFileSize = null;
    $.fileParameterName = 'file';

    // Best-effort creation of the chunk folder. EEXIST (and any other
    // error) is deliberately swallowed; real permission problems will
    // surface on the first chunk write.
    try {
        fs.mkdirSync($.temporaryFolder);
    } catch (e) {}

    // Strip everything that is not a safe file-name character so a
    // client-supplied identifier cannot escape temporaryFolder.
    function cleanIdentifier(identifier) {
        return identifier.replace(/[^0-9A-Za-z_-]/g, '');
    }

    // Async existence probe; replaces the deprecated fs.exists().
    function chunkFileExists(chunkFilename, callback) {
        fs.access(chunkFilename, function(err) {
            callback(!err);
        });
    }

    // Absolute path of the file that stores one chunk of one upload.
    function getChunkFilename(chunkNumber, identifier) {
        // Clean up the identifier
        identifier = cleanIdentifier(identifier);
        return path.resolve($.temporaryFolder, './flow-' + identifier + '.' + chunkNumber);
    }

    /**
     * Sanity-check one chunk request. Values arrive as strings from
     * query/form parameters, so the comparisons below intentionally use
     * loose equality for numeric coercion.
     *
     * @returns {string} 'valid', 'non_flow_request', or one of the
     *     'invalid_flow_request*' rejection codes.
     */
    function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize) {
        // Clean up the identifier
        identifier = cleanIdentifier(identifier);

        // Check if the request is sane
        if (chunkNumber == 0 || chunkSize == 0 || totalSize == 0 || identifier.length == 0 || filename.length == 0) {
            return 'non_flow_request';
        }
        // flow.js folds the remainder into the last chunk, so the chunk
        // count is floor(total / size), never rounded up.
        var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
        if (chunkNumber > numberOfChunks) {
            return 'invalid_flow_request1';
        }

        // Is the file too big?
        if ($.maxFileSize && totalSize > $.maxFileSize) {
            return 'invalid_flow_request2';
        }

        if (typeof(fileSize) != 'undefined') {
            if (chunkNumber < numberOfChunks && fileSize != chunkSize) {
                // A non-final chunk must be exactly chunkSize bytes.
                return 'invalid_flow_request3';
            }
            if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize))) {
                // This is the last chunk, and the file is not the
                // correct size: the final chunk carries the remainder
                // plus one full chunk (see numberOfChunks above).
                return 'invalid_flow_request4';
            }
            if (numberOfChunks == 1 && fileSize != totalSize) {
                // The file is only a single chunk, and the data size does not fit
                return 'invalid_flow_request5';
            }
        }

        return 'valid';
    }

    /**
     * Test-chunk handshake (flow.js GET): report whether one chunk has
     * already been received so the client can skip re-uploading it.
     *
     * callback('found', chunkFilename, originalFilename, identifier) or
     * callback('not_found', null, null, null).
     *
     * NOTE(review): req.param() is deprecated in Express 4; callers
     * must pass a request object that still implements it.
     */
    $.get = function(req, callback) {
        var chunkNumber = req.param('flowChunkNumber', 0);
        var chunkSize = req.param('flowChunkSize', 0);
        var totalSize = req.param('flowTotalSize', 0);
        var identifier = req.param('flowIdentifier', "");
        var filename = req.param('flowFilename', "");

        if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') {
            var chunkFilename = getChunkFilename(chunkNumber, identifier);
            chunkFileExists(chunkFilename, function(exists) {
                if (exists) {
                    callback('found', chunkFilename, filename, identifier);
                } else {
                    callback('not_found', null, null, null);
                }
            });
        } else {
            callback('not_found', null, null, null);
        }
    };

    /**
     * Accept one uploaded chunk (multipart POST parsed into req.body /
     * req.files) and move it into the temporary folder.
     *
     * callback status is one of:
     *   'partly_done' - chunk stored, more chunks outstanding
     *   'done'        - chunk stored, all chunks present
     *   'invalid_flow_request' / 'non_flow_request' / 'invalid_flow_request*'
     */
    $.post = function(req, callback) {
        var fields = req.body;
        var files = req.files;

        var chunkNumber = fields['flowChunkNumber'];
        var chunkSize = fields['flowChunkSize'];
        var totalSize = fields['flowTotalSize'];
        var identifier = cleanIdentifier(fields['flowIdentifier']);
        var filename = fields['flowFilename'];

        if (!files[$.fileParameterName] || !files[$.fileParameterName].size) {
            callback('invalid_flow_request', null, null, null);
            return;
        }

        var original_filename = files[$.fileParameterName]['originalFilename'];
        var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size);
        if (validation == 'valid') {
            var chunkFilename = getChunkFilename(chunkNumber, identifier);

            // Save the chunk (overwrites any earlier upload of the same
            // chunk number).
            // TODO(review): the rename error is ignored; a temp dir on
            // another device would make this fail silently.
            fs.rename(files[$.fileParameterName].path, chunkFilename, function() {
                // Walk chunk numbers 1..numberOfChunks until one is
                // missing; if none is, the upload is complete.
                var currentTestChunk = 1;
                var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
                var testChunkExists = function() {
                    chunkFileExists(getChunkFilename(currentTestChunk, identifier), function(exists) {
                        if (exists) {
                            currentTestChunk++;
                            if (currentTestChunk > numberOfChunks) {
                                callback('done', filename, original_filename, identifier);
                            } else {
                                // Recursion
                                testChunkExists();
                            }
                        } else {
                            callback('partly_done', filename, original_filename, identifier);
                        }
                    });
                };
                testChunkExists();
            });
        } else {
            callback(validation, filename, original_filename, identifier);
        }
    };

    /**
     * Pipe all stored chunks of `identifier`, in order, into an
     * existing WritableStream.
     *
     *   r.write(identifier, response);
     *   r.write(identifier, response, {end: false});
     *
     *   var stream = fs.createWriteStream(filename);
     *   r.write(identifier, stream);
     *
     * @param {Object} [options]
     * @param {boolean} [options.end=true] - end the stream afterwards.
     * @param {Function} [options.onDone] - called after the last chunk.
     */
    $.write = function(identifier, writableStream, options) {
        options = options || {};
        options.end = (typeof options['end'] == 'undefined' ? true : options['end']);

        // Stream chunk `number`, then recurse to the next; stop at the
        // first missing chunk.
        var pipeChunk = function(number) {
            var chunkFilename = getChunkFilename(number, identifier);
            chunkFileExists(chunkFilename, function(exists) {
                if (exists) {
                    // Keep the target open between chunks; only the
                    // very last step may end it.
                    var sourceStream = fs.createReadStream(chunkFilename);
                    sourceStream.pipe(writableStream, {
                        end: false
                    });
                    sourceStream.on('end', function() {
                        // When the chunk is fully streamed, jump to the
                        // next one
                        pipeChunk(number + 1);
                    });
                } else {
                    // All chunks have been piped: optionally end the
                    // stream and notify the caller.
                    if (options.end) writableStream.end();
                    if (options.onDone) options.onDone();
                }
            });
        };
        pipeChunk(1);
    };

    /**
     * Delete all stored chunks of `identifier`.
     *
     * @param {Object} [options]
     * @param {Function} [options.onDone] - called once every chunk has
     *     been removed (previously this could fire while unlinks were
     *     still pending).
     * @param {Function} [options.onError] - called with each unlink error.
     */
    $.clean = function(identifier, options) {
        options = options || {};

        // Remove chunk `number`, then recurse; stop at the first
        // missing chunk.
        var pipeChunkRm = function(number) {
            var chunkFilename = getChunkFilename(number, identifier);

            chunkFileExists(chunkFilename, function(exists) {
                if (exists) {
                    fs.unlink(chunkFilename, function(err) {
                        if (err && options.onError) options.onError(err);
                        // Recurse only after the unlink finished so
                        // onDone really means "everything is gone".
                        pipeChunkRm(number + 1);
                    });
                } else {
                    if (options.onDone) options.onDone();
                }
            });
        };
        pipeChunkRm(1);
    };

    return $;
}

// Export via CommonJS when available; the guard keeps the module
// loadable from ES-module contexts (e.g. test harnesses) as well.
if (typeof module !== 'undefined' && module.exports) {
    module.exports = flow;
}
Oops, something went wrong.