
Commit 2de309b

committed
Init
0 parents  commit 2de309b

21 files changed: 4877 additions & 0 deletions

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
node_modules/

LICENSE

Lines changed: 674 additions & 0 deletions
Large diffs are not rendered by default.

README.md

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
# CloudFlare uploader with custom posting

## Disclaimer

Not finished. Do not use in production workflows without re-checking everything.

## Short description

This is a small example of how to use Cloudflare Stream (currently in beta), which lets you publish videos to the web without worrying about storage or the messy details of video compression and encoding. It was written in a relatively short period of time, so there may be bugs. Please report them as issues, or fix them and submit a pull request. Also keep in mind while reading the code that it is fairly complex and sparsely commented.

## What it does

It comes with a small GUI that lets users set a title, description, thumbnail and so on, and uploads the video through a proxy server to the Cloudflare servers so that your CF API key is not exposed. Videos are chunked with flow.js (Cloudflare accepts only up to 100 MB per file) and reassembled on the server, which acts as a proxy. Those videos are then uploaded to Cloudflare, where they are encoded.

It also tries to check whether a video already exists in your Cloudflare account by running a SHA-1 check with a custom hashing scheme on the client side (in the browser) and then running the same check again once the file has been uploaded to the server.
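For reference, the client side of such an upload is roughly a flow.js instance pointed at the proxy. The snippet below is only a sketch: the `/upload` target, the element id and the event wiring are placeholders, not necessarily what this repository's GUI uses.

```js
// Hypothetical flow.js client setup: chunk the video well below Cloudflare's
// 100 MB limit and let flow.js skip chunks the server already has.
var flow = new Flow({
    target: '/upload',            // proxy endpoint on your own server (placeholder)
    chunkSize: 50 * 1024 * 1024,  // 50 MB chunks, safely under the 100 MB limit
    testChunks: true              // GET before each POST so existing chunks are skipped
});

flow.assignBrowse(document.getElementById('video-input')); // placeholder element id

flow.on('filesSubmitted', function(files, event) {
    flow.upload();
});

flow.on('fileSuccess', function(file, message) {
    console.log('Upload to the proxy finished for ' + file.name);
});
```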

## Workflow

This example could help people who want to build their own video platform and need to handle all of that video processing. An uploader for thumbnail images is included, although it uploads all images to imgur via their API. The GUI currently uses three colors for the different stages: blue for uploading to your server, green for uploading to Cloudflare and yellow for encoding on Cloudflare's side.
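The imgur part boils down to a single authenticated POST. A minimal sketch, assuming a registered imgur Client-ID (the bundled uploader may differ in detail):

```js
// Hypothetical thumbnail upload to imgur's v3 API; YOUR_CLIENT_ID is a placeholder.
function uploadThumbnail(file) {
    var form = new FormData();
    form.append('image', file);
    return fetch('https://api.imgur.com/3/image', {
        method: 'POST',
        headers: { Authorization: 'Client-ID YOUR_CLIENT_ID' },
        body: form
    })
        .then(function(res) { return res.json(); })
        .then(function(json) { return json.data.link; }); // direct link to the hosted image
}
```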

## Starting

Running the index.js file with node starts a custom Node web server on port 80 (change the port if you want).

Add your Cloudflare certificates (for strict SSL) in `certs` as `server.crt` and `server.key`. This is optional, but if you skip it, remove the certificate handling from the code.

```sh
git clone https://github.com/bostrot/cloudflare-stream-uploader.git
cd cloudflare-stream-uploader
node index.js
```
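If you keep the certificate handling, the idea is roughly the following. This is only a sketch, assuming the `certs/server.key` and `certs/server.crt` paths mentioned above; the actual index.js may differ.

```js
// Sketch only: serve over HTTPS when the Cloudflare certificates are present,
// otherwise fall back to plain HTTP on port 80 as described above.
var fs = require('fs');
var http = require('http');
var https = require('https');

function handler(req, res) {
    res.end('uploader placeholder'); // the real server serves the GUI and proxies uploads
}

try {
    var options = {
        key: fs.readFileSync('certs/server.key'),
        cert: fs.readFileSync('certs/server.crt')
    };
    https.createServer(options, handler).listen(443);
} catch (e) {
    // No certificates found: run without TLS (only sensible for local testing)
    http.createServer(handler).listen(80);
}
```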

![thumbnail](https://i.imgur.com/0H8MKUw.png)

## Help

You are welcome to contribute with pull requests, bug reports, ideas and donations.

Bitcoin: [1ECPWeTCq93F68BmgYjUgGSV11XuzSPSeM](https://www.blockchain.com/btc/payment_request?address=1ECPWeTCq93F68BmgYjUgGSV11XuzSPSeM&currency=USD&nosavecurrency=true&message=Bostrot)

PayPal: [paypal.me/bostrot](https://paypal.me/bostrot)

Hosting: [$2.50 VPS at VULTR](https://www.vultr.com/?ref=7505919)

flow-node.js

Lines changed: 210 additions & 0 deletions
@@ -0,0 +1,210 @@
var fs = require('fs'),
    path = require('path'),
    util = require('util'),
    Stream = require('stream').Stream;

module.exports = flow = function(temporaryFolder) {
    var $ = this;
    $.temporaryFolder = temporaryFolder;
    $.maxFileSize = null;
    $.fileParameterName = 'file';

    try {
        fs.mkdirSync($.temporaryFolder);
    } catch (e) {}

    function cleanIdentifier(identifier) {
        return identifier.replace(/[^0-9A-Za-z_-]/g, '');
    }

    function getChunkFilename(chunkNumber, identifier) {
        // Clean up the identifier
        identifier = cleanIdentifier(identifier);
        // What would the file name be?
        return path.resolve($.temporaryFolder, './flow-' + identifier + '.' + chunkNumber);
    }

    function validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize) {
        // Clean up the identifier
        identifier = cleanIdentifier(identifier);

        // Check if the request is sane
        if (chunkNumber == 0 || chunkSize == 0 || totalSize == 0 || identifier.length == 0 || filename.length == 0) {
            return 'non_flow_request';
        }
        var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
        if (chunkNumber > numberOfChunks) {
            return 'invalid_flow_request1';
        }

        // Is the file too big?
        if ($.maxFileSize && totalSize > $.maxFileSize) {
            return 'invalid_flow_request2';
        }

        if (typeof(fileSize) != 'undefined') {
            if (chunkNumber < numberOfChunks && fileSize != chunkSize) {
                // The chunk in the POST request isn't the correct size
                return 'invalid_flow_request3';
            }
            if (numberOfChunks > 1 && chunkNumber == numberOfChunks && fileSize != ((totalSize % chunkSize) + parseInt(chunkSize))) {
                // The chunk in the POST request is the last one, and the file is not the correct size
                return 'invalid_flow_request4';
            }
            if (numberOfChunks == 1 && fileSize != totalSize) {
                // The file is only a single chunk, and the data size does not fit
                return 'invalid_flow_request5';
            }
        }

        return 'valid';
    }

    //'found', filename, original_filename, identifier
    //'not_found', null, null, null
    $.get = function(req, callback) {
        var chunkNumber = req.param('flowChunkNumber', 0);
        var chunkSize = req.param('flowChunkSize', 0);
        var totalSize = req.param('flowTotalSize', 0);
        var identifier = req.param('flowIdentifier', "");
        var filename = req.param('flowFilename', "");

        if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') {
            var chunkFilename = getChunkFilename(chunkNumber, identifier);
            fs.exists(chunkFilename, function(exists) {
                if (exists) {
                    callback('found', chunkFilename, filename, identifier);
                } else {
                    callback('not_found', null, null, null);
                }
            });
        } else {
            callback('not_found', null, null, null);
        }
    };

    //'partly_done', filename, original_filename, identifier
    //'done', filename, original_filename, identifier
    //'invalid_flow_request', null, null, null
    //'non_flow_request', null, null, null
    $.post = function(req, callback) {

        var fields = req.body;
        var files = req.files;

        var chunkNumber = fields['flowChunkNumber'];
        var chunkSize = fields['flowChunkSize'];
        var totalSize = fields['flowTotalSize'];
        var identifier = cleanIdentifier(fields['flowIdentifier']);
        var filename = fields['flowFilename'];

        if (!files[$.fileParameterName] || !files[$.fileParameterName].size) {
            callback('invalid_flow_request', null, null, null);
            return;
        }

        var original_filename = files[$.fileParameterName]['originalFilename'];
        var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size);
        if (validation == 'valid') {
            var chunkFilename = getChunkFilename(chunkNumber, identifier);

            // Save the chunk (TODO: OVERWRITE)
            fs.rename(files[$.fileParameterName].path, chunkFilename, function() {

                // Do we have all the chunks?
                var currentTestChunk = 1;
                var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
                var testChunkExists = function() {
                    fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) {
                        if (exists) {
                            currentTestChunk++;
                            if (currentTestChunk > numberOfChunks) {
                                callback('done', filename, original_filename, identifier);
                            } else {
                                // Recursion
                                testChunkExists();
                            }
                        } else {
                            callback('partly_done', filename, original_filename, identifier);
                        }
                    });
                };
                testChunkExists();
            });
        } else {
            callback(validation, filename, original_filename, identifier);
        }
    };

    // Pipe chunks directly into an existing WritableStream
    // r.write(identifier, response);
    // r.write(identifier, response, {end:false});
    //
    // var stream = fs.createWriteStream(filename);
    // r.write(identifier, stream);
    // stream.on('data', function(data){...});
    // stream.on('finish', function(){...});
    $.write = function(identifier, writableStream, options) {
        options = options || {};
        options.end = (typeof options['end'] == 'undefined' ? true : options['end']);

        // Iterate over each chunk
        var pipeChunk = function(number) {

            var chunkFilename = getChunkFilename(number, identifier);
            fs.exists(chunkFilename, function(exists) {

                if (exists) {
                    // If the chunk with the current number exists,
                    // then create a ReadStream from the file
                    // and pipe it to the specified writableStream.
                    var sourceStream = fs.createReadStream(chunkFilename);
                    sourceStream.pipe(writableStream, {
                        end: false
                    });
                    sourceStream.on('end', function() {
                        // When the chunk is fully streamed,
                        // jump to the next one
                        pipeChunk(number + 1);
                    });
                } else {
                    // When all the chunks have been piped, end the stream
                    if (options.end) writableStream.end();
                    if (options.onDone) options.onDone();
                }
            });
        };
        pipeChunk(1);
    };

    $.clean = function(identifier, options) {
        options = options || {};

        // Iterate over each chunk
        var pipeChunkRm = function(number) {

            var chunkFilename = getChunkFilename(number, identifier);

            //console.log('removing pipeChunkRm ', number, 'chunkFilename', chunkFilename);
            fs.exists(chunkFilename, function(exists) {
                if (exists) {

                    //console.log('exist removing ', chunkFilename);
                    fs.unlink(chunkFilename, function(err) {
                        if (err && options.onError) options.onError(err);
                    });

                    pipeChunkRm(number + 1);

                } else {

                    if (options.onDone) options.onDone();

                }
            });
        };
        pipeChunkRm(1);
    };

    return $;
};
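How this module is meant to be plugged in: the upload routes of the proxy server hand each request to flow-node.js. The sketch below follows the usual flow.js sample pattern, assuming Express plus a multipart parser such as connect-multiparty; the repository's own index.js may wire it differently.

```js
// Illustration only: hook flow-node.js into upload routes.
// Assumes Express and connect-multiparty; route and folder names are placeholders.
var fs = require('fs');
var express = require('express');
var multipart = require('connect-multiparty');
var flow = require('./flow-node.js')('tmp'); // chunks are stored in ./tmp

var app = express();

// flow.js POSTs every chunk as multipart/form-data
app.post('/upload', multipart(), function(req, res) {
    flow.post(req, function(status, filename, original_filename, identifier) {
        if (status === 'done') {
            // All chunks are on disk: reassemble them (assumes ./uploads exists), then clean up
            var stream = fs.createWriteStream('./uploads/' + filename);
            flow.write(identifier, stream, {
                onDone: function() { flow.clean(identifier); }
            });
        }
        res.status(status === 'done' || status === 'partly_done' ? 200 : 400).send(status);
    });
});

// flow.js issues GET requests (testChunks) to ask whether a chunk already exists
app.get('/upload', function(req, res) {
    flow.get(req, function(status) {
        res.status(status === 'found' ? 200 : 204).send(status);
    });
});

app.listen(80);
```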
