Skip to content

Commit 1ff499f

Browse files
committed
Update body parsing and file streaming examples
1 parent 670048b commit 1ff499f

File tree

5 files changed

+156
-229
lines changed

5 files changed

+156
-229
lines changed

examples/FileStreaming.js

Lines changed: 100 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,100 @@
/* This is an example of streaming files */

const uWS = require('../dist/uws.js');
const fs = require('fs');
const port = 9001;

/* Small file: fully cached in memory at startup and served from the buffer */
const smallFileType = 'application/json';
const smallFileName = 'absolutePathTo/smallFile.json';
const smallFileCachedBuffer = fs.readFileSync(smallFileName);
console.log(`Small file size is: ${smallFileCachedBuffer.length} bytes`);

/* Big file: streamed from disk on demand; only its size is read up front
 * (needed by res.tryEnd to know the total Content-Length) */
const bigFileType = 'video/mpeg';
const bigFileName = 'absolutePathTo/bigFile.mp3';
const bigFileSize = fs.statSync(bigFileName).size;
console.log(`Big file size is: ${bigFileSize} bytes`);

/* Counters used only for logging how many streams are in flight */
let lastStreamIndex = 0;
let openStreams = 0;
/* Helper function to stream data */
/** Streams `readStream` into the uWS response `res`, honoring client
 * backpressure via res.onWritable; calls `onSucceed` once `totalSize`
 * bytes have been delivered.
 * @param {import('node:stream').Readable} readStream */
const streamData = (res, readStream, totalSize, onSucceed) => {
  let pending;        /* Chunk currently being written to the response */
  let sentOffset = 0; /* File offset where `pending` begins */

  /* Attempt to write the pending chunk; returns whether uWS accepted it */
  const flushPending = () => {
    const [accepted, finished] = res.tryEnd(pending, totalSize);
    if (finished) {
      /* Whole file delivered — tear down the read side and report success */
      readStream.destroy();
      onSucceed();
      return accepted;
    }
    if (accepted) {
      /* Chunk fully handed off; advance and keep the stream flowing */
      sentOffset += pending.length;
      readStream.resume();
      return accepted;
    }
    /* Client backpressure: onWritable will fire when the client can take
     * more, so pause the disk read until then */
    readStream.pause();
    return accepted;
  };

  /* Drain callback — uWS reports how far the response actually got */
  res.onWritable((offset) => {
    if (offset !== sentOffset) {
      /* Part of the chunk already made it out — retry only the remainder */
      pending = pending.subarray(offset - sentOffset);
      sentOffset = offset;
    }
    /* Always return if resend was successful or not */
    return flushPending();
  });

  readStream
    .on('error', (err) => {
      console.log('Error reading file: ' + err);
      /* res.close calls onAborted callback */
      res.cork(() => res.close());
    })
    .on('data', (nextChunk) => {
      pending = nextChunk;
      /* Cork before sending new chunk */
      res.cork(flushPending);
    });
};
/* Build the (optionally SSL) app with both file routes and start listening */
const app = uWS./*SSL*/App({
  key_file_name: 'misc/key.pem',
  cert_file_name: 'misc/cert.pem',
  passphrase: '1234'
}).get('/smallFile', (res, req) => {
  /* !! Use this only for small files !!
   * May cause server backpressure and bad performance
   * For bigger files you have to use streaming method */
  res.writeHeader('Content-Type', smallFileType).end(smallFileCachedBuffer);
}).get('/bigFile', (res, req) => {
  const id = ++lastStreamIndex;
  console.log(`Stream (${id}) was opened, openStreams: ${++openStreams}`);
  const fileStream = fs.createReadStream(bigFileName);
  /* Attach onAborted handler because streaming is async */
  res.onAborted(() => {
    fileStream.destroy();
    console.log(`Stream (${id}) failed, openStreams: ${--openStreams}`);
  });
  res.writeHeader('Content-Type', bigFileType);
  streamData(res, fileStream, bigFileSize, () => {
    console.log(`Stream (${id}) succeed, openStreams: ${--openStreams}`);
  });
}).listen(port, (token) => {
  console.log(token
    ? 'Listening to port ' + port
    : 'Failed to listen to port ' + port);
});

examples/JsonPost.js

Lines changed: 0 additions & 72 deletions
This file was deleted.

examples/ParseRequestBody.js

Lines changed: 56 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,56 @@
1+
/* This is an example of parsing request body */
2+
3+
const uWS = require('../dist/uws.js');
4+
const port = 9001;
5+
/* Helper function to parse JSON body
 *
 * Collects the request body chunk by chunk and invokes `callback` exactly
 * once: with the parsed value on success, or `undefined` when the body is
 * not valid JSON (including an empty body). */
const parseJSONBody = (res, callback) => {
  /* Keep copies of the received chunks and concatenate once at the end —
   * re-concatenating the whole body on every chunk is O(n^2) */
  const chunks = [];
  /* Register data callback */
  res.onData((ab, isLast) => {
    /* `ab` is only valid inside this callback (uWS recycles the buffer),
     * so any bytes kept past this call must be copied first */
    const chunk = Buffer.from(ab);
    if (isLast) {
      let json;
      try {
        /* Single-chunk bodies are parsed directly, skipping the copy */
        json = JSON.parse(chunks.length ? Buffer.concat([...chunks, chunk]) : chunk);
      } catch (e) {
        /* Deliberate: an unparsable body is reported as undefined */
        json = undefined;
      }
      callback(json);
    } else {
      /* Copy now, before the underlying ArrayBuffer is invalidated */
      chunks.push(Buffer.concat([chunk]));
    }
  });
};
/* Register the JSON endpoint and start listening.
 * NOTE: a body-carrying JSON API endpoint must be registered with .post —
 * GET requests generally have no body, so with .get this example would
 * always answer 400 */
const app = uWS./*SSL*/App({
  key_file_name: 'misc/key.pem',
  cert_file_name: 'misc/cert.pem',
  passphrase: '1234'
}).post('/jsonAPI', (res, req) => {
  /* Attach onAborted handler because body parsing is async */
  res.onAborted(() => {
    console.log('Request aborted!');
  });
  parseJSONBody(res, (parsedJson) => {
    if (parsedJson !== undefined) {
      console.log('Valid JSON:', parsedJson);
      /* Cork before responding from the async callback */
      res.cork(() => {
        res.end('Thanks for your data!');
      });
    } else {
      console.log('Invalid JSON or no data at all!');
      res.cork(() => {
        res.writeStatus('400 Bad Request').end();
      });
    }
  });
}).listen(port, (token) => {
  if (token) {
    console.log('Listening to port ' + port);
  } else {
    console.log('Failed to listen to port ' + port);
  }
});

examples/VideoStreamer.js

Lines changed: 0 additions & 115 deletions
This file was deleted.

examples/VideoStreamerSync.js

Lines changed: 0 additions & 42 deletions
This file was deleted.

0 commit comments

Comments (0)