Skip to content

Commit 19426f9

Browse files
committed
Update body parsing and file streaming examples
1 parent 670048b commit 19426f9

File tree

5 files changed

+161
-229
lines changed

5 files changed

+161
-229
lines changed

examples/FileStreaming.js

Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
/* This is an example of streaming files */

const uWS = require('../dist/uws.js');
const fs = require('fs');
const port = 9001;

/* Small file is cached in memory and served from RAM on every request */
const smallFileType = 'application/json';
const smallFileName = 'absolutPathTo/smallFile.json';
const smallFileCachedBuffer = fs.readFileSync(smallFileName);
console.log('Small file size is: '+ smallFileCachedBuffer.length +' bytes');

/* Big file is streamed from storage chunk by chunk */
/* Fixed: .mp3 is MPEG audio, not video — its registered media type is
 * audio/mpeg (RFC 3003), not video/mpeg */
const bigFileType = 'audio/mpeg';
const bigFileName = 'absolutPathTo/bigFile.mp3';
/* Total size is needed up front so res.tryEnd can set Content-Length */
const bigFileSize = fs.statSync(bigFileName).size;
console.log('Big file size is: '+ bigFileSize +' bytes');

/* Bookkeeping for logging: id of the last stream and how many are open */
let lastStreamIndex = 0;
let openStreams = 0;
/* Helper function to stream data to the client with backpressure handling.
 * Reads chunks from readStream and writes them with res.tryEnd; pauses the
 * stream while the client socket is saturated and resumes once drained.
 * Calls onSucceed exactly once when the whole totalSize has been delivered. */
/** @param {import('node:stream').Readable} readStream */
const streamData = (res, readStream, totalSize, onSucceed) => {
  let pendingChunk;   /* Chunk currently being written */
  let sentOffset = 0; /* Offset of pendingChunk within the whole response */

  /* Try to write the pending chunk; returns tryEnd's ok flag */
  const flush = () => {
    const [ok, done] = res.tryEnd(pendingChunk, totalSize);
    if (done) {
      /* Whole response delivered — stop reading and report success */
      readStream.destroy();
      onSucceed();
    } else if (ok) {
      /* Chunk fully accepted, advance the offset */
      sentOffset += pendingChunk.length;
    } else {
      /* Client backpressure: onWritable will fire when the client can
       * receive again, so stop producing data until then */
      readStream.pause();
    }
    return ok;
  };

  /* Drain callback: called when the client socket becomes writable again */
  res.onWritable((offset) => {
    if (offset !== sentOffset) {
      /* A prefix of the chunk already went out — resend only the rest */
      pendingChunk = pendingChunk.subarray(offset - sentOffset);
      sentOffset = offset;
    }
    const ok = flush();
    if (ok) {
      /* Retransmission succeeded, keep reading from storage */
      readStream.resume();
    }
    return ok;
  });

  /* Wire up the read stream */
  readStream.on('error', (err) => {
    console.log(`Error reading file: ${err}`);
    /* res.close() calls the onAborted callback */
    res.close();
  }).on('data', (incomingChunk) => {
    pendingChunk = incomingChunk;
    /* Cork before sending the new chunk */
    res.cork(flush);
  });
};
/* Build the (optionally SSL) app with the two example routes */
const app = uWS./*SSL*/App({
  key_file_name: 'misc/key.pem',
  cert_file_name: 'misc/cert.pem',
  passphrase: '1234'
}).get('/smallFile', (res, req) => {
  res.writeHeader('Content-Type', smallFileType);
  /* !! Use this only for small-sized bodies !!
   * May cause server backpressure and bad performance
   * For large bodies you must use the streaming method */
  res.end(smallFileCachedBuffer);
}).get('/bigFile', (res, req) => {
  const id = ++lastStreamIndex;
  console.log(`Stream (${id}) was opened, openStreams: ${++openStreams}`);
  const fileStream = fs.createReadStream(bigFileName);
  /* Attach onAborted handler because streaming is async */
  res.onAborted(() => {
    fileStream.destroy();
    console.log(`Stream (${id}) failed, openStreams: ${--openStreams}`);
  });
  res.writeHeader('Content-Type', bigFileType);
  streamData(res, fileStream, bigFileSize, () => {
    console.log(`Stream (${id}) succeed, openStreams: ${--openStreams}`);
  });
}).listen(port, (listenSocket) => {
  if (listenSocket) {
    console.log(`Listening to port ${port}`);
  } else {
    console.log(`Failed to listen to port ${port}`);
  }
});

examples/JsonPost.js

Lines changed: 0 additions & 72 deletions
This file was deleted.

examples/ParseRequestBody.js

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
/* This is an example of parsing request body */
2+
3+
const uWS = require('../dist/uws.js');
4+
const port = 9001;
5+
6+
/* Helper function to parse JSON body.
 * Accumulates onData chunks and invokes callback once with the parsed
 * object, or with undefined when the body is not valid JSON (or empty). */
const parseJSONBody = (res, callback) => {
  let accumulated;
  /* Register data callback */
  res.onData((ab, isLast) => {
    const chunk = Buffer.from(ab);
    if (!isLast) {
      /* Copy the chunk: Buffer.from(ab) is only a view over memory that is
       * not guaranteed to outlive this callback */
      accumulated = accumulated
        ? Buffer.concat([accumulated, chunk])
        : Buffer.concat([chunk]);
      return;
    }
    const wholeBody = accumulated ? Buffer.concat([accumulated, chunk]) : chunk;
    let parsed;
    try {
      parsed = JSON.parse(wholeBody);
    } catch (e) {
      /* Invalid or empty body — signal with undefined */
      parsed = undefined;
    }
    callback(parsed);
  });
};
/* Build the (optionally SSL) app with the JSON API route */
const app = uWS./*SSL*/App({
  key_file_name: 'misc/key.pem',
  cert_file_name: 'misc/cert.pem',
  passphrase: '1234'
}).get('/jsonAPI', (res, req) => {
  /* Attach onAborted handler because body parsing is async */
  res.onAborted(() => {
    console.log('Request aborted!');
  });
  parseJSONBody(res, (body) => {
    if (body === undefined) {
      console.log('Invalid JSON or no data at all!');
      res.cork(() => {
        res.writeStatus('400 Bad Request').end();
      });
      return;
    }
    console.log('Valid JSON:', body);
    res.cork(() => {
      res.end('Thanks for your data!');
    });
  });
}).listen(port, (listenSocket) => {
  if (listenSocket) {
    console.log(`Listening to port ${port}`);
  } else {
    console.log(`Failed to listen to port ${port}`);
  }
});

examples/VideoStreamer.js

Lines changed: 0 additions & 115 deletions
This file was deleted.

examples/VideoStreamerSync.js

Lines changed: 0 additions & 42 deletions
This file was deleted.

0 commit comments

Comments
 (0)