improve stream blocks rendering on Nginx

Nikita 2024-12-11 15:45:46 +03:00
parent 75e832d3b8
commit df2d7c3759


@@ -9,12 +9,11 @@ export default class BlocksStreamProcessor {
     this.isJsonStarted = false;
     this.jsonBuffer = '';
     this.lastDispatchedBlocks = null;
-    this.blockStructure = new Map();
-    this.CONFIG = {
-      UPDATE_INTERVAL: 150,
-      CHUNK_DELAY: 50,
-    };
+    this.RENDER_DELAY = 150;
+    // With an Nginx server we get a true streaming experience and receive chunks in JS as soon as they are available.
+    // With an Apache server we receive batches of chunks that JS then processes, so this flag detects which mode we are in.
+    this.isBatchMode = false;
   }

   async processStream(reader) {
@@ -22,6 +21,17 @@ export default class BlocksStreamProcessor {
       while (true) {
         const { value, done } = await reader.read();
         if (done) break;
+
+        // Detect batch mode by analyzing the timing of the first two chunks.
+        if (!this.firstChunkTime) {
+          this.firstChunkTime = Date.now();
+        } else if (!this.secondChunkTime) {
+          this.secondChunkTime = Date.now();
+          // If the chunks arrive far apart, it is probably Apache batching the output.
+          this.isBatchMode =
+            this.secondChunkTime - this.firstChunkTime > 500;
+        }
+
         await this.processChunk(value);
       }
     } catch (error) {
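
The 500 ms threshold is a heuristic: with Nginx the first two chunks typically arrive close together, while Apache-style buffering releases them in widely spaced batches. A minimal standalone sketch of the same check (the helper name and the assertions below are illustrative, not part of the commit):

// Classify the delivery mode from the arrival times of the first two chunks.
function isBatchDelivery(firstChunkTime, secondChunkTime, thresholdMs = 500) {
  return secondChunkTime - firstChunkTime > thresholdMs;
}

console.assert(isBatchDelivery(0, 40) === false); // steady stream (Nginx-like)
console.assert(isBatchDelivery(0, 800) === true); // widely spaced chunks (Apache-like)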
@@ -53,9 +63,6 @@ export default class BlocksStreamProcessor {
         if (!data.content) continue;
         await this.processContent(data.content);
-        await new Promise((resolve) =>
-          setTimeout(resolve, this.CONFIG.CHUNK_DELAY)
-        );
       } catch (e) {
         // console.error('Error processing line:', e);
       }
@@ -79,7 +86,20 @@ export default class BlocksStreamProcessor {
       this.jsonBuffer = '';
     } else {
       this.jsonBuffer += content;
-      await this.tryParseIncomplete(this.jsonBuffer);
+      if (!this.isBatchMode) {
+        // For Nginx: process immediately.
+        await this.tryParseIncomplete(this.jsonBuffer);
+      } else {
+        // For Apache: add a delay for the typing effect.
+        await this.tryParseWithDelay(this.jsonBuffer);
+      }
     }
   }

+  async tryParseWithDelay(jsonContent) {
+    const now = Date.now();
+    if (now - this.lastUpdate >= this.RENDER_DELAY) {
+      await this.tryParseIncomplete(jsonContent);
+    }
+  }
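
tryParseWithDelay is a skip-if-too-soon throttle: calls landing inside the RENDER_DELAY window are dropped rather than queued, which is safe because jsonBuffer is cumulative, so the next permitted call parses everything the skipped calls would have. A generic sketch of the same pattern (illustrative, not part of the commit; note the commit advances its timestamp in dispatchBlocks rather than in the throttle itself):

// Wrap an async parse function so it runs at most once per delayMs window;
// early calls are skipped, not queued.
function makeThrottledParser(parse, delayMs) {
  let lastRun = 0;
  return async function maybeParse(buffer) {
    const now = Date.now();
    if (now - lastRun >= delayMs) {
      lastRun = now;
      await parse(buffer);
    }
  };
}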
@@ -319,28 +339,32 @@ export default class BlocksStreamProcessor {
   async dispatchBlocks(blocks, isFinal = false) {
     const now = Date.now();
-    if (now - this.lastUpdate >= this.CONFIG.UPDATE_INTERVAL || isFinal) {
-      this.lastDispatchedBlocks = blocks;
-      this.dispatch({
-        type: isFinal ? 'REQUEST_AI_SUCCESS' : 'REQUEST_AI_CHUNK',
-        payload: {
-          response: blocks,
-          progress: {
-            charsProcessed: this.contentBuffer.length,
-            blocksCount: blocks.length,
-            isComplete: isFinal,
-          },
-        },
-      });
-      this.lastUpdate = now;
-      if (!isFinal) {
-        await new Promise((resolve) =>
-          setTimeout(resolve, this.CONFIG.UPDATE_INTERVAL)
-        );
-      }
-    }
+    const timeSinceLastUpdate = now - this.lastUpdate;
+
+    // Apply the render delay only in batch mode or if it's too soon.
+    if (
+      (this.isBatchMode || timeSinceLastUpdate < this.RENDER_DELAY) &&
+      !isFinal
+    ) {
+      await new Promise((resolve) =>
+        setTimeout(resolve, this.RENDER_DELAY - timeSinceLastUpdate)
+      );
+    }
+
+    this.lastDispatchedBlocks = blocks;
+    this.lastUpdate = Date.now();
+    this.dispatch({
+      type: isFinal ? 'REQUEST_AI_SUCCESS' : 'REQUEST_AI_CHUNK',
+      payload: {
+        response: blocks,
+        progress: {
+          charsProcessed: this.contentBuffer.length,
+          blocksCount: blocks.length,
+          isComplete: isFinal,
+        },
+      },
+    });
   }

   handleError(error) {
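
For context, a hypothetical consumer of this class might look like the sketch below. The constructor options, the store, and the endpoint are assumptions, since none of them are shown in this diff:

// Hypothetical wiring: feed a streamed fetch() response into the processor.
const processor = new BlocksStreamProcessor({
  dispatch: (action) => store.dispatch(action), // `store` is an assumed Redux-style store
});

const response = await fetch('/api/ai/stream', { method: 'POST' });
// response.body is a ReadableStream; its reader yields Uint8Array chunks
// as the server flushes them (immediately on Nginx, in batches on Apache).
await processor.processStream(response.body.getReader());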