Mirror of https://github.com/WenPai-org/wpmind.git (synced 2025-08-03 02:48:41 +08:00)

added Stream to AI requests

This commit is contained in:
parent 3995877b75
commit bb7b0b6151

9 changed files with 620 additions and 129 deletions
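Note: with this change the /mind/v1/request_ai endpoint no longer returns a single JSON body; it streams Server-Sent-Events-style frames. A minimal sketch of the frames a client receives, using the payload shapes introduced by send_stream_chunk() and send_stream_error() below (the example strings are illustrative, not captured output):

// Frames emitted by the streaming endpoint (sketch).
// Each frame is "data: " + JSON + a blank line, per the SSE convention.
const frames = [
    'data: {"content":"Hello"}\n\n', // partial text chunk
    'data: {"content":" world"}\n\n',
    'data: {"done":true}\n\n', // stream finished
    'data: {"error":true,"code":"no_request","message":"Provide request to receive AI response."}\n\n',
];

// A client parses a frame by stripping the "data: " prefix and JSON-decoding the rest.
for (const frame of frames) {
    console.log(JSON.parse(frame.trim().slice(6)));
}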
@@ -108,28 +108,12 @@ class Mind_Rest extends WP_REST_Controller {
     }

     /**
-     * Send request to OpenAI.
+     * Prepare messages for request.
      *
-     * @param WP_REST_Request $req request object.
-     *
-     * @return mixed
+     * @param string $request user request.
+     * @param string $context context.
      */
-    public function request_ai( WP_REST_Request $req ) {
-        $settings   = get_option( 'mind_settings', array() );
-        $openai_key = $settings['openai_api_key'] ?? '';
-
-        $request = $req->get_param( 'request' ) ?? '';
-        $context = $req->get_param( 'context' ) ?? '';
-
-        if ( ! $openai_key ) {
-            return $this->error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
-        }
-
-        if ( ! $request ) {
-            return $this->error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
-        }
-
-        // Messages.
+    public function prepare_messages( $request, $context ) {
         $messages = [];

         $messages[] = [
@@ -189,52 +173,75 @@ class Mind_Rest extends WP_REST_Controller {
             ),
         ];

+        return $messages;
+    }
+
+    /**
+     * Send request to OpenAI.
+     *
+     * @param WP_REST_Request $req request object.
+     *
+     * @return mixed
+     */
+    public function request_ai( WP_REST_Request $req ) {
+        // Set headers for streaming.
+        header( 'Content-Type: text/event-stream' );
+        header( 'Cache-Control: no-cache' );
+        header( 'Connection: keep-alive' );
+        // For Nginx.
+        header( 'X-Accel-Buffering: no' );
+
+        $settings   = get_option( 'mind_settings', array() );
+        $openai_key = $settings['openai_api_key'] ?? '';
+
+        $request = $req->get_param( 'request' ) ?? '';
+        $context = $req->get_param( 'context' ) ?? '';
+
+        if ( ! $openai_key ) {
+            $this->send_stream_error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
+            exit;
+        }
+
+        if ( ! $request ) {
+            $this->send_stream_error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
+            exit;
+        }
+
+        // Messages.
+        $messages = $this->prepare_messages( $request, $context );
+
         $body = [
             'model'       => 'gpt-4o-mini',
-            'stream'      => false,
+            'stream'      => true,
             'temperature' => 0.7,
             'messages'    => $messages,
         ];

-        // Make Request to OpenAI API.
-        $ai_request = wp_remote_post(
-            'https://api.openai.com/v1/chat/completions',
-            [
-                'headers'   => [
-                    'Authorization' => 'Bearer ' . $openai_key,
-                    'Content-Type'  => 'application/json',
-                ],
-                'timeout'   => 30,
-                'sslverify' => false,
-                'body'      => wp_json_encode( $body ),
-            ]
-        );
-
-        // Error.
-        if ( is_wp_error( $ai_request ) ) {
-            $response = $ai_request->get_error_message();
-
-            return $this->error( 'openai_request_error', $response );
-        } elseif ( wp_remote_retrieve_response_code( $ai_request ) !== 200 ) {
-            $response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
-
-            if ( isset( $response['error']['message'] ) ) {
-                return $this->error( 'openai_request_error', $response['error']['message'] );
-            }
-
-            return $this->error( 'openai_request_error', __( 'OpenAI data failed to load.', 'mind' ) );
-        }
-
-        // Success.
-        $result   = '';
-        $response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
-
-        // TODO: this a limited part, which should be reworked.
-        if ( isset( $response['choices'][0]['message']['content'] ) ) {
-            $result = $response['choices'][0]['message']['content'];
-        }
-
-        return $this->success( $result );
+        // Initialize cURL.
+        // phpcs:disable
+        $ch = curl_init( 'https://api.openai.com/v1/chat/completions' );
+        curl_setopt( $ch, CURLOPT_POST, 1 );
+        curl_setopt( $ch, CURLOPT_RETURNTRANSFER, true );
+        curl_setopt( $ch, CURLOPT_HTTPHEADER, [
+            'Content-Type: application/json',
+            'Authorization: Bearer ' . $openai_key,
+        ] );
+        curl_setopt( $ch, CURLOPT_POSTFIELDS, json_encode( $body ) );
+        curl_setopt( $ch, CURLOPT_WRITEFUNCTION, function ( $curl, $data ) {
+            $this->process_stream_chunk( $data );
+            return strlen( $data );
+        });
+
+        // Execute request.
+        curl_exec( $ch );
+
+        if ( curl_errno( $ch ) ) {
+            $this->send_stream_error( 'curl_error', curl_error( $ch ) );
+        }
+
+        curl_close( $ch );
+        // phpcs:enable
+        exit;
     }

     /**
@@ -255,6 +262,72 @@ class Mind_Rest extends WP_REST_Controller {
         return $method . '&' . rawurlencode( $base_uri ) . '&' . rawurlencode( implode( '&', $r ) );
     }

+    /**
+     * Process streaming chunk from OpenAI.
+     *
+     * @param string $chunk - chunk of data.
+     */
+    private function process_stream_chunk( $chunk ) {
+        $lines = explode( "\n", $chunk );
+
+        foreach ( $lines as $line ) {
+            if ( strlen( trim( $line ) ) === 0 ) {
+                continue;
+            }
+
+            if ( strpos( $line, 'data: ' ) === 0 ) {
+                $json_data = trim( substr( $line, 6 ) );
+
+                if ( '[DONE]' === $json_data ) {
+                    $this->send_stream_chunk( [ 'done' => true ] );
+                    return;
+                }
+
+                try {
+                    $data = json_decode( $json_data, true );
+
+                    if ( isset( $data['choices'][0]['delta']['content'] ) ) {
+                        // Send smaller chunks immediately.
+                        $this->send_stream_chunk(
+                            [
+                                'content' => $data['choices'][0]['delta']['content'],
+                            ]
+                        );
+                        flush();
+                    }
+                } catch ( Exception $e ) {
+                    $this->send_stream_error( 'json_error', $e->getMessage() );
+                }
+            }
+        }
+    }
+
+    /**
+     * Send stream chunk.
+     *
+     * @param array $data - data to send.
+     */
+    private function send_stream_chunk( $data ) {
+        echo 'data: ' . wp_json_encode( $data ) . "\n\n";
+        flush();
+    }
+
+    /**
+     * Send stream error.
+     *
+     * @param string $code - error code.
+     * @param string $message - error message.
+     */
+    private function send_stream_error( $code, $message ) {
+        $this->send_stream_chunk(
+            [
+                'error'   => true,
+                'code'    => $code,
+                'message' => $message,
+            ]
+        );
+    }
+
     /**
      * Success rest.
      *
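Note on the proxying above: OpenAI's streaming Chat Completions API emits its own data: lines with a nested delta shape, and process_stream_chunk() forwards only the delta text in the plugin's simpler frame format. A rough sketch of that translation, assuming the upstream line shape of the public API:

// Upstream SSE line from OpenAI (simplified shape, illustrative).
const upstream = 'data: {"choices":[{"delta":{"content":"Hi"},"index":0}]}';

// process_stream_chunk() extracts choices[0].delta.content...
const delta = JSON.parse(upstream.slice(6)).choices[0].delta.content;

// ...and send_stream_chunk() re-emits it to the browser as:
const forwarded = `data: ${JSON.stringify({ content: delta })}\n\n`;
console.log(forwarded); // data: {"content":"Hi"}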
src/editor/popup/components/ai-response/index.js (new file, 70 lines)
@@ -0,0 +1,70 @@
/**
 * Styles
 */
import './style.scss';

/**
 * WordPress dependencies
 */
import { useRef, useEffect, RawHTML, memo } from '@wordpress/element';

const AIResponse = memo(
    function AIResponse({ response, loading }) {
        const responseRef = useRef();

        useEffect(() => {
            if (!responseRef.current) {
                return;
            }

            const popupContent = responseRef.current.closest(
                '.mind-popup-content'
            );

            if (!popupContent) {
                return;
            }

            // Smooth scroll to bottom of response.
            const { scrollHeight, clientHeight } = popupContent;

            // Only auto-scroll for shorter contents.
            const shouldScroll = scrollHeight - clientHeight < 1000;

            if (shouldScroll) {
                popupContent.scrollTo({
                    top: scrollHeight,
                    behavior: 'smooth',
                });
            }
        }, [response]);

        if (!response && !loading) {
            return null;
        }

        return (
            <div
                ref={responseRef}
                className="mind-popup-response"
                style={{
                    opacity: loading ? 0.85 : 1,
                }}
            >
                <RawHTML>{response}</RawHTML>
                {loading && <div className="mind-popup-cursor" />}
            </div>
        );
    },
    (prevProps, nextProps) => {
        // Custom memoization to prevent unnecessary rerenders.
        return (
            prevProps.renderBuffer.lastUpdate ===
                nextProps.renderBuffer.lastUpdate &&
            prevProps.loading === nextProps.loading &&
            prevProps.progress.isComplete === nextProps.progress.isComplete
        );
    }
);

export default AIResponse;
src/editor/popup/components/ai-response/style.scss (new file, 36 lines)
@@ -0,0 +1,36 @@
.mind-popup-response {
    /* GPU acceleration */
    transform: translateZ(0);
    will-change: transform;

    /* Optimize repaints */
    contain: content;

    /* Smooth typing cursor */
    .mind-popup-cursor {
        display: inline-block;
        width: 1.5px;
        height: 1em;
        background: currentColor;
        margin-left: 2px;
        animation: mind-cursor-blink 1s step-end infinite;
    }
}

@keyframes mind-cursor-blink {
    0%,
    100% {
        opacity: 1;
    }
    50% {
        opacity: 0;
    }
}

/* Optimize for mobile */
@media (max-width: 768px) {
    .mind-popup-response {
        contain: strict;
        height: 100%;
    }
}
@@ -4,15 +4,15 @@ import './style.scss';
  * WordPress dependencies
  */
 import { __ } from '@wordpress/i18n';
-import { useRef, useEffect, RawHTML } from '@wordpress/element';
+import { useRef, useEffect } from '@wordpress/element';
 import { useSelect, useDispatch } from '@wordpress/data';
 import { Button } from '@wordpress/components';

 /**
  * Internal dependencies
  */
 import LoadingText from '../loading-text';
 import Notice from '../notice';
+import AIResponse from '../ai-response';
 import { ReactComponent as PopupPostTitleAboutIcon } from '../../../../icons/popup-post-title-about.svg';
 import { ReactComponent as PopupPostAboutIcon } from '../../../../icons/popup-post-about.svg';
 import { ReactComponent as PopupOutlineAboutIcon } from '../../../../icons/popup-outline-about.svg';
@@ -73,29 +73,40 @@ export default function Content() {

     const { setInput, setScreen } = useDispatch('mind/popup');

-    const { isOpen, input, screen, loading, response, error } = useSelect(
-        (select) => {
-            const {
-                isOpen: checkIsOpen,
-                getInput,
-                getContext,
-                getScreen,
-                getLoading,
-                getResponse,
-                getError,
-            } = select('mind/popup');
+    const {
+        isOpen,
+        input,
+        screen,
+        loading,
+        response,
+        progress,
+        renderBuffer,
+        error,
+    } = useSelect((select) => {
+        const {
+            isOpen: checkIsOpen,
+            getInput,
+            getContext,
+            getScreen,
+            getLoading,
+            getResponse,
+            getProgress,
+            getRenderBuffer,
+            getError,
+        } = select('mind/popup');

-            return {
-                isOpen: checkIsOpen(),
-                input: getInput(),
-                context: getContext(),
-                screen: getScreen(),
-                loading: getLoading(),
-                response: getResponse(),
-                error: getError(),
-            };
-        }
-    );
+        return {
+            isOpen: checkIsOpen(),
+            input: getInput(),
+            context: getContext(),
+            screen: getScreen(),
+            loading: getLoading(),
+            response: getResponse(),
+            progress: getProgress(),
+            renderBuffer: getRenderBuffer(),
+            error: getError(),
+        };
+    });

     function focusInput() {
         if (ref?.current) {
@@ -156,12 +167,14 @@ export default function Content() {

                     {screen === 'request' && (
                         <div className="mind-popup-request">
-                            {loading && (
-                                <LoadingText>
-                                    {__('Waiting for AI response', 'mind')}
-                                </LoadingText>
+                            {response && (
+                                <AIResponse
+                                    progress={progress}
+                                    loading={loading}
+                                    response={response}
+                                    renderBuffer={renderBuffer}
+                                />
                             )}
-                            {!loading && response && <RawHTML>{response}</RawHTML>}
                             {!loading && error && <Notice type="error">{error}</Notice>}
                         </div>
                     )}
@@ -7,6 +7,8 @@ import { __ } from '@wordpress/i18n';
 import { Button } from '@wordpress/components';
 import { useSelect, useDispatch } from '@wordpress/data';

+import LoadingText from '../loading-text';
+
 export default function Input(props) {
     const { onInsert } = props;

@@ -25,7 +27,7 @@ export default function Input(props) {
         };
     });

-    const showFooter = response || (input && !loading && !response);
+    const showFooter = response || loading || (input && !loading && !response);

     if (!showFooter) {
         return null;
@@ -33,6 +35,7 @@ export default function Input(props) {

     return (
         <div className="mind-popup-footer">
+            {loading && <LoadingText>{__('Writing', 'mind')}</LoadingText>}
             <div className="mind-popup-footer-actions">
                 {input && !loading && !response && (
                     <Button
@@ -43,7 +46,7 @@ export default function Input(props) {
                         {__('Get Answer', 'mind')} <kbd>⏎</kbd>
                     </Button>
                 )}
-                {response && (
+                {response && !loading && (
                     <>
                         <Button
                             onClick={() => {
@@ -7,6 +7,7 @@ import apiFetch from '@wordpress/api-fetch';
 /**
  * Internal dependencies.
  */
+import AIStreamProcessor from '../../../utils/ai-stream-processor';
 import getSelectedBlocksContent from '../../../utils/get-selected-blocks-content';
 import { isConnected } from '../core/selectors';

@@ -77,8 +78,92 @@ export function setError(error) {
     };
 }

+export function reset() {
+    return {
+        type: 'RESET',
+    };
+}
+
+/**
+ * Process stream data from the API.
+ *
+ * @param {ReadableStream} reader Stream reader.
+ * @return {Function} Redux-style action function.
+ */
+export function processStream(reader) {
+    return async ({ dispatch }) => {
+        const decoder = new TextDecoder();
+        let buffer = '';
+        let responseText = '';
+
+        // Create artificial delay for smoother updates.
+        const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+        try {
+            while (true) {
+                const { value, done } = await reader.read();
+                if (done) break;
+
+                // Process smaller chunks.
+                buffer += decoder.decode(value, { stream: true });
+                const lines = buffer.split('\n');
+                buffer = lines.pop() || '';
+
+                for (const line of lines) {
+                    if (line.startsWith('data: ')) {
+                        try {
+                            const data = JSON.parse(line.slice(6));
+
+                            if (data.error) {
+                                dispatch({
+                                    type: 'REQUEST_AI_ERROR',
+                                    payload: data.message,
+                                });
+                                return;
+                            }
+
+                            if (data.done) {
+                                dispatch({
+                                    type: 'REQUEST_AI_SUCCESS',
+                                    payload: responseText,
+                                });
+                                return;
+                            }
+
+                            if (data.content) {
+                                responseText += data.content;
+
+                                dispatch({
+                                    type: 'REQUEST_AI_CHUNK',
+                                    payload: responseText,
+                                });
+
+                                // Add small delay between chunks for smoother appearance.
+                                await delay(50);
+                            }
+                        } catch (error) {
+                            dispatch({
+                                type: 'REQUEST_AI_ERROR',
+                                payload: __(
+                                    'Failed to parse stream data',
+                                    'mind'
+                                ),
+                            });
+                        }
+                    }
+                }
+            }
+        } catch (error) {
+            dispatch({
+                type: 'REQUEST_AI_ERROR',
+                payload: error.message,
+            });
+        }
+    };
+}
+
 export function requestAI() {
-    return ({ dispatch, select }) => {
+    return async ({ dispatch, select }) => {
         if (!isConnected) {
             return;
         }
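Note on processStream() above: a network read can end in the middle of a data: line, so the unterminated tail is kept in buffer and prepended to the next read. A standalone sketch of that carry-over, with hypothetical chunk boundaries:

let buffer = '';
const reads = ['data: {"con', 'tent":"Hi"}\n']; // one line split across two reads

for (const chunk of reads) {
    buffer += chunk;
    const lines = buffer.split('\n');
    buffer = lines.pop() || ''; // keep the incomplete tail for the next read

    for (const line of lines) {
        console.log(JSON.parse(line.slice(6))); // { content: 'Hi' }
    }
}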
@@ -98,32 +183,34 @@ export function requestAI() {
             data.context = getSelectedBlocksContent();
         }

-        apiFetch({
-            path: '/mind/v1/request_ai',
-            method: 'POST',
-            data,
-        })
-            .then((res) => {
-                dispatch({
-                    type: 'REQUEST_AI_SUCCESS',
-                    payload: res.response,
-                });
-                return res.response;
-            })
-            .catch((err) => {
-                dispatch({
-                    type: 'REQUEST_AI_ERROR',
-                    payload:
-                        err?.response ||
-                        err?.error_code ||
-                        __('Something went wrong, please, try again…', 'mind'),
-                });
-            });
+        try {
+            // Initialize StreamProcessor with dispatch.
+            const streamProcessor = new AIStreamProcessor(dispatch);
+
+            const response = await apiFetch({
+                path: '/mind/v1/request_ai',
+                method: 'POST',
+                data,
+                // Important: don't parse the response automatically.
+                parse: false,
+            });
+
+            if (!response.ok) {
+                const errorData = await response.json();
+                throw new Error(
+                    errorData.message ||
+                        __('Failed to fetch AI response', 'mind')
+                );
+            }
+
+            // Process the stream.
+            await streamProcessor.processStream(response.body.getReader());
+        } catch (error) {
+            dispatch({
+                type: 'REQUEST_AI_ERROR',
+                payload:
+                    error.message || __('Failed to fetch AI response', 'mind'),
+            });
+        }
     };
 }
-
-export function reset() {
-    return {
-        type: 'RESET',
-    };
-}
@@ -1,18 +1,29 @@
 import mdToHtml from '../../../utils/md-to-html';

-function reducer(
-    state = {
-        isOpen: false,
-        input: '',
-        context: '',
-        insertionPlace: '',
-        screen: '',
-        loading: false,
-        response: false,
-        error: false,
-    },
-    action = {}
-) {
+const initialState = {
+    isOpen: false,
+    input: '',
+    context: '',
+    insertionPlace: '',
+    screen: '',
+    loading: false,
+    response: '',
+    error: null,
+    progress: {
+        charsProcessed: 0,
+        queueSize: 0,
+        isComplete: false,
+    },
+    renderBuffer: {
+        content: '',
+        lastUpdate: 0,
+    },
+};
+
+// Throttle in ms.
+const RENDER_THROTTLE = 50;
+
+function reducer(state = initialState, action = {}) {
     switch (action.type) {
         case 'CLOSE':
             if (state.isOpen) {
@@ -94,22 +105,62 @@ function reducer(
         case 'REQUEST_AI_PENDING':
             return {
                 ...state,
-                loading: true,
                 isOpen: true,
+                loading: true,
+                response: '',
+                error: null,
                 screen: 'request',
+                progress: initialState.progress,
+                renderBuffer: initialState.renderBuffer,
             };
+        case 'REQUEST_AI_CHUNK':
+            const now = Date.now();
+            const shouldUpdate =
+                now - state.renderBuffer.lastUpdate >= RENDER_THROTTLE;
+
+            if (!shouldUpdate) {
+                return {
+                    ...state,
+                    renderBuffer: {
+                        content: action.payload.content,
+                        lastUpdate: state.renderBuffer.lastUpdate,
+                    },
+                };
+            }
+
+            return {
+                ...state,
+                loading: true,
+                response: action.payload.content
+                    ? mdToHtml(action.payload.content)
+                    : false,
+                progress: action.payload.progress,
+                renderBuffer: {
+                    content: action.payload.content,
+                    lastUpdate: now,
+                },
+            };
         case 'REQUEST_AI_SUCCESS':
             return {
                 ...state,
                 loading: false,
-                response: action.payload ? mdToHtml(action.payload) : false,
+                response: action.payload.content
+                    ? mdToHtml(action.payload.content)
+                    : false,
+                progress: { ...action.payload.progress, isComplete: true },
+                renderBuffer: {
+                    content: action.payload.content,
+                    lastUpdate: Date.now(),
+                },
             };
         case 'REQUEST_AI_ERROR':
             return {
                 ...state,
                 loading: false,
-                error: action.payload || '',
+                response: false,
+                error: action.payload || '',
+                progress: initialState.progress,
+                renderBuffer: initialState.renderBuffer,
             };
         case 'RESET':
             return {
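Note on the REQUEST_AI_CHUNK branch above: chunks that arrive within RENDER_THROTTLE ms of the last render only refresh renderBuffer, so the markdown conversion and re-render run at most every 50 ms. A condensed sketch of the gating check:

const RENDER_THROTTLE = 50; // ms, as in the reducer above

function shouldRender(lastUpdate, now = Date.now()) {
    return now - lastUpdate >= RENDER_THROTTLE;
}

// Chunks landing 10 ms after the last paint update only the buffer;
// a repaint happens once at least 50 ms have elapsed.
console.log(shouldRender(Date.now() - 10)); // false
console.log(shouldRender(Date.now() - 60)); // true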
@@ -26,6 +26,14 @@ export function getResponse(state) {
     return state?.response || false;
 }

+export function getProgress(state) {
+    return state?.progress || false;
+}
+
+export function getRenderBuffer(state) {
+    return state?.renderBuffer || false;
+}
+
 export function getError(state) {
     return state?.error || false;
 }
src/utils/ai-stream-processor/index.js (new file, 150 lines)
@@ -0,0 +1,150 @@
export default class AIStreamProcessor {
    constructor(dispatch) {
        this.dispatch = dispatch;
        this.buffer = '';
        this.responseText = '';
        this.decoder = new TextDecoder();
        this.lastUpdate = Date.now();
        this.updateQueue = [];
        this.isProcessing = false;

        // Configuration.
        this.CONFIG = {
            // ms between chunks.
            CHUNK_DELAY: 30,
            // ms minimum between updates.
            UPDATE_INTERVAL: 50,
            // number of chunks to batch.
            BATCH_SIZE: 3,
            // maximum queue size.
            MAX_QUEUE_SIZE: 100,
            TYPING_SPEED: {
                FAST: 20,
                MEDIUM: 35,
                SLOW: 50,
            },
        };
    }

    async processStream(reader) {
        try {
            this.startQueueProcessor();

            while (true) {
                const { value, done } = await reader.read();
                if (done) break;

                await this.processChunk(value);
            }

            await this.flushQueue();
            this.sendCompletion();
        } catch (error) {
            this.handleError(error);
        }
    }

    async processChunk(value) {
        const text = this.decoder.decode(value, { stream: true });
        const chunks = this.parseChunks(text);

        for (const chunk of chunks) {
            await this.queueUpdate(chunk);
        }
    }

    parseChunks(text) {
        const chunks = [];
        const lines = text.split('\n');

        for (const line of lines) {
            if (line.startsWith('data: ')) {
                try {
                    const data = JSON.parse(line.slice(6));
                    if (data.content) {
                        chunks.push(data.content);
                    }
                } catch (error) {
                    // eslint-disable-next-line no-console
                    console.debug('Chunk parse error:', error);
                }
            }
        }

        return chunks;
    }

    async queueUpdate(content) {
        this.updateQueue.push(content);

        // Prevent queue from growing too large.
        if (this.updateQueue.length > this.CONFIG.MAX_QUEUE_SIZE) {
            await this.flushQueue();
        }
    }

    async startQueueProcessor() {
        if (this.isProcessing) return;

        this.isProcessing = true;
        while (this.updateQueue.length > 0) {
            const batch = this.updateQueue.splice(0, this.CONFIG.BATCH_SIZE);
            await this.processBatch(batch);
        }
        this.isProcessing = false;
    }

    async processBatch(batch) {
        const content = batch.join('');
        this.responseText += content;

        // Throttle updates.
        const now = Date.now();
        if (now - this.lastUpdate >= this.CONFIG.UPDATE_INTERVAL) {
            this.dispatch({
                type: 'REQUEST_AI_CHUNK',
                payload: {
                    content: this.responseText,
                    progress: this.calculateProgress(),
                },
            });
            this.lastUpdate = now;
        }

        await new Promise((resolve) =>
            setTimeout(resolve, this.CONFIG.CHUNK_DELAY)
        );
    }

    calculateProgress() {
        // Implement your progress calculation logic.
        return {
            charsProcessed: this.responseText.length,
            queueSize: this.updateQueue.length,
            isComplete: false,
        };
    }

    async flushQueue() {
        while (this.updateQueue.length > 0) {
            await this.startQueueProcessor();
        }
    }

    sendCompletion() {
        this.dispatch({
            type: 'REQUEST_AI_SUCCESS',
            payload: {
                content: this.responseText,
                progress: { isComplete: true },
            },
        });
    }

    handleError(error) {
        this.dispatch({
            type: 'REQUEST_AI_ERROR',
            payload: error.message,
        });
    }
}
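Note on decoder.decode(value, { stream: true }) in processChunk() above: the stream flag makes TextDecoder buffer a multi-byte UTF-8 character that is split across reads instead of emitting a replacement character. A standalone demonstration:

const decoder = new TextDecoder();
const bytes = new TextEncoder().encode('héllo'); // 'é' encodes to 2 bytes

// Split mid-character, as a network read might.
console.log(decoder.decode(bytes.slice(0, 2), { stream: true })); // "h"
console.log(decoder.decode(bytes.slice(2), { stream: true }));    // "éllo"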