added Stream to AI requests

Nikita 2024-12-09 14:02:26 +03:00
parent 3995877b75
commit bb7b0b6151
9 changed files with 620 additions and 129 deletions

View file

@@ -108,28 +108,12 @@ class Mind_Rest extends WP_REST_Controller {
 	}

 	/**
-	 * Send request to OpenAI.
+	 * Prepare messages for request.
 	 *
-	 * @param WP_REST_Request $req request object.
-	 *
-	 * @return mixed
+	 * @param string $request user request.
+	 * @param string $context context.
 	 */
-	public function request_ai( WP_REST_Request $req ) {
-		$settings   = get_option( 'mind_settings', array() );
-		$openai_key = $settings['openai_api_key'] ?? '';
-		$request    = $req->get_param( 'request' ) ?? '';
-		$context    = $req->get_param( 'context' ) ?? '';
-
-		if ( ! $openai_key ) {
-			return $this->error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
-		}
-
-		if ( ! $request ) {
-			return $this->error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
-		}
-
+	public function prepare_messages( $request, $context ) {
 		// Messages.
 		$messages   = [];
 		$messages[] = [
@@ -189,52 +173,75 @@ class Mind_Rest extends WP_REST_Controller {
 			),
 		];

+		return $messages;
+	}
+
+	/**
+	 * Send request to OpenAI.
+	 *
+	 * @param WP_REST_Request $req request object.
+	 *
+	 * @return mixed
+	 */
+	public function request_ai( WP_REST_Request $req ) {
+		// Set headers for streaming.
+		header( 'Content-Type: text/event-stream' );
+		header( 'Cache-Control: no-cache' );
+		header( 'Connection: keep-alive' );
+
+		// For Nginx.
+		header( 'X-Accel-Buffering: no' );
+
+		$settings   = get_option( 'mind_settings', array() );
+		$openai_key = $settings['openai_api_key'] ?? '';
+		$request    = $req->get_param( 'request' ) ?? '';
+		$context    = $req->get_param( 'context' ) ?? '';
+
+		if ( ! $openai_key ) {
+			$this->send_stream_error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
+			exit;
+		}
+
+		if ( ! $request ) {
+			$this->send_stream_error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
+			exit;
+		}
+
+		// Messages.
+		$messages = $this->prepare_messages( $request, $context );

 		$body = [
 			'model'       => 'gpt-4o-mini',
-			'stream'      => false,
+			'stream'      => true,
 			'temperature' => 0.7,
 			'messages'    => $messages,
 		];

-		// Make Request to OpenAI API.
-		$ai_request = wp_remote_post(
-			'https://api.openai.com/v1/chat/completions',
-			[
-				'headers'   => [
-					'Authorization' => 'Bearer ' . $openai_key,
-					'Content-Type'  => 'application/json',
-				],
-				'timeout'   => 30,
-				'sslverify' => false,
-				'body'      => wp_json_encode( $body ),
-			]
-		);
+		// Initialize cURL.
+		// phpcs:disable
+		$ch = curl_init( 'https://api.openai.com/v1/chat/completions' );
+		curl_setopt( $ch, CURLOPT_POST, 1 );
+		curl_setopt( $ch, CURLOPT_RETURNTRANSFER, true );
+		curl_setopt( $ch, CURLOPT_HTTPHEADER, [
+			'Content-Type: application/json',
+			'Authorization: Bearer ' . $openai_key,
+		] );
+		curl_setopt( $ch, CURLOPT_POSTFIELDS, json_encode( $body ) );
+		curl_setopt( $ch, CURLOPT_WRITEFUNCTION, function ( $curl, $data ) {
+			$this->process_stream_chunk( $data );
+			return strlen( $data );
+		});

-		// Error.
-		if ( is_wp_error( $ai_request ) ) {
-			$response = $ai_request->get_error_message();
-			return $this->error( 'openai_request_error', $response );
-		} elseif ( wp_remote_retrieve_response_code( $ai_request ) !== 200 ) {
-			$response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
-
-			if ( isset( $response['error']['message'] ) ) {
-				return $this->error( 'openai_request_error', $response['error']['message'] );
-			}
-
-			return $this->error( 'openai_request_error', __( 'OpenAI data failed to load.', 'mind' ) );
+		// Execute request.
+		curl_exec( $ch );
+
+		if ( curl_errno( $ch ) ) {
+			$this->send_stream_error( 'curl_error', curl_error( $ch ) );
 		}

-		// Success.
-		$result   = '';
-		$response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
-
-		// TODO: this a limited part, which should be reworked.
-		if ( isset( $response['choices'][0]['message']['content'] ) ) {
-			$result = $response['choices'][0]['message']['content'];
-		}
-
-		return $this->success( $result );
+		curl_close( $ch );
+		// phpcs:enable
+
+		exit;
 	}

 	/**
@@ -255,6 +262,72 @@ class Mind_Rest extends WP_REST_Controller {
 		return $method . '&' . rawurlencode( $base_uri ) . '&' . rawurlencode( implode( '&', $r ) );
 	}

+	/**
+	 * Process streaming chunk from OpenAI.
+	 *
+	 * @param string $chunk - chunk of data.
+	 */
+	private function process_stream_chunk( $chunk ) {
+		$lines = explode( "\n", $chunk );
+
+		foreach ( $lines as $line ) {
+			if ( strlen( trim( $line ) ) === 0 ) {
+				continue;
+			}
+
+			if ( strpos( $line, 'data: ' ) === 0 ) {
+				$json_data = trim( substr( $line, 6 ) );
+
+				if ( '[DONE]' === $json_data ) {
+					$this->send_stream_chunk( [ 'done' => true ] );
+					return;
+				}
+
+				try {
+					$data = json_decode( $json_data, true );
+
+					if ( isset( $data['choices'][0]['delta']['content'] ) ) {
+						// Send smaller chunks immediately.
+						$this->send_stream_chunk(
+							[
+								'content' => $data['choices'][0]['delta']['content'],
+							]
+						);
+						flush();
+					}
+				} catch ( Exception $e ) {
+					$this->send_stream_error( 'json_error', $e->getMessage() );
+				}
+			}
+		}
+	}
+
+	/**
+	 * Send stream chunk.
+	 *
+	 * @param array $data - data to send.
+	 */
+	private function send_stream_chunk( $data ) {
+		echo 'data: ' . wp_json_encode( $data ) . "\n\n";
+		flush();
+	}
+
+	/**
+	 * Send stream error.
+	 *
+	 * @param string $code - error code.
+	 * @param string $message - error message.
+	 */
+	private function send_stream_error( $code, $message ) {
+		$this->send_stream_chunk(
+			[
+				'error'   => true,
+				'code'    => $code,
+				'message' => $message,
+			]
+		);
+	}
+
 	/**
 	 * Success rest.
 	 *
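A note on the wire format: `send_stream_chunk()` re-emits OpenAI deltas as plain SSE-style `data:` frames — `{"content":"…"}` per delta, `{"done":true}` at the end, `{"error":true,…}` on failure. Below is a minimal reading sketch; the function name is hypothetical, it assumes the default `/wp-json` route prefix with auth handled elsewhere, and it skips the partial-frame buffering that the plugin's real client code (further down in this commit) performs.

```js
// Hypothetical minimal consumer for the /mind/v1/request_ai stream.
async function readMindStream(requestText) {
	const res = await fetch('/wp-json/mind/v1/request_ai', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ request: requestText }),
	});

	const reader = res.body.getReader();
	const decoder = new TextDecoder();
	let text = '';

	for (;;) {
		const { value, done } = await reader.read();
		if (done) break;

		// Simplification: assumes each "data:" frame arrives whole.
		for (const line of decoder.decode(value, { stream: true }).split('\n')) {
			if (!line.startsWith('data: ')) continue;

			const data = JSON.parse(line.slice(6));
			if (data.error) throw new Error(data.message);
			if (data.content) text += data.content;
		}
	}

	return text;
}
```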

View file

@@ -0,0 +1,70 @@
/**
 * Styles
 */
import './style.scss';

/**
 * WordPress dependencies
 */
import { useRef, useEffect, RawHTML, memo } from '@wordpress/element';

const AIResponse = memo(
	function AIResponse({ response, loading }) {
		const responseRef = useRef();

		useEffect(() => {
			if (!responseRef.current) {
				return;
			}

			const popupContent = responseRef.current.closest(
				'.mind-popup-content'
			);

			if (!popupContent) {
				return;
			}

			// Smooth scroll to bottom of response.
			const { scrollHeight, clientHeight } = popupContent;

			// Only auto-scroll for shorter contents.
			const shouldScroll = scrollHeight - clientHeight < 1000;

			if (shouldScroll) {
				popupContent.scrollTo({
					top: scrollHeight,
					behavior: 'smooth',
				});
			}
		}, [response]);

		if (!response && !loading) {
			return null;
		}

		return (
			<div
				ref={responseRef}
				className="mind-popup-response"
				style={{
					opacity: loading ? 0.85 : 1,
				}}
			>
				<RawHTML>{response}</RawHTML>
				{loading && <div className="mind-popup-cursor" />}
			</div>
		);
	},
	(prevProps, nextProps) => {
		// Custom memoization to prevent unnecessary rerenders.
		return (
			prevProps.renderBuffer.lastUpdate ===
				nextProps.renderBuffer.lastUpdate &&
			prevProps.loading === nextProps.loading &&
			prevProps.progress.isComplete === nextProps.progress.isComplete
		);
	}
);

export default AIResponse;
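Note that the component body reads only `response` and `loading`, while the comparator keys off `renderBuffer.lastUpdate` and `progress.isComplete`, so callers must pass all four props (as the popup content below does). The stamp acts as a cheap version marker for the response string; the effective rule, with hypothetical `prev`/`next` names:

```js
// Re-render only when the reducer stamped a new buffer update or the
// loading / completion flags flipped; the (potentially large) response
// string itself is never compared.
const skipRender =
	prev.renderBuffer.lastUpdate === next.renderBuffer.lastUpdate &&
	prev.loading === next.loading &&
	prev.progress.isComplete === next.progress.isComplete;
```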

View file

@@ -0,0 +1,36 @@
.mind-popup-response {
	/* GPU acceleration */
	transform: translateZ(0);
	will-change: transform;

	/* Optimize repaints */
	contain: content;

	/* Smooth typing cursor */
	.mind-popup-cursor {
		display: inline-block;
		width: 1.5px;
		height: 1em;
		background: currentColor;
		margin-left: 2px;
		animation: mind-cursor-blink 1s step-end infinite;
	}
}

@keyframes mind-cursor-blink {
	0%,
	100% {
		opacity: 1;
	}
	50% {
		opacity: 0;
	}
}

/* Optimize for mobile */
@media (max-width: 768px) {
	.mind-popup-response {
		contain: strict;
		height: 100%;
	}
}

View file

@@ -4,15 +4,15 @@ import './style.scss';
  * WordPress dependencies
  */
 import { __ } from '@wordpress/i18n';
-import { useRef, useEffect, RawHTML } from '@wordpress/element';
+import { useRef, useEffect } from '@wordpress/element';
 import { useSelect, useDispatch } from '@wordpress/data';
 import { Button } from '@wordpress/components';

 /**
  * Internal dependencies
  */
-import LoadingText from '../loading-text';
 import Notice from '../notice';
+import AIResponse from '../ai-response';
 import { ReactComponent as PopupPostTitleAboutIcon } from '../../../../icons/popup-post-title-about.svg';
 import { ReactComponent as PopupPostAboutIcon } from '../../../../icons/popup-post-about.svg';
 import { ReactComponent as PopupOutlineAboutIcon } from '../../../../icons/popup-outline-about.svg';
@@ -73,29 +73,40 @@ export default function Content() {
 	const { setInput, setScreen } = useDispatch('mind/popup');

-	const { isOpen, input, screen, loading, response, error } = useSelect(
-		(select) => {
-			const {
-				isOpen: checkIsOpen,
-				getInput,
-				getContext,
-				getScreen,
-				getLoading,
-				getResponse,
-				getError,
-			} = select('mind/popup');
+	const {
+		isOpen,
+		input,
+		screen,
+		loading,
+		response,
+		progress,
+		renderBuffer,
+		error,
+	} = useSelect((select) => {
+		const {
+			isOpen: checkIsOpen,
+			getInput,
+			getContext,
+			getScreen,
+			getLoading,
+			getResponse,
+			getProgress,
+			getRenderBuffer,
+			getError,
+		} = select('mind/popup');

-			return {
-				isOpen: checkIsOpen(),
-				input: getInput(),
-				context: getContext(),
-				screen: getScreen(),
-				loading: getLoading(),
-				response: getResponse(),
-				error: getError(),
-			};
-		}
-	);
+		return {
+			isOpen: checkIsOpen(),
+			input: getInput(),
+			context: getContext(),
+			screen: getScreen(),
+			loading: getLoading(),
+			response: getResponse(),
+			progress: getProgress(),
+			renderBuffer: getRenderBuffer(),
+			error: getError(),
+		};
+	});

 	function focusInput() {
 		if (ref?.current) {
@@ -156,12 +167,14 @@ export default function Content() {
 			{screen === 'request' && (
 				<div className="mind-popup-request">
-					{loading && (
-						<LoadingText>
-							{__('Waiting for AI response', 'mind')}
-						</LoadingText>
+					{response && (
+						<AIResponse
+							progress={progress}
+							loading={loading}
+							response={response}
+							renderBuffer={renderBuffer}
+						/>
 					)}
-					{!loading && response && <RawHTML>{response}</RawHTML>}
 					{!loading && error && <Notice type="error">{error}</Notice>}
 				</div>
 			)}

View file

@@ -7,6 +7,8 @@ import { __ } from '@wordpress/i18n';
 import { Button } from '@wordpress/components';
 import { useSelect, useDispatch } from '@wordpress/data';

+import LoadingText from '../loading-text';
+
 export default function Input(props) {
 	const { onInsert } = props;
@@ -25,7 +27,7 @@
 		};
 	});

-	const showFooter = response || (input && !loading && !response);
+	const showFooter = response || loading || (input && !loading && !response);

 	if (!showFooter) {
 		return null;
@@ -33,6 +35,7 @@
 	return (
 		<div className="mind-popup-footer">
+			{loading && <LoadingText>{__('Writing', 'mind')}</LoadingText>}
 			<div className="mind-popup-footer-actions">
 				{input && !loading && !response && (
 					<Button
@@ -43,7 +46,7 @@
 					{__('Get Answer', 'mind')} <kbd></kbd>
 				</Button>
 			)}
-			{response && (
+			{response && !loading && (
 				<>
 					<Button
 						onClick={() => {

View file

@@ -7,6 +7,7 @@ import apiFetch from '@wordpress/api-fetch';
 /**
  * Internal dependencies.
  */
+import AIStreamProcessor from '../../../utils/ai-stream-processor';
 import getSelectedBlocksContent from '../../../utils/get-selected-blocks-content';
 import { isConnected } from '../core/selectors';
@@ -77,8 +78,92 @@ export function setError(error) {
 	};
 }

+export function reset() {
+	return {
+		type: 'RESET',
+	};
+}
+
+/**
+ * Process stream data from the API.
+ *
+ * @param {ReadableStream} reader Stream reader.
+ * @return {Function} Redux-style action function.
+ */
+export function processStream(reader) {
+	return async ({ dispatch }) => {
+		const decoder = new TextDecoder();
+		let buffer = '';
+		let responseText = '';
+
+		// Create artificial delay for smoother updates.
+		const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+		try {
+			while (true) {
+				const { value, done } = await reader.read();
+				if (done) break;
+
+				// Process smaller chunks.
+				buffer += decoder.decode(value, { stream: true });
+				const lines = buffer.split('\n');
+				buffer = lines.pop() || '';
+
+				for (const line of lines) {
+					if (line.startsWith('data: ')) {
+						try {
+							const data = JSON.parse(line.slice(6));
+
+							if (data.error) {
+								dispatch({
+									type: 'REQUEST_AI_ERROR',
+									payload: data.message,
+								});
+								return;
+							}
+
+							if (data.done) {
+								dispatch({
+									type: 'REQUEST_AI_SUCCESS',
+									payload: responseText,
+								});
+								return;
+							}
+
+							if (data.content) {
+								responseText += data.content;
+								dispatch({
+									type: 'REQUEST_AI_CHUNK',
+									payload: responseText,
+								});
+
+								// Add small delay between chunks for smoother appearance.
+								await delay(50);
+							}
+						} catch (error) {
+							dispatch({
+								type: 'REQUEST_AI_ERROR',
+								payload: __(
+									'Failed to parse stream data',
+									'mind'
+								),
+							});
+						}
+					}
+				}
+			}
+		} catch (error) {
+			dispatch({
+				type: 'REQUEST_AI_ERROR',
+				payload: error.message,
+			});
+		}
+	};
+}
+
 export function requestAI() {
-	return ({ dispatch, select }) => {
+	return async ({ dispatch, select }) => {
 		if (!isConnected) {
 			return;
 		}
@@ -98,32 +183,34 @@
 			data.context = getSelectedBlocksContent();
 		}

-		apiFetch({
-			path: '/mind/v1/request_ai',
-			method: 'POST',
-			data,
-		})
-			.then((res) => {
-				dispatch({
-					type: 'REQUEST_AI_SUCCESS',
-					payload: res.response,
-				});
-
-				return res.response;
-			})
-			.catch((err) => {
-				dispatch({
-					type: 'REQUEST_AI_ERROR',
-					payload:
-						err?.response ||
-						err?.error_code ||
-						__('Something went wrong, please, try again…', 'mind'),
-				});
-			});
-	};
-}
-
-export function reset() {
-	return {
-		type: 'RESET',
+		try {
+			// Initialize StreamProcessor with dispatch.
+			const streamProcessor = new AIStreamProcessor(dispatch);
+
+			const response = await apiFetch({
+				path: '/mind/v1/request_ai',
+				method: 'POST',
+				data,
+				// Important: don't parse the response automatically.
+				parse: false,
+			});
+
+			if (!response.ok) {
+				const errorData = await response.json();
+				throw new Error(
+					errorData.message ||
+						__('Failed to fetch AI response', 'mind')
+				);
+			}
+
+			// Process the stream.
+			await streamProcessor.processStream(response.body.getReader());
+		} catch (error) {
+			dispatch({
+				type: 'REQUEST_AI_ERROR',
+				payload:
+					error.message || __('Failed to fetch AI response', 'mind'),
+			});
+		}
 	};
 }

View file

@@ -1,18 +1,29 @@
 import mdToHtml from '../../../utils/md-to-html';

-function reducer(
-	state = {
-		isOpen: false,
-		input: '',
-		context: '',
-		insertionPlace: '',
-		screen: '',
-		loading: false,
-		response: false,
-		error: false,
+const initialState = {
+	isOpen: false,
+	input: '',
+	context: '',
+	insertionPlace: '',
+	screen: '',
+	loading: false,
+	response: '',
+	error: null,
+	progress: {
+		charsProcessed: 0,
+		queueSize: 0,
+		isComplete: false,
 	},
-	action = {}
-) {
+	renderBuffer: {
+		content: '',
+		lastUpdate: 0,
+	},
+};
+
+// throttle in ms.
+const RENDER_THROTTLE = 50;
+
+function reducer(state = initialState, action = {}) {
 	switch (action.type) {
 		case 'CLOSE':
 			if (state.isOpen) {
@@ -94,22 +105,62 @@ function reducer(
 		case 'REQUEST_AI_PENDING':
 			return {
 				...state,
-				loading: true,
 				isOpen: true,
+				loading: true,
+				response: '',
+				error: null,
 				screen: 'request',
+				progress: initialState.progress,
+				renderBuffer: initialState.renderBuffer,
 			};
+		case 'REQUEST_AI_CHUNK':
+			const now = Date.now();
+			const shouldUpdate =
+				now - state.renderBuffer.lastUpdate >= RENDER_THROTTLE;
+
+			if (!shouldUpdate) {
+				return {
+					...state,
+					renderBuffer: {
+						content: action.payload.content,
+						lastUpdate: state.renderBuffer.lastUpdate,
+					},
+				};
+			}
+
+			return {
+				...state,
+				loading: true,
+				response: action.payload.content
+					? mdToHtml(action.payload.content)
+					: false,
+				progress: action.payload.progress,
+				renderBuffer: {
+					content: action.payload.content,
+					lastUpdate: now,
+				},
+			};
 		case 'REQUEST_AI_SUCCESS':
 			return {
 				...state,
 				loading: false,
-				response: action.payload ? mdToHtml(action.payload) : false,
+				response: action.payload.content
+					? mdToHtml(action.payload.content)
+					: false,
+				progress: { ...action.payload.progress, isComplete: true },
+				renderBuffer: {
+					content: action.payload.content,
+					lastUpdate: Date.now(),
+				},
 			};
 		case 'REQUEST_AI_ERROR':
 			return {
 				...state,
 				loading: false,
-				error: action.payload || '',
 				response: false,
+				error: action.payload || '',
+				progress: initialState.progress,
+				renderBuffer: initialState.renderBuffer,
 			};
 		case 'RESET':
 			return {
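The `RENDER_THROTTLE` logic means chunks arriving faster than once per 50 ms only advance `renderBuffer`; the rendered `response` catches up on the next chunk outside the window, or on `REQUEST_AI_SUCCESS`, which always carries the full text. A small walk-through, assuming the reducer's default export is importable:

```js
// Hypothetical illustration of the throttle behaviour.
let state = reducer(undefined, { type: 'REQUEST_AI_PENDING' });

// First chunk: renderBuffer.lastUpdate is 0, so response renders at once.
state = reducer(state, {
	type: 'REQUEST_AI_CHUNK',
	payload: { content: 'Hel', progress: { charsProcessed: 3 } },
});

// A second chunk within 50 ms: only renderBuffer.content advances;
// state.response still holds the HTML rendered from 'Hel'.
state = reducer(state, {
	type: 'REQUEST_AI_CHUNK',
	payload: { content: 'Hello', progress: { charsProcessed: 5 } },
});
```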

View file

@@ -26,6 +26,14 @@ export function getResponse(state) {
 	return state?.response || false;
 }

+export function getProgress(state) {
+	return state?.progress || false;
+}
+
+export function getRenderBuffer(state) {
+	return state?.renderBuffer || false;
+}
+
 export function getError(state) {
 	return state?.error || false;
 }

View file

@@ -0,0 +1,150 @@
export default class AIStreamProcessor {
	constructor(dispatch) {
		this.dispatch = dispatch;
		this.buffer = '';
		this.responseText = '';
		this.decoder = new TextDecoder();
		this.lastUpdate = Date.now();
		this.updateQueue = [];
		this.isProcessing = false;

		// Configuration
		this.CONFIG = {
			// ms between chunks.
			CHUNK_DELAY: 30,
			// ms minimum between updates.
			UPDATE_INTERVAL: 50,
			// number of chunks to batch.
			BATCH_SIZE: 3,
			// maximum queue size.
			MAX_QUEUE_SIZE: 100,
			TYPING_SPEED: {
				FAST: 20,
				MEDIUM: 35,
				SLOW: 50,
			},
		};
	}

	async processStream(reader) {
		try {
			this.startQueueProcessor();

			while (true) {
				const { value, done } = await reader.read();
				if (done) break;
				await this.processChunk(value);
			}

			await this.flushQueue();
			this.sendCompletion();
		} catch (error) {
			this.handleError(error);
		}
	}

	async processChunk(value) {
		const text = this.decoder.decode(value, { stream: true });
		const chunks = this.parseChunks(text);

		for (const chunk of chunks) {
			await this.queueUpdate(chunk);
		}
	}

	parseChunks(text) {
		const chunks = [];
		const lines = text.split('\n');

		for (const line of lines) {
			if (line.startsWith('data: ')) {
				try {
					const data = JSON.parse(line.slice(6));
					if (data.content) {
						chunks.push(data.content);
					}
				} catch (error) {
					// eslint-disable-next-line no-console
					console.debug('Chunk parse error:', error);
				}
			}
		}

		return chunks;
	}

	async queueUpdate(content) {
		this.updateQueue.push(content);

		// Prevent queue from growing too large.
		if (this.updateQueue.length > this.CONFIG.MAX_QUEUE_SIZE) {
			await this.flushQueue();
		}
	}

	async startQueueProcessor() {
		if (this.isProcessing) return;

		this.isProcessing = true;

		while (this.updateQueue.length > 0) {
			const batch = this.updateQueue.splice(0, this.CONFIG.BATCH_SIZE);
			await this.processBatch(batch);
		}

		this.isProcessing = false;
	}

	async processBatch(batch) {
		const content = batch.join('');
		this.responseText += content;

		// Throttle updates.
		const now = Date.now();
		if (now - this.lastUpdate >= this.CONFIG.UPDATE_INTERVAL) {
			this.dispatch({
				type: 'REQUEST_AI_CHUNK',
				payload: {
					content: this.responseText,
					progress: this.calculateProgress(),
				},
			});
			this.lastUpdate = now;
		}

		await new Promise((resolve) =>
			setTimeout(resolve, this.CONFIG.CHUNK_DELAY)
		);
	}

	calculateProgress() {
		// Implement your progress calculation logic.
		return {
			charsProcessed: this.responseText.length,
			queueSize: this.updateQueue.length,
			isComplete: false,
		};
	}

	async flushQueue() {
		while (this.updateQueue.length > 0) {
			await this.startQueueProcessor();
		}
	}

	sendCompletion() {
		this.dispatch({
			type: 'REQUEST_AI_SUCCESS',
			payload: {
				content: this.responseText,
				progress: { isComplete: true },
			},
		});
	}

	handleError(error) {
		this.dispatch({
			type: 'REQUEST_AI_ERROR',
			payload: error.message,
		});
	}
}
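Wiring the processor up mirrors `requestAI()` earlier in this commit: construct it with the store's `dispatch`, fetch the route with `parse: false` so `apiFetch` hands back the raw `Response`, and feed in the body reader:

```js
// Usage sketch; assumes dispatch and data are in scope inside an async
// thunk, as in requestAI() above.
const streamProcessor = new AIStreamProcessor(dispatch);

const response = await apiFetch({
	path: '/mind/v1/request_ai',
	method: 'POST',
	data,
	// Keep the raw Response so the body can be streamed.
	parse: false,
});

await streamProcessor.processStream(response.body.getReader());
```

Incoming chunks are queued, joined in batches of `BATCH_SIZE`, and dispatched at most every `UPDATE_INTERVAL` ms with a `CHUNK_DELAY` pause between batches, which smooths the typing effect at the cost of slightly delaying completion.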