From ad70e5ed0aa83e79eb6956075ff69afed2b71540 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Thu, 10 Oct 2024 18:37:27 +0200
Subject: [PATCH] styled the chat interface

---
 .../src/app/plugins/remixAIPlugin.tsx         | 11 ++-
 .../src/lib/InferenceServerManager.ts         |  6 +-
 apps/vyper/src/app/utils/remix-client.tsx     | 11 +--
 .../src/agents/codeExplainAgent.ts            | 30 ++++--
 .../src/helpers/streamHandler.ts              | 90 +++++++++---------
 libs/remix-ai-core/src/index.ts               |  3 +-
 .../src/inferencers/remote/remoteInference.ts | 25 +++--
 libs/remix-ai-core/src/types/types.ts         |  3 +
 .../remix-ai/src/lib/components/Default.tsx   | 94 ++-----------------
 .../remix-ai/src/lib/components/RemixAI.tsx   |  2 +-
 .../remix-ai/src/lib/components/color.css     | 94 +++++++++++++++++++
 11 files changed, 204 insertions(+), 165 deletions(-)
 create mode 100644 libs/remix-ui/remix-ai/src/lib/components/color.css

diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 51983ea620..1ffa118234 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -3,7 +3,7 @@ import { ViewPlugin } from '@remixproject/engine-web'
 import { Plugin } from '@remixproject/engine';
 import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
 import React, { useCallback } from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, HandleStreamResponse } from '@remix/remix-ai-core';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';

 type chatRequestBufferT<T> = {
   [key in keyof T]: T[key]
 }
@@ -33,11 +33,12 @@ export class RemixAIPlugin extends ViewPlugin {
   remoteInferencer:RemoteInferencer = null
   isInferencing: boolean = false
   chatRequestBuffer: chatRequestBufferT = null
+  agent: CodeExplainAgent

   constructor(inDesktop:boolean) {
     super(profile)
     this.isOnDesktop = inDesktop
-
+    this.agent = new CodeExplainAgent(this)
     // user machine dont use ressource for remote inferencing
   }
@@ -113,11 +114,12 @@ export class RemixAIPlugin extends ViewPlugin {
       return
     }

+    const newPrompt = await this.agent.chatCommand(prompt)
     let result
     if (this.isOnDesktop) {
-      result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
+      result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
     } else {
-      result = await this.remoteInferencer.solidity_answer(prompt)
+      result = await this.remoteInferencer.solidity_answer(newPrompt)
     }
     if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
     return result
@@ -171,7 +173,6 @@ export class RemixAIPlugin extends ViewPlugin {
       prompt: prompt,
       context: context
     }
-    console.log('pipe message', pipeMessage)
     if (pipeMessage) ChatApi.composer.send(pipeMessage)
     else {
       if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
diff --git a/apps/remixdesktop/src/lib/InferenceServerManager.ts b/apps/remixdesktop/src/lib/InferenceServerManager.ts
index 87d1ae7781..741295cba8 100644
--- a/apps/remixdesktop/src/lib/InferenceServerManager.ts
+++ b/apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -404,8 +404,12 @@ export class InferenceManager implements ICompletions {
        }
        , responseType: 'stream' });

-      const userPrompt = payload[Object.keys(payload)[0]]
+      const userPrompt = payload.prompt
       let resultText = ""
+      if (payload.return_stream_response) {
+        return response
+      }
+
       response.data.on('data', (chunk: Buffer) => {
         try {
           const parsedData = JSON.parse(chunk.toString());
diff --git a/apps/vyper/src/app/utils/remix-client.tsx b/apps/vyper/src/app/utils/remix-client.tsx
index 3034dcb27d..c15c3775a8 100644
--- a/apps/vyper/src/app/utils/remix-client.tsx
+++ b/apps/vyper/src/app/utils/remix-client.tsx
@@ -8,6 +8,7 @@ import EventEmitter from 'events'
 import { Plugin } from "@remixproject/engine";
 import { CustomRemixApi } from '@remix-api'
+
 export type VyperComplierAddress = 'https://vyper2.remixproject.org/' | 'http://localhost:8000/'
 export class RemixClient extends PluginClient {
   private client = createClient>(this)
@@ -68,12 +69,10 @@ export class RemixClient extends PluginClient {
     }
     try {
       // TODO: remove! no formatting required since already handled on server
-      const formattedMessage = `
-        ${message}
-        can you explain why this error occurred and how to fix it?
-      `
-      // await this.client.call('remixAI' as any, 'error_explaining', message)
-      await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+      const file = await this.client.call('fileManager', 'getCurrentFile')
+      const content = await this.client.call('fileManager', 'readFile', file)
+      const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
+      await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
     } catch (err) {
       console.error('unable to askGpt')
       console.error(err)
diff --git a/libs/remix-ai-core/src/agents/codeExplainAgent.ts b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
index 8d1d02b89f..176de4bbba 100644
--- a/libs/remix-ai-core/src/agents/codeExplainAgent.ts
+++ b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
@@ -1,29 +1,45 @@
 // interactive code explaining and highlight security vunerabilities
 import * as fs from 'fs';

-class CodeExplainAgent {
+export class CodeExplainAgent {
   private codebase: string[]; // list of code base file
   public currentFile: string;
+  plugin

-  constructor(codebasePath: string) {
+  constructor(props) {
+    this.plugin = props
+
     // git or fs
-    this.codebase = this.loadCodebase(codebasePath);
+    const codebase = this.loadCodebase("codebasePath");
   }

   private loadCodebase(path: string): string[] {
-    const files = fs.readdirSync(path);
-    return files
-      .filter(file => file.endsWith('.ts'))
-      .flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
+    return []
   }

   public update(currentFile, lineNumber){
   }

+  async chatCommand(prompt:string){
+    // change this function with indexer or related
+    try{
+      if (prompt.includes('Explain briefly the current file')){
+        const file = await this.plugin.call('fileManager', 'getCurrentFile')
+        const content = `Explain this code:\n ${await this.plugin.call('fileManager', 'readFile', file)}`
+        return content
+      } else return prompt
+    } catch {
+      console.log('There is No file selected')
+      return 'There is No file selected'
+    }
+  }
+
   public getExplanations(currentLine: string, numSuggestions: number = 3): string[] {
     // process the code base explaining the current file and highlight some details
     const suggestions: string[] = [];
     return suggestions;
   }
 }
+
+// Handle file changed (significantly)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index ae13088c07..e4e04d8ac2 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -1,61 +1,61 @@
 import { ChatHistory } from '../prompts/chat';
-import { JsonStreamParser} from '../types/types'
+import { JsonStreamParser } from '../types/types'

-export const HandleSimpleResponse = async (response,
-  cb?: (streamText: string) => void) => {
+export const HandleSimpleResponse = async (response,
+  cb?: (streamText: string) => void) => {
+  let resultText = ''
+  const parser = new JsonStreamParser();
+
+  const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
+  for (const parsedData of chunk) {
+    if (parsedData.isGenerating) {
+      resultText += parsedData.generatedText
+      cb(parsedData.generatedText)
+    } else {
+      resultText += parsedData.generatedText
+      cb(parsedData.generatedText)
+    }
+  }
+}
+
+export const HandleStreamResponse = async (streamResponse,
+  cb?: (streamText: string) => void,
+  done_cb?: (result: string) => void) => {
+  try {
     let resultText = ''
     const parser = new JsonStreamParser();
+    const reader = streamResponse.body!.getReader();
+    const decoder = new TextDecoder();

-    const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
-    for (const parsedData of chunk) {
-      if (parsedData.isGenerating) {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+
+      try {
+        const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+        for (const parsedData of chunk) {
+          if (parsedData.isGenerating) {
            resultText += parsedData.generatedText
            cb(parsedData.generatedText)
-      } else {
+          } else {
            resultText += parsedData.generatedText
            cb(parsedData.generatedText)
+          }
        }
-      }
-}
-
-export const HandleStreamResponse = async (streamResponse,
-  cb?: (streamText: string) => void,
-  done_cb?: (result: string) => void) => {
-  try {
-    let resultText = ''
-    const parser = new JsonStreamParser();
-    const reader = streamResponse.body!.getReader();
-    const decoder = new TextDecoder();
-
-    while (true) {
-      const { done, value } = await reader.read();
-      if (done) break;
-
-      try {
-        const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
-        for (const parsedData of chunk) {
-          if (parsedData.isGenerating) {
-            resultText += parsedData.generatedText
-            cb(parsedData.generatedText)
-          } else {
-            resultText += parsedData.generatedText
-            cb(parsedData.generatedText)
-          }
-        }
-      }
-      catch (error) {
-        console.error('Error parsing JSON:', error);
-      }
-    }
-    if (done_cb) {
-      done_cb(resultText)
-    }
-  }
-  catch (error) {
+      }
+      catch (error) {
        console.error('Error parsing JSON:', error);
+      }
+    }
+    if (done_cb) {
+      done_cb(resultText)
     }
+  }
+  catch (error) {
+    console.error('Error parsing JSON:', error);
+  }
 }

 export const UpdtateChatHistory = (userPromptprompt: string, AIAnswer: string) => {
-    ChatHistory.pushHistory(userPromptprompt, AIAnswer)
+  ChatHistory.pushHistory(userPromptprompt, AIAnswer)
 }
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/index.ts b/libs/remix-ai-core/src/index.ts
index e61998f3e0..fe54a57f2f 100644
--- a/libs/remix-ai-core/src/index.ts
+++ b/libs/remix-ai-core/src/index.ts
@@ -20,4 +20,5 @@ export {
 }

 export * from './types/types'
-export * from './helpers/streamHandler'
\ No newline at end of file
+export * from './helpers/streamHandler'
+export * from './agents/codeExplainAgent'
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index f8e816bc65..ea07cee565 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -17,20 +17,21 @@ export class RemoteInferencer implements ICompletions {
   model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
   event: EventEmitter
   test_env=true
+  test_url="http://solcodertest.org/"

   constructor(apiUrl?:string, completionUrl?:string) {
-    this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? "http://127.0.0.1:7861/" : "https://solcoder.remixproject.org"
-    this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? "http://127.0.0.1:7861/" : "https://completion.remixproject.org"
+    this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
+    this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? this.test_url : "https://completion.remixproject.org"
     this.event = new EventEmitter()
   }

-  private async _makeRequest(endpoint, payload, rType:AIRequestType){
+  private async _makeRequest(payload, rType:AIRequestType){
     this.event.emit("onInference")
     const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
     try {
       const options = { headers: { 'Content-Type': 'application/json', } }
-      const result = await axios.post(`${requestURL}/${endpoint}`, payload, options)
+      const result = await axios.post(`${requestURL}`, payload, options)

       switch (rType) {
         case AIRequestType.COMPLETION:
@@ -63,11 +64,10 @@ export class RemoteInferencer implements ICompletions {
     try {
       this.event.emit('onInference')
       const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
-      const response = await fetch(`${requestURL}/${endpoint}`, {
+      const response = await fetch(requestURL, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
-          "Accept": "text/event-stream",
         },
         body: JSON.stringify(payload),
       });
@@ -118,20 +118,20 @@ export class RemoteInferencer implements ICompletions {

   async code_completion(prompt, options:IParams=CompletionParams): Promise {
     const payload = { prompt, "endpoint":"code_completion", ...options }
-    return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+    return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

   async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise {
     // const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
     const payload = {"endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
-    return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+    return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

   async code_generation(prompt, options:IParams=GenerationParams): Promise {
     // const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
     const payload = { prompt, "endpoint":"code_completion", ...options }
     if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
-    else return this._makeRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+    else return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

   async solidity_answer(prompt, options:IParams=GenerationParams): Promise {
@@ -139,20 +139,19 @@ export class RemoteInferencer implements ICompletions {
     // const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
     const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
     if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
-    else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

   async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise {
     // const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
     const payload = { prompt, "endpoint":"code_explaining", context, ...options }
     if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
-    else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

   async error_explaining(prompt, options:IParams=GenerationParams): Promise {
     const payload = { prompt, "endpoint":"error_explaining", ...options }
-    console.log("payload: ", payload)
     if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL)
-    else return this._makeRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }
 }
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index a8bbef244e..14c5af2cc3 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -122,4 +122,7 @@ export class JsonStreamParser {
     return results;
   }

+  safeJsonParseSingle<T>(chunk: string): T[] | null {
+    return JSON.parse(this.buffer);
+  }
 }
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 3d52b93a56..0d50023776 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -4,10 +4,13 @@ import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, Han
 import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
 import axios from 'axios';
 import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
-import '@nlux/themes/nova.css';
 import { JsonStreamParser } from '@remix/remix-ai-core';
 import { user, assistantAvatar } from './personas';
 import {highlighter} from '@nlux/highlighter'
+import './color.css'
+import '@nlux/themes/unstyled.css';
+// import '@nlux/themes'
+import { result } from 'lodash';

 const demoProxyServerUrl = 'https://solcoder.remixproject.org';

@@ -44,8 +47,8 @@ export const Default = (props) => {
   };
   ChatApi = useAiChatApi();
   const conversationStarters: ConversationStarter[] = [
-    {prompt: 'Explain what is a solidity contract!', icon: ⭐️},
-    {prompt: 'Explain the current file in Editor'}]
+    {prompt: 'Explain briefly the current file in Editor', icon: ⭐️},
+    {prompt: 'Explain what is a solidity contract!'}]

   // Define initial messages
   const initialMessages: ChatItem[] = [
@@ -71,7 +74,7 @@ export const Default = (props) => {
       }}
       //initialConversation={initialMessages}
       conversationOptions={{ layout: 'bubbles', conversationStarters }}
-      displayOptions={{ colorScheme: "auto", themeId: "nova" }}
+      displayOptions={{ colorScheme: "auto", themeId: "remix_ai_theme" }}
       composerOptions={{ placeholder: "Type your query",
         submitShortcut: 'Enter',
         hideStopButton: false,
@@ -83,85 +86,4 @@ export const Default = (props) => {
       }}
     />
   );
-};
-
-// export const Default = (props) => {
-//   const [searchText, setSearchText] = useState('');
-//   const [resultText, setResultText] = useState('');
-//   const pluginName = 'remixAI'
-//   const appendText = (newText) => {
-//     setResultText(resultText => resultText + newText);
-//   }
-
-//   useEffect(() => {
-//     const handleResultReady = async (e) => {
-//       appendText(e);
-//     };
-//     if (props.plugin.isOnDesktop ) {
-//       props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
-//         handleResultReady(value);
-//       })
-//     }
-//   }, [])
-
-//   return (
-//
-//
-//
-//       console.log('searchText not implememted')}
-//     >
-//
-
-//
-
-//
-
-//
-//