diff --git a/apps/circuit-compiler/src/app/components/container.tsx b/apps/circuit-compiler/src/app/components/container.tsx
index 2c7cc41e16..80663789cd 100644
--- a/apps/circuit-compiler/src/app/components/container.tsx
+++ b/apps/circuit-compiler/src/app/components/container.tsx
@@ -73,16 +73,14 @@ export function Container () {
          full circom error: ${JSON.stringify(report, null, 2)}
          explain why the error occurred and how to fix it.
          `
-        // @ts-ignore
-        await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+        await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
      } else {
        const message = `
          error message: ${error}
          full circom error: ${JSON.stringify(report, null, 2)}
          explain why the error occurred and how to fix it.
          `
-        // @ts-ignore
-        await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+        await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
      }
    } else {
      const error = report.message
@@ -91,8 +89,7 @@ export function Container () {
        full circom error: ${JSON.stringify(report, null, 2)}
        explain why the error occurred and how to fix it.
        `
-      // @ts-ignore
-      await circuitApp.plugin.call('remixAI', 'error_explaining', message)
+      await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
    }
  }
diff --git a/apps/remix-ide/src/app.js b/apps/remix-ide/src/app.js
index 91b2a42f33..796d354c5c 100644
--- a/apps/remix-ide/src/app.js
+++ b/apps/remix-ide/src/app.js
@@ -557,7 +557,7 @@ class AppComponent {
    await this.appManager.activatePlugin(['solidity-script', 'remix-templates'])

    if (isElectron()) {
-      await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID'])
+      await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat']) // 'remixAID'
    }

    this.appManager.on(
diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 8bc1427204..d6d3c78a48 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -1,9 +1,14 @@
 import * as packageJson from '../../../../../package.json'
 import { ViewPlugin } from '@remixproject/engine-web'
 import { Plugin } from '@remixproject/engine';
-import { RemixAITab } from '@remix-ui/remix-ai'
-import React from 'react';
-import { ICompletions, IModel, RemoteInferencer, IRemoteModel } from '@remix/remix-ai-core';
+import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
+import React, { useCallback } from 'react';
+import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
+import { CustomRemixApi } from '@remix-api'
+
+type chatRequestBufferT<T> = {
+  [key in keyof T]: T[key]
+}

 const profile = {
   name: 'remixAI',
@@ -11,39 +16,52 @@ const profile = {
   methods: ['code_generation', 'code_completion',
     "solidity_answer", "code_explaining",
     "code_insertion", "error_explaining",
-    "initialize"],
+    "initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
   events: [],
   icon: 'assets/img/remix-logo-blue.png',
   description: 'RemixAI provides AI services to Remix IDE.',
   kind: '',
-  // location: 'sidePanel',
+  location: 'sidePanel',
   documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
   version: packageJson.version,
   maintainedBy: 'Remix'
 }

-export class RemixAIPlugin extends Plugin {
+// add Plugin
+export class RemixAIPlugin extends ViewPlugin {
   isOnDesktop:boolean = false
   aiIsActivated:boolean = false
   readonly remixDesktopPluginName = 'remixAID'
   remoteInferencer:RemoteInferencer = null
   isInferencing: boolean = false
+  chatRequestBuffer: chatRequestBufferT<any> = null
+  agent: CodeExplainAgent
+  useRemoteInferencer:boolean = false

   constructor(inDesktop:boolean) {
     super(profile)
     this.isOnDesktop = inDesktop
-
+    this.agent = new CodeExplainAgent(this)
     // user machine dont use ressource for remote inferencing
   }

   onActivation(): void {
-    this.initialize(null, null, null, false)
+    if (this.isOnDesktop) {
+      console.log('Activating RemixAIPlugin on desktop')
+      // this.on(this.remixDesktopPluginName, 'activated', () => {
+      this.useRemoteInferencer = true
+      this.initialize(null, null, null, this.useRemoteInferencer);
+      // })
+    } else {
+      console.log('Activating RemixAIPlugin on browser')
+      this.useRemoteInferencer = true
+      this.initialize()
+    }
   }

   async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
-    if (this.isOnDesktop) {
+    if (this.isOnDesktop && !this.useRemoteInferencer) { // on desktop use remote inferencer -> false
-      console.log('initialize on desktop')
       const res = await this.call(this.remixDesktopPluginName, 'initializeModelBackend', useRemote, model1, model2)
       if (res) {
         this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
@@ -60,7 +78,6 @@ export class RemixAIPlugin extends Plugin {
       }
     } else {
-      // on browser
       this.remoteInferencer = new RemoteInferencer(remoteModel?.apiUrl, remoteModel?.completionUrl)
       this.remoteInferencer.event.on('onInference', () => {
         this.isInferencing = true
@@ -80,7 +97,7 @@
       return
     }

-    if (this.isOnDesktop) {
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
       return await this.call(this.remixDesktopPluginName, 'code_generation', prompt)
     } else {
       return await this.remoteInferencer.code_generation(prompt)
@@ -88,82 +105,109 @@
   }

   async code_completion(prompt: string, promptAfter: string): Promise<any> {
-    if (this.isOnDesktop) {
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
       return await this.call(this.remixDesktopPluginName, 'code_completion', prompt, promptAfter)
     } else {
       return await this.remoteInferencer.code_completion(prompt, promptAfter)
     }
   }

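+  // each request method below routes to the desktop inferencer only when
+  // remote inferencing is disabled; otherwise it uses the remote inferencer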
-  async solidity_answer(prompt: string): Promise<any> {
+  async solidity_answer(prompt: string, params: IParams=GenerationParams): Promise<any> {
     if (this.isInferencing) {
       this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
       return
     }

-    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
+    const newPrompt = await this.agent.chatCommand(prompt)
     let result
-    if (this.isOnDesktop) {
-      result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
+      result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
     } else {
-      result = await this.remoteInferencer.solidity_answer(prompt)
+      result = await this.remoteInferencer.solidity_answer(newPrompt)
     }
-    if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
-    // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
+    if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
     return result
   }

-  async code_explaining(prompt: string): Promise<any> {
+  async code_explaining(prompt: string, context: string, params: IParams=GenerationParams): Promise<any> {
     if (this.isInferencing) {
       this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
       return
     }

-    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
     let result
-    if (this.isOnDesktop) {
-      result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
+      result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
     } else {
-      result = await this.remoteInferencer.code_explaining(prompt)
+      result = await this.remoteInferencer.code_explaining(prompt, context, params)
     }
-    if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
-    // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
+    if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
     return result
   }

-  async error_explaining(prompt: string): Promise<any> {
+  async error_explaining(prompt: string, context: string="", params: IParams=GenerationParams): Promise<any> {
     if (this.isInferencing) {
       this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
       return
     }

-    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
-
     let result
-    if (this.isOnDesktop) {
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
       result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
     } else {
-      result = await this.remoteInferencer.error_explaining(prompt)
+      result = await this.remoteInferencer.error_explaining(prompt, params)
     }
-    if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
-    // this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
+    if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
     return result
   }

   async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
-    if (this.isOnDesktop) {
+    if (this.isOnDesktop && !this.useRemoteInferencer) {
       return await this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
     } else {
       return await this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
     }
   }

-  // render() {
-  //   return (
-  //     <RemixAITab plugin={this}></RemixAITab>
-  //   )
-  // }
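+
+  // buffer a single chat request and push a user-visible message into the
+  // chat UI; the buffered call is executed later by ProcessChatRequestBuffer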
First process the last request.") + } + } + + async ProcessChatRequestBuffer(params:IParams=GenerationParams){ + if (this.chatRequestBuffer != null){ + const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params) + this.chatRequestBuffer = null + return result + } + else { + console.log("chatRequestBuffer is empty.") + return "" + } + } + isChatRequestPending(){ + return this.chatRequestBuffer != null + } + + render() { + return ( + + ) + } } diff --git a/apps/remix-ide/src/app/tabs/locales/en/editor.json b/apps/remix-ide/src/app/tabs/locales/en/editor.json index 76bfc87a1f..6f0f367555 100644 --- a/apps/remix-ide/src/app/tabs/locales/en/editor.json +++ b/apps/remix-ide/src/app/tabs/locales/en/editor.json @@ -25,8 +25,9 @@ "editor.explainFunction": "Explain this function", "editor.explainFunctionSol": "Explain this code", "editor.explainFunction2": "Explain the function \"{name}\"", - "editor.explainFunctionByAI": "solidity code: {content}\n Explain the function {currentFunction}", - "editor.explainFunctionByAISol": "solidity code: {content}\n Explain the function {currentFunction}", + "editor.explainFunctionByAI": "```\n{content}\n```\nExplain the function {currentFunction}", + "editor.explainFunctionByAISol": "```\n{content}\n```\nExplain the function {currentFunction}", + "editor.ExplainPipeMessage": "```\n {content}\n```\nExplain the snipped above", "editor.executeFreeFunction": "Run a free function", "editor.executeFreeFunction2": "Run the free function \"{name}\"", "editor.toastText1": "This can only execute free function", diff --git a/apps/remix-ide/src/remixAppManager.js b/apps/remix-ide/src/remixAppManager.js index 2bfe56fe8f..3a11908675 100644 --- a/apps/remix-ide/src/remixAppManager.js +++ b/apps/remix-ide/src/remixAppManager.js @@ -78,7 +78,6 @@ let requiredModules = [ // 'doc-gen', 'remix-templates', 'remixAID', - 'remixAI', 'solhint', 'dgit', 'pinnedPanel', diff --git a/apps/remixdesktop/src/lib/InferenceServerManager.ts b/apps/remixdesktop/src/lib/InferenceServerManager.ts index 65ea23696e..fb180a26d6 100644 --- a/apps/remixdesktop/src/lib/InferenceServerManager.ts +++ b/apps/remixdesktop/src/lib/InferenceServerManager.ts @@ -404,8 +404,12 @@ export class InferenceManager implements ICompletions { } , responseType: 'stream' }); - const userPrompt = payload[Object.keys(payload)[0]] + const userPrompt = payload.prompt let resultText = "" + if (payload.return_stream_response) { + return response + } + response.data.on('data', (chunk: Buffer) => { try { const parsedData = JSON.parse(chunk.toString()); @@ -449,14 +453,14 @@ export class InferenceManager implements ICompletions { } } - async code_completion(context: any, params:IParams=CompletionParams): Promise { + async code_completion(prompt, promptAfter, params:IParams=CompletionParams): Promise { if (!this.isReady) { console.log('model not ready yet') return } // as of now no prompt required - const payload = { context_code: context, ...params } + const payload = { prompt, 'context':promptAfter, ...params } return this._makeInferenceRequest('code_completion', payload, AIRequestType.COMPLETION) } @@ -484,9 +488,9 @@ export class InferenceManager implements ICompletions { return } if (params.stream_result) { - return this._streamInferenceRequest('code_explaining', { code, context, ...params }) + return this._streamInferenceRequest('code_explaining', { prompt: code, context, ...params }) } else { - return this._makeInferenceRequest('code_explaining', { code, context, 
-      return this._makeInferenceRequest('code_explaining', { code, context, ...params }, AIRequestType.GENERAL)
+      return this._makeInferenceRequest('code_explaining', { prompt: code, context, ...params }, AIRequestType.GENERAL)
     }
   }
diff --git a/apps/remixdesktop/src/plugins/remixAIDektop.ts b/apps/remixdesktop/src/plugins/remixAIDektop.ts
index 30e58a8e4e..6382e28bba 100644
--- a/apps/remixdesktop/src/plugins/remixAIDektop.ts
+++ b/apps/remixdesktop/src/plugins/remixAIDektop.ts
@@ -45,6 +45,7 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {

   async onActivation(): Promise<void> {
     this.onload(() => {
+      this.emit('activated')
     })
   }

@@ -81,9 +82,9 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
     return true
   }

-  async code_completion(context: any) {
+  async code_completion(prompt: string, promptAfter: string) {
     // use general purpose model
-    return this.desktopInferencer.code_completion(context)
+    return this.desktopInferencer.code_completion(prompt, promptAfter)
   }

   async code_insertion(msg_pfx: string, msg_sfx: string) {
diff --git a/apps/vyper/src/app/utils/remix-client.tsx b/apps/vyper/src/app/utils/remix-client.tsx
index 916124b812..7eadbb2a93 100644
--- a/apps/vyper/src/app/utils/remix-client.tsx
+++ b/apps/vyper/src/app/utils/remix-client.tsx
@@ -7,6 +7,7 @@ import { ExampleContract } from '../components/VyperResult'
 import EventEmitter from 'events'
 import { CustomRemixApi } from '@remix-api'

+
 export type VyperComplierAddress = 'https://vyper2.remixproject.org/' | 'http://localhost:8000/'
 export class RemixClient extends PluginClient {
   private client = createClient<Api, Readonly<CustomRemixApi>>(this)
@@ -67,11 +68,10 @@ export class RemixClient extends PluginClient {
     }
     try {
       // TODO: remove! no formatting required since already handled on server
-      const formattedMessage = `
-        ${message}
-        can you explain why this error occurred and how to fix it?
-      `
-      await this.client.call('remixAI' as any, 'error_explaining', message)
+      const file = await this.client.call('fileManager', 'getCurrentFile')
+      const content = await this.client.call('fileManager', 'readFile', file)
+      const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
+      await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
     } catch (err) {
       console.error('unable to askGpt')
       console.error(err)
diff --git a/libs/remix-ai-core/src/agents/codeExplainAgent.ts b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
index 8d1d02b89f..83e6adf914 100644
--- a/libs/remix-ai-core/src/agents/codeExplainAgent.ts
+++ b/libs/remix-ai-core/src/agents/codeExplainAgent.ts
@@ -1,29 +1,44 @@
 // interactive code explaining and highlight security vunerabilities
 import * as fs from 'fs';

-class CodeExplainAgent {
+export class CodeExplainAgent {
   private codebase: string[]; // list of code base file
   public currentFile: string;
+  plugin

-  constructor(codebasePath: string) {
+  constructor(props) {
+    this.plugin = props
     // git or fs
-    this.codebase = this.loadCodebase(codebasePath);
+    const codebase = this.loadCodebase("codebasePath");
   }

   private loadCodebase(path: string): string[] {
-    const files = fs.readdirSync(path);
-    return files
-      .filter(file => file.endsWith('.ts'))
-      .flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
+    return []
   }

   public update(currentFile, lineNumber){
   }

+  async chatCommand(prompt:string){
+    // change this function with indexer or related
+    try {
+      if (prompt.includes('Explain briefly the current file')){
+        const file = await this.plugin.call('fileManager', 'getCurrentFile')
+        const content = `Explain this code:\n ${await this.plugin.call('fileManager', 'readFile', file)}`
+        return content
+      } else return prompt
+    } catch {
+      console.log('There is no file selected')
+      return 'There is no file selected'
+    }
+  }
+
   public getExplanations(currentLine: string, numSuggestions: number = 3): string[] {
     // process the code base explaining the current file and highlight some details
     const suggestions: string[] = [];
     return suggestions;
   }
 }
+
+// Handle file changed (significantly)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
new file mode 100644
index 0000000000..a42db7b645
--- /dev/null
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -0,0 +1,62 @@
+import { ChatHistory } from '../prompts/chat';
+import { JsonStreamParser } from '../types/types'
+
+export const HandleSimpleResponse = async (response,
+  cb?: (streamText: string) => void) => {
+  let resultText = ''
+  const parser = new JsonStreamParser();
+
+  const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
+  for (const parsedData of chunk) {
+    resultText += parsedData.generatedText
+    cb(parsedData.generatedText)
+  }
+}
+
+export const HandleStreamResponse = async (streamResponse,
+  cb: (streamText: string) => void,
+  done_cb?: (result: string) => void) => {
+  try {
+    let resultText = ''
+    const parser = new JsonStreamParser();
+    const reader = streamResponse.body?.getReader();
+    const decoder = new TextDecoder();
+
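+    // read the stream chunk by chunk; a single chunk may carry several
+    // concatenated JSON objects, so they are parsed defensively below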
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+
+      try {
+        const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+        for (const parsedData of chunk) {
+          resultText += parsedData.generatedText
+          cb(parsedData.generatedText)
+        }
+      }
+      catch (error) {
+        console.error('Error parsing JSON:', error);
+      }
+    }
+    if (done_cb) {
+      done_cb(resultText)
+    }
+  }
+  catch (error) {
+    console.error('Error parsing JSON:', error);
+  }
+}
+
+export const UpdtateChatHistory = (userPrompt: string, AIAnswer: string) => {
+  ChatHistory.pushHistory(userPrompt, AIAnswer)
+}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/index.ts b/libs/remix-ai-core/src/index.ts
index 54130d7827..fe54a57f2f 100644
--- a/libs/remix-ai-core/src/index.ts
+++ b/libs/remix-ai-core/src/index.ts
@@ -17,4 +17,8 @@ export { getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPromt,
   RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
   ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
-}
\ No newline at end of file
+}
+
+export * from './types/types'
+export * from './helpers/streamHandler'
+export * from './agents/codeExplainAgent'
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index 1c148bce22..f618133a0f 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -1,50 +1,46 @@
-import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel } from "../../types/types";
+import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel, JsonStreamParser } from "../../types/types";
+import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
 import { buildSolgptPromt } from "../../prompts/promptBuilder";
-import axios from "axios";
 import EventEmitter from "events";
 import { ChatHistory } from "../../prompts/chat";
+import axios from 'axios';

 const defaultErrorMessage = `Unable to get a response from AI server`
-
 export class RemoteInferencer implements ICompletions {
   api_url: string
   completion_url: string
   max_history = 7
   model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
   event: EventEmitter
+  test_env=false
+  test_url="http://solcodertest.org"

   constructor(apiUrl?:string, completionUrl?:string) {
-    this.api_url = apiUrl!==undefined ? apiUrl: "https://solcoder.remixproject.org"
-    this.completion_url = completionUrl!==undefined ? completionUrl : "https://completion.remixproject.org"
+    this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
+    this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? this.test_url : "https://completion.remixproject.org"
     this.event = new EventEmitter()
   }

-  private async _makeRequest(data, rType:AIRequestType){
+  private async _makeRequest(payload, rType:AIRequestType){
     this.event.emit("onInference")
-    const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
-    const userPrompt = data.data[0]
+    const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
     try {
-      const result = await axios(requesURL, {
-        method: 'POST',
-        headers: {
-          Accept: 'application/json',
-          'Content-Type': 'application/json',
-        },
-        data: JSON.stringify(data),
-      })
+      const options = { headers: { 'Content-Type': 'application/json', } }
+      const result = await axios.post(`${requestURL}`, payload, options)

       switch (rType) {
         case AIRequestType.COMPLETION:
           if (result.statusText === "OK")
-            return result.data.data[0]
+            return result.data.generatedText
           else {
             return defaultErrorMessage
           }
         case AIRequestType.GENERAL:
           if (result.statusText === "OK") {
-            const resultText = result.data.data[0]
-            ChatHistory.pushHistory(userPrompt, resultText)
+            if (result.data?.error) return result.data?.error
+            const resultText = result.data.generatedText
+            ChatHistory.pushHistory(payload.prompt, resultText)
             return resultText
           } else {
             return defaultErrorMessage
@@ -54,46 +50,57 @@ export class RemoteInferencer implements ICompletions {
     } catch (e) {
       ChatHistory.clearHistory()
       console.error('Error making request to Inference server:', e.message)
-      return e
     } finally {
       this.event.emit("onInferenceDone")
     }
   }

-  private async _streamInferenceRequest(data, rType:AIRequestType){
+  private async _streamInferenceRequest(endpoint, payload, rType:AIRequestType){
+    let resultText = ""
     try {
       this.event.emit('onInference')
-      const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
-      const userPrompt = data.data[0]
-      const response = await axios({
-        method: 'post',
-        url: requesURL,
-        data: data,
-        headers: { 'Content-Type': 'application/json', "Accept": "text/event-stream" },
-        responseType: 'stream'
+      const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
+      const response = await fetch(requestURL, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify(payload),
       });

-      let resultText = ""
-      response.data.on('data', (chunk: Buffer) => {
+      if (payload.return_stream_response) {
+        return response
+      }
+
+      const reader = response.body?.getReader();
+      const decoder = new TextDecoder();
+      const parser = new JsonStreamParser();
+      // eslint-disable-next-line no-constant-condition
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) break;
+
         try {
-          const parsedData = JSON.parse(chunk.toString());
-          if (parsedData.isGenerating) {
-            this.event.emit('onStreamResult', parsedData.generatedText);
-            resultText = resultText + parsedData.generatedText
-          } else {
-            // stream generation is complete
-            resultText = resultText + parsedData.generatedText
-            ChatHistory.pushHistory(userPrompt, resultText)
-            return parsedData.generatedText
+          const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
+          for (const parsedData of chunk) {
+            if (parsedData.isGenerating) {
+              this.event.emit('onStreamResult', parsedData.generatedText);
+              resultText = resultText + parsedData.generatedText
+            } else {
+              // stream generation is complete
+              resultText = resultText + parsedData.generatedText
+              ChatHistory.pushHistory(payload.prompt, resultText)
+              return parsedData.generatedText
+            }
           }
         } catch (error) {
           console.error('Error parsing JSON:', error);
          ChatHistory.clearHistory()
         }
-      });
+      }

-      return "" // return empty string for now as handled in event
+      return resultText
     } catch (error) {
       ChatHistory.clearHistory()
       console.error('Error making stream request to Inference server:', error.message);
@@ -103,39 +110,38 @@
     }
   }

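+  // one thin wrapper per server endpoint: each builds a flat payload and
+  // picks the streaming or the plain request path based on the options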
-  async code_completion(prompt, promptAfter, options:IParams=null): Promise<any> {
-    const payload = !options?
-      { "data": [prompt, "code_completion", promptAfter, false, 30, 0.9, 0.90, 50]} :
-      { "data": [prompt, "code_completion", promptAfter, options.stream_result,
-        options.max_new_tokens, options.temperature, options.top_p, options.top_k]
-      }
-
+  async code_completion(prompt, promptAfter, options:IParams=CompletionParams): Promise<any> {
+    const payload = { prompt, 'context':promptAfter, "endpoint":"code_completion", ...options }
     return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

-  async code_insertion(msg_pfx, msg_sfx): Promise<any> {
-    const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
+  async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise<any> {
+    const payload = { "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
     return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

-  async code_generation(prompt): Promise<any> {
-    const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
-    return this._makeRequest(payload, AIRequestType.COMPLETION)
+  async code_generation(prompt, options:IParams=GenerationParams): Promise<any> {
+    const payload = { prompt, "endpoint":"code_completion", ...options }
+    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+    else return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

-  async solidity_answer(prompt): Promise<any> {
+  async solidity_answer(prompt, options:IParams=GenerationParams): Promise<any> {
     const main_prompt = buildSolgptPromt(prompt, this.model_op)
-    const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
-    return this._makeRequest(payload, AIRequestType.GENERAL)
+    const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
+    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

-  async code_explaining(prompt, context:string=""): Promise<any> {
-    const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
-    return this._makeRequest(payload, AIRequestType.GENERAL)
+  async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise<any> {
+    const payload = { prompt, "endpoint":"code_explaining", context, ...options }
+    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

-  async error_explaining(prompt): Promise<any> {
-    const payload = { "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}
-    return this._makeRequest(payload, AIRequestType.GENERAL)
+  async error_explaining(prompt, options:IParams=GenerationParams): Promise<any> {
+    const payload = { prompt, "endpoint":"error_explaining", ...options }
+    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
   }
 }
diff --git a/libs/remix-ai-core/src/prompts/promptBuilder.ts b/libs/remix-ai-core/src/prompts/promptBuilder.ts
index 0fab3837c9..27c1c3705d 100644
--- a/libs/remix-ai-core/src/prompts/promptBuilder.ts
+++ b/libs/remix-ai-core/src/prompts/promptBuilder.ts
@@ -2,7 +2,7 @@
 import { RemoteBackendOPModel } from "../types/types"
 import { ChatHistory } from "./chat"

 export const PromptBuilder = (inst, answr, modelop) => {
-  if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|><|start_header_id|>assistant<|end_header_id|> ${answr}`
+  if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|eot_id|>\n<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|> ${answr}\n`
   if (modelop === RemoteBackendOPModel.DEEPSEEK) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
   if (modelop === RemoteBackendOPModel.MISTRAL) return ""
 }
@@ -21,8 +21,10 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
       else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
       else newPrompt += PromptBuilder(question, answer, modelOP)
     }
-    // finaly
-    newPrompt = "sol-gpt " + newPrompt + PromptBuilder(userPrompt.split('gpt')[1], "", modelOP)
+
+    // remove sol-gpt or gpt from the start of the prompt
+    const parsedPrompt = userPrompt.replace(/^sol-gpt|^gpt/gm, '')
+    newPrompt = "sol-gpt " + newPrompt + PromptBuilder(parsedPrompt, "", modelOP)
     return newPrompt
   }
 }
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/models.ts b/libs/remix-ai-core/src/types/models.ts
index e3ed62fe74..0f46dbd75b 100644
--- a/libs/remix-ai-core/src/types/models.ts
+++ b/libs/remix-ai-core/src/types/models.ts
@@ -61,6 +61,7 @@ const CompletionParams:IParams = {
   topK: 40,
   topP: 0.92,
   max_new_tokens: 15,
+  stream_result: false,
 }

 const InsertionParams:IParams = {
@@ -68,6 +69,7 @@ const InsertionParams:IParams = {
   topK: 40,
   topP: 0.92,
   max_new_tokens: 150,
+  stream_result: false,
 }

 const GenerationParams:IParams = {
@@ -76,6 +78,8 @@ const GenerationParams:IParams = {
   topP: 0.92,
   max_new_tokens: 2000,
   stream_result: false,
+  repeat_penalty: 1.2,
+  terminal_output: false,
 }

 export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/remix-project.code-workspace b/libs/remix-ai-core/src/types/remix-project.code-workspace
new file mode 100644
index 0000000000..01fe49386a
--- /dev/null
+++ b/libs/remix-ai-core/src/types/remix-project.code-workspace
@@ -0,0 +1,10 @@
+{
+  "folders": [
+    {
+      "path": "../../../.."
+    },
+    {
+      "path": "../../../../../remix-wildcard"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/libs/remix-ai-core/src/types/types.ts b/libs/remix-ai-core/src/types/types.ts
index fe7ac5469d..a6d2c9eb88 100644
--- a/libs/remix-ai-core/src/types/types.ts
+++ b/libs/remix-ai-core/src/types/types.ts
@@ -58,7 +58,7 @@ export interface IParams {
   temperature?: number;
   max_new_tokens?: number;
   repetition_penalty?: number;
-  repeatPenalty?:any
+  repeat_penalty?:any
   no_repeat_ngram_size?: number;
   num_beams?: number;
   num_return_sequences?: number;
@@ -71,6 +71,8 @@ export interface IParams {
   topK?: number;
   topP?: number;
   temp?: number;
+  return_stream_response?: boolean;
+  terminal_output?: boolean;
 }

 export enum AIRequestType {
@@ -85,3 +87,48 @@ export enum RemoteBackendOPModel{
   CODELLAMA,
   MISTRAL
 }
+
+interface GeneratedTextObject {
+  generatedText: string;
+  isGenerating: boolean;
+}
+export class JsonStreamParser {
+  buffer: string
+  constructor() {
+    this.buffer = '';
+  }
+
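+  // scan the buffer for balanced '{...}' spans, parse every complete object,
+  // and keep any trailing partial object in the buffer for the next chunk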
+  safeJsonParse<T>(chunk: string): T[] | null {
+    this.buffer += chunk;
+    const results = [];
+    let startIndex = 0;
+    let endIndex: number;
+    while ((endIndex = this.buffer.indexOf('}', startIndex)) !== -1) {
+      // check if the next character is an opening curly bracket
+      let modifiedEndIndex = endIndex;
+      if ((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) !== -1 ) {
+        endIndex = modifiedEndIndex - 1;
+      }
+
+      if (((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) === -1) &&
+        (this.buffer.indexOf('}', endIndex) < this.buffer.length)) {
+        endIndex = this.buffer.indexOf('}', endIndex+1) < 0 ? this.buffer.length - 1 : this.buffer.indexOf('}', endIndex+1);
+      }
+
+      const jsonStr = this.buffer.slice(startIndex, endIndex + 1);
+      try {
+        const obj: GeneratedTextObject = JSON.parse(jsonStr);
+        results.push(obj);
+      } catch (error) {
+        console.error('Error parsing JSON:', error);
+      }
+      startIndex = endIndex + 1;
+    }
+    this.buffer = this.buffer.slice(startIndex);
+    return results;
+  }
+
+  safeJsonParseSingle<T>(chunk: string): T[] | null {
+    return JSON.parse(this.buffer);
+  }
+}
diff --git a/libs/remix-api/src/lib/plugins/remixai-api.ts b/libs/remix-api/src/lib/plugins/remixai-api.ts
index cb32e2a61c..0ea1498151 100644
--- a/libs/remix-api/src/lib/plugins/remixai-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixai-api.ts
@@ -5,9 +5,11 @@ export interface IRemixAI {
   events: {
     onStreamResult(streamText: string): Promise<void>,
     activated(): Promise<void>,
+    onInference():void,
+    onInferenceDone():void,
   } & StatusEvents,
   methods: {
-    code_completion(context: string): Promise<string>
+    code_completion(context: string): Promise<any>
     code_insertion(msg_pfx: string, msg_sfx: string): Promise<any>,
     code_generation(prompt: string): Promise<any>,
     code_explaining(code: string, context?: string): Promise<any>,
diff --git a/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
new file mode 100644
index 0000000000..dc9deb7e82
--- /dev/null
+++ b/libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
@@ -0,0 +1,23 @@
+import { IParams } from "@remix/remix-ai-core";
+import { StatusEvents } from "@remixproject/plugin-utils";
+
+export interface IRemixAID {
+  events: {
+    activated():void,
+    onInference():void,
+    onInferenceDone():void,
+    onStreamResult(streamText: string):void,
+
+  } & StatusEvents,
+  methods: {
+    code_completion(context: string): Promise<any>
+    code_insertion(msg_pfx: string, msg_sfx: string): Promise<any>,
+    code_generation(prompt: string): Promise<any>,
+    code_explaining(code: string, context?: string): Promise<any>,
+    error_explaining(prompt: string): Promise<any>,
+    solidity_answer(prompt: string): Promise<any>,
+    initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise<any>,
+    chatPipe(pipeMessage: string): Promise<any>,
+    ProcessChatRequestBuffer(params:IParams): Promise<any>,
+  }
+}
\ No newline at end of file
diff --git a/libs/remix-api/src/lib/remix-api.ts b/libs/remix-api/src/lib/remix-api.ts
index fa052bdd03..12e29f62f5 100644
--- a/libs/remix-api/src/lib/remix-api.ts
+++ b/libs/remix-api/src/lib/remix-api.ts
@@ -16,7 +16,6 @@ import { IMatomoApi } from "./plugins/matomo-api"
 import { IRemixAI } from "./plugins/remixai-api"
 import { IRemixAID } from "./plugins/remixAIDesktop-api"

-
 export interface ICustomRemixApi extends IRemixApi {
   dgitApi: IGitApi
   config: IConfigApi
diff --git a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
index 37aaebd60b..674d069f1c 100644
--- a/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
+++ b/libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts
@@ -1,6 +1,8 @@
 /* eslint-disable no-control-regex */
 import { EditorUIProps, monacoTypes } from '@remix-ui/editor';
+import { JsonStreamParser } from '@remix/remix-ai-core';
 import * as monaco from 'monaco-editor';
+
 const _paq = (window._paq = window._paq || [])

 export class RemixInLineCompletionProvider implements monacoTypes.languages.InlineCompletionsProvider {
@@ -26,9 +28,8 @@
   }

   async provideInlineCompletions(model: monacoTypes.editor.ITextModel, position: monacoTypes.Position, context: monacoTypes.languages.InlineCompletionContext, token: monacoTypes.CancellationToken): Promise<monacoTypes.languages.InlineCompletions> {
-    if (context.selectedSuggestionInfo) {
-      return { items: []};
-    }
+    const isActivate = await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
+    if (!isActivate) return

     const currentTime = Date.now();
     const timeSinceLastRequest = currentTime - this.lastRequestTime;
@@ -61,17 +62,11 @@
     if (!word.endsWith(' ') &&
       !word.endsWith('.') &&
+      !word.endsWith('"') &&
       !word.endsWith('(')) {
       return;
     }

-    try {
-      const isActivate = await await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
-      if (!isActivate) return
-    } catch (err) {
-      return;
-    }
-
     try {
       const split = word.split('\n')
       if (split.length < 2) return
@@ -81,8 +76,8 @@
         this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'RemixAI - generating code for following comment: ' + ask.replace('///', '') })

         const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
-        this.task = 'code_generation'
         _paq.push(['trackEvent', 'ai', 'remixAI', 'code_generation'])
+        this.task = 'code_generation'

         const parsedData = data.trimStart() //JSON.parse(data).trimStart()

         const item: monacoTypes.languages.InlineCompletion = {
@@ -93,7 +88,7 @@
         this.currentCompletion.item = item
         return {
           items: [item],
-          enableForwardStability: false
+          enableForwardStability: true
         }
       }
     } catch (e) {
@@ -110,11 +105,6 @@
       return { items: []}; // do not do completion on single and multiline comment
     }
-    // abort if there is a signal
-    if (token.isCancellationRequested) {
-      return
-    }
-
     if (word.replace(/ +$/, '').endsWith('\n')){
       // Code insertion
       try {
@@ -123,6 +113,7 @@
         const generatedText = output // no need to clean it. should already be
         this.task = 'code_insertion'
+        _paq.push(['trackEvent', 'ai', 'remixAI', this.task])
         const item: monacoTypes.languages.InlineCompletion = {
           insertText: generatedText,
           range: new monaco.Range(position.lineNumber, position.column, position.lineNumber, position.column)
         }
@@ -132,10 +123,11 @@
         return {
           items: [item],
-          enableForwardStability: false,
+          enableForwardStability: true,
         }
       }
       catch (err){
+        console.log("err: " + err)
         return
       }
     }
@@ -151,8 +143,8 @@
       if (generatedText.indexOf('@custom:dev-run-script./') !== -1) {
         clean = generatedText.replace('@custom:dev-run-script', '@custom:dev-run-script ')
       }
-      clean = clean.replace(word, '').trimStart()
-      clean = this.process_completion(clean)
+      clean = clean.replace(word, '')
+      clean = this.process_completion(clean, word_after)

       const item: monacoTypes.languages.InlineCompletion = {
@@ -163,22 +155,30 @@
       return {
         items: [item],
-        enableForwardStability: true
+        enableForwardStability: true,
       }
     } catch (err) {
-      return
+      const item: monacoTypes.languages.InlineCompletion = { insertText: " " }
+      return {
+        items: [item],
+        enableForwardStability: true,
+      }
     }
   }

-  process_completion(data: any) {
-    let clean = data.split('\n')[0].startsWith('\n') ? [data.split('\n')[0], data.split('\n')[1]].join('\n'): data.split('\n')[0]
-
+  process_completion(data: any, word_after: any) {
+    let clean = data
     // if clean starts with a comment, remove it
     if (clean.startsWith('//') || clean.startsWith('/*') || clean.startsWith('*') || clean.startsWith('*/')){
+      console.log("clean starts with comment")
       return ""
     }
-    // remove comment inline
-    clean = clean.split('//')[0].trimEnd()
+
+    const text_after = word_after.split('\n')[0].trim()
+    if (clean.toLowerCase().includes(text_after.toLowerCase())){
+      clean = clean.replace(text_after, '') // apply regex to conserve the case
+    }
+
     return clean
   }
diff --git a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
index be2367d06a..ab8de7ec34 100644
--- a/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
+++ b/libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
@@ -776,7 +776,11 @@ export const EditorUI = (props: EditorUIProps) => {
       const file = await props.plugin.call('fileManager', 'getCurrentFile')
       const content = await props.plugin.call('fileManager', 'readFile', file)
       const message = intl.formatMessage({ id: 'editor.generateDocumentationByAI' }, { content, currentFunction: currentFunction.current })
-      const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
+
+      // do not stream this response
+      const pipeMessage = `Generate the documentation for the function **${currentFunction.current}**`
+      // const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
+      const cm = await props.plugin.call('remixAI' as any, 'chatPipe', 'solidity_answer', message, '', pipeMessage)

       const natSpecCom = "\n" + extractNatspecComments(cm)
       const cln = await props.plugin.call('codeParser', "getLineColumnOfNode", currenFunctionNode)
@@ -827,9 +831,9 @@ export const EditorUI = (props: EditorUIProps) => {
       ],
       run: async () => {
         const file = await props.plugin.call('fileManager', 'getCurrentFile')
-        const content = await props.plugin.call('fileManager', 'readFile', file)
-        const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content, currentFunction: currentFunction.current })
-        await props.plugin.call('remixAI', 'code_explaining', message, content)
+        const context = await props.plugin.call('fileManager', 'readFile', file)
+        const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current })
+        await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context)
         _paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
       },
     }
@@ -848,8 +852,9 @@
         const file = await props.plugin.call('fileManager', 'getCurrentFile')
         const content = await props.plugin.call('fileManager', 'readFile', file)
         const selectedCode = editor.getModel().getValueInRange(editor.getSelection())
+        const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode })

-        await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
+        await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage)
         _paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
       },
     }
diff --git a/libs/remix-ui/remix-ai/src/index.ts b/libs/remix-ui/remix-ai/src/index.ts
index 56f0a076a1..e8f9ee13f2 100644
--- a/libs/remix-ui/remix-ai/src/index.ts
+++ b/libs/remix-ui/remix-ai/src/index.ts
@@ -1 +1 @@
-export { RemixAITab } from './lib/components/RemixAI'
\ No newline at end of file
+export { RemixAITab, ChatApi } from './lib/components/RemixAI'
\ No newline at end of file
diff --git a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
index 59239309ed..459797294c 100644
--- a/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
+++ b/libs/remix-ui/remix-ai/src/lib/components/Default.tsx
@@ -1,84 +1,83 @@
-import React, { useContext, useEffect, useState } from 'react'
+import React from 'react'
 import '../remix-ai.css'
-import { DefaultModels } from '@remix/remix-ai-core';
+import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
+import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
+import { AiChat, useAsStreamAdapter, ChatItem } from '@nlux/react';
+import { JsonStreamParser } from '@remix/remix-ai-core';
+import { user, assistantAvatar } from './personas';
+import { highlighter } from '@nlux/highlighter'
+import './color.css'
+import '@nlux/themes/unstyled.css';
+
+export let ChatApi = null

 export const Default = (props) => {
-  const [searchText, setSearchText] = useState('');
-  const [resultText, setResultText] = useState('');
-  const pluginName = 'remixAI'
-  const appendText = (newText) => {
-    setResultText(resultText => resultText + newText);
-  }
+  const send: StreamSend = async (
+    prompt: string,
+    observer: StreamingAdapterObserver,
+  ) => {
+    GenerationParams.stream_result = true
+    GenerationParams.return_stream_response = GenerationParams.stream_result

-  useEffect(() => {
-    const handleResultReady = async (e) => {
-      appendText(e);
-    };
-    if (props.plugin.isOnDesktop ) {
-      props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
-        handleResultReady(value);
-      })
+    let response = null
+    if (await props.plugin.call('remixAI', 'isChatRequestPending')){
+      response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
+    } else {
+      response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
     }
-  }, [])
-
-  return (
-    <div>
-      <input
-        type="text"
-        value={searchText}
-        onChange={() => console.log('searchText not implememted')}
-      >
-      </input>
-      <div>{resultText}</div>
-    </div>
-  )
+
+    if (GenerationParams.return_stream_response) HandleStreamResponse(response,
+      (text) => {observer.next(text)},
+      (result) => {
+        observer.next(' ') // Add a space to flush the last message
+        ChatHistory.pushHistory(prompt, result)
+        observer.complete()
+      }
+    )
+    else {
+      observer.next(response)
+      observer.complete()
+    }
+  };
+
+  ChatApi = useAiChatApi();
+  const conversationStarters: ConversationStarter[] = [
+    { prompt: 'Explain briefly the current file in Editor', icon: <span>⭐️</span> },
+    { prompt: 'Explain what is a solidity contract!' }]