From 9ac8dfb226eb07e15a9a8810f74a5a98f22e169d Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Wed, 6 Nov 2024 12:19:46 +0100
Subject: [PATCH] call popupPanel and fix the sol/gpt command in the terminal

---
 .../src/app/components/container.tsx          | 15 ++++++++---
 .../src/app/plugins/remixAIPlugin.tsx         |  5 ++++
 apps/vyper/src/app/utils/remix-client.tsx     |  6 ++++-
 .../src/helpers/streamHandler.ts              |  1 +
 .../src/inferencers/remote/remoteInference.ts | 18 ++++++++-----
 libs/remix-ui/renderer/src/lib/renderer.tsx   |  6 ++++-
 libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx  | 27 +++----------------
 7 files changed, 43 insertions(+), 35 deletions(-)

diff --git a/apps/circuit-compiler/src/app/components/container.tsx b/apps/circuit-compiler/src/app/components/container.tsx
index 80663789cd..765f33abe3 100644
--- a/apps/circuit-compiler/src/app/components/container.tsx
+++ b/apps/circuit-compiler/src/app/components/container.tsx
@@ -73,14 +73,20 @@ export function Container () {
         full circom error: ${JSON.stringify(report, null, 2)}
         explain why the error occurred and how to fix it.
         `
-        await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+        await circuitApp.plugin.call('popupPanel' as any, 'showPopupPanel', true)
+        setTimeout(async () => {
+          await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+        }, 500)
       } else {
         const message = `
         error message: ${error}
         full circom error: ${JSON.stringify(report, null, 2)}
         explain why the error occurred and how to fix it.
         `
-        await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+        await circuitApp.plugin.call('popupPanel' as any, 'showPopupPanel', true)
+        setTimeout(async () => {
+          await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+        }, 500)
       }
     } else {
       const error = report.message
@@ -89,7 +95,10 @@
       full circom error: ${JSON.stringify(report, null, 2)}
       explain why the error occurred and how to fix it.
       `
-      await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+      await circuitApp.plugin.call('popupPanel' as any, 'showPopupPanel', true)
+      setTimeout(async () => {
+        await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+      }, 500)
     }
   }

diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
index 6004c20c56..60b79dc3e3 100644
--- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
+++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
@@ -121,6 +121,11 @@ export class RemixAIPlugin extends ViewPlugin {
       this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
       return
     }

+    if (prompt.trimStart().startsWith('gpt') || prompt.trimStart().startsWith('sol-gpt')) {
+      params.terminal_output = true
+      params.stream_result = false
+      params.return_stream_response = false
+    }
     const newPrompt = await this.agent.chatCommand(prompt)
     let result
diff --git a/apps/vyper/src/app/utils/remix-client.tsx b/apps/vyper/src/app/utils/remix-client.tsx
index 7eadbb2a93..a52e34ab8a 100644
--- a/apps/vyper/src/app/utils/remix-client.tsx
+++ b/apps/vyper/src/app/utils/remix-client.tsx
@@ -71,7 +71,11 @@ export class RemixClient extends PluginClient {
       const file = await this.client.call('fileManager', 'getCurrentFile')
       const content = await this.client.call('fileManager', 'readFile', file)
       const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
-      await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
+
+      await this.client.call('popupPanel', 'showPopupPanel', true)
+      setTimeout(async () => {
+        await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
+      }, 500)
     } catch (err) {
       console.error('unable to askGpt')
       console.error(err)
diff --git a/libs/remix-ai-core/src/helpers/streamHandler.ts b/libs/remix-ai-core/src/helpers/streamHandler.ts
index a42db7b645..9eb5511861 100644
--- a/libs/remix-ai-core/src/helpers/streamHandler.ts
+++ b/libs/remix-ai-core/src/helpers/streamHandler.ts
@@ -54,6 +54,7 @@ export const HandleStreamResponse = async (streamResponse,
     }
   } catch (error) {
     console.error('Error parsing JSON:', error);
+    return { 'generateText': '', 'isGenerating': false }
   }
 }

diff --git a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
index f618133a0f..c6ad381618 100644
--- a/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
+++ b/libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -27,7 +27,7 @@ export class RemoteInferencer implements ICompletions {

     try {
       const options = { headers: { 'Content-Type': 'application/json', } }
-      const result = await axios.post(`${requestURL}`, payload, options)
+      const result = await axios.post(requestURL, payload, options)

       switch (rType) {
         case AIRequestType.COMPLETION:
@@ -56,7 +56,7 @@
     }
   }

-  private async _streamInferenceRequest(endpoint, payload, rType:AIRequestType){
+  private async _streamInferenceRequest(payload, rType:AIRequestType){
     let resultText = ""
     try {
       this.event.emit('onInference')
@@ -122,26 +122,32 @@

   async code_generation(prompt, options:IParams=GenerationParams): Promise<any> {
     const payload = { prompt, "endpoint":"code_completion", ...options }
-    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
+    if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.COMPLETION)
     else return this._makeRequest(payload, AIRequestType.COMPLETION)
   }

   async solidity_answer(prompt, options:IParams=GenerationParams): Promise<any> {
     const main_prompt = buildSolgptPromt(prompt, this.model_op)
     const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
-    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
     else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

   async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise<any> {
     const payload = { prompt, "endpoint":"code_explaining", context, ...options }
-    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
     else return this._makeRequest(payload, AIRequestType.GENERAL)
   }

   async error_explaining(prompt, options:IParams=GenerationParams): Promise<any> {
     const payload = { prompt, "endpoint":"error_explaining", ...options }
-    if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
+    if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
+    else return this._makeRequest(payload, AIRequestType.GENERAL)
+  }
+
+  async vulnerability_check(prompt, options:IParams=GenerationParams): Promise<any> {
+    const payload = { prompt, "endpoint":"vulnerability_check", ...options }
+    if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
     else return this._makeRequest(payload, AIRequestType.GENERAL)
   }
 }
diff --git a/libs/remix-ui/renderer/src/lib/renderer.tsx b/libs/remix-ui/renderer/src/lib/renderer.tsx
index 45aa945b2e..966cb23f07 100644
--- a/libs/remix-ui/renderer/src/lib/renderer.tsx
+++ b/libs/remix-ui/renderer/src/lib/renderer.tsx
@@ -90,7 +90,11 @@ export const Renderer = ({ message, opt, plugin, context }: RendererProps) => {
     try {
       const content = await plugin.call('fileManager', 'readFile', editorOptions.errFile)
       const message = intl.formatMessage({ id: `${context || 'solidity' }.openaigptMessage` }, { content, messageText })
-      await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+
+      await plugin.call('popupPanel', 'showPopupPanel', true)
+      setTimeout(async () => {
+        await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
+      }, 500)
       _paq.push(['trackEvent', 'ai', 'remixAI', 'error_explaining_SolidityError'])
     } catch (err) {
       console.error('unable to askGtp')
diff --git a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
index eaaffcad15..98dd90a4ad 100644
--- a/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
+++ b/libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
@@ -269,31 +269,10 @@ export const TabsUI = (props: TabsUIProps) => {
           if (tabsState.currentExt === 'sol') {
             setExplaining(true)
             // if plugin is pinned,
-            if (await props.plugin.call('pinnedPanel', 'currentFocus') === 'remixAI'){
+            await props.plugin.call('popupPanel', 'showPopupPanel', true)
+            setTimeout(async () => {
               await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
-            }
-            else {
-              const profile = {
-                name: 'remixAI',
-                displayName: 'Remix AI',
-                methods: ['code_generation', 'code_completion',
-                  "solidity_answer", "code_explaining",
-                  "code_insertion", "error_explaining",
-                  "initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
-                events: [],
-                icon: 'assets/img/remix-logo-blue.png',
-                description: 'RemixAI provides AI services to Remix IDE.',
-                kind: '',
-                location: 'sidePanel',
-                documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
-                maintainedBy: 'Remix'
-              }
-              // await props.plugin.call('sidePanel', 'focus', 'remixAI')
-              await props.plugin.call('sidePanel', 'pinView', profile)
-              setTimeout(async () => {
-                await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
-              }, 500)
-            }
+            }, 500)
             setExplaining(false)
             _paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
           }
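
A possible follow-up, not part of this patch: the sequence of showing the popup panel, waiting briefly, and then handing the prompt to RemixAI through chatPipe is now repeated in the circuit compiler, the Vyper client, the renderer and the tabs bar. A small shared helper could centralize it. The sketch below is an illustration only: the helper name openRemixAiChat and its signature are invented for this note, plugin stands for any client exposing call() as used in the hunks above, and the 500 ms delay simply mirrors the value used throughout the patch.

// Sketch of a shared helper for the repeated "open popup panel, then chatPipe" flow.
// Assumptions: the helper name and signature are hypothetical; `plugin` is any client
// exposing call(name, method, ...args) as in the hunks above; the 500 ms default delay
// mirrors the patch and gives the popup panel time to mount before chatPipe runs.
export type ChatPipeMethod = 'error_explaining' | 'code_explaining' | 'solidity_answer'

export async function openRemixAiChat (
  plugin: { call: (name: string, method: string, ...args: any[]) => Promise<any> },
  method: ChatPipeMethod,
  message: string,
  delayMs: number = 500
): Promise<void> {
  // Bring the RemixAI popup panel to the front first...
  await plugin.call('popupPanel', 'showPopupPanel', true)
  // ...then defer the chat request so the panel is ready to receive it.
  setTimeout(async () => {
    await plugin.call('remixAI', 'chatPipe', method, message)
  }, delayMs)
}

// Example usage, mirroring the renderer.tsx hunk:
//   await openRemixAiChat(plugin, 'error_explaining', message)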