Linked "explain" action to Remix AI chat output

pull/5370/head
STetsing 2 months ago
parent e84c36aaf5
commit 2a7e326112
  1. 55
      apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
  2. 62
      libs/remix-ai-core/src/helpers/streamHandler.ts
  3. 3
      libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
  4. 1
      libs/remix-ai-core/src/prompts/promptBuilder.ts
  5. 16
      libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
  6. 2
      libs/remix-ui/remix-ai/src/index.ts
  7. 36
      libs/remix-ui/remix-ai/src/lib/components/Default.tsx
  8. 5
      libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx
  9. 4
      libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx
  10. 1
      package.json
  11. 13
      yarn.lock

@ -1,9 +1,13 @@
import * as packageJson from '../../../../../package.json'
import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams } from '@remix/remix-ai-core';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, HandleStreamResponse } from '@remix/remix-ai-core';
// Buffer holding one pending chat request until it is dispatched.
// NOTE(review): this mapped type is an identity mapping over T's keys
// (structurally equivalent to T itself); it exists only to give the
// buffer a named alias. It is used as chatRequestBufferT<any> below.
type chatRequestBufferT<T> = {
  [key in keyof T]: T[key]
}
const profile = {
name: 'remixAI',
@ -11,7 +15,7 @@ const profile = {
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initialize"],
"initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
@ -28,6 +32,7 @@ export class RemixAIPlugin extends ViewPlugin {
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
chatRequestBuffer: chatRequestBufferT<any> = null
constructor(inDesktop:boolean) {
super(profile)
@ -129,6 +134,7 @@ export class RemixAIPlugin extends ViewPlugin {
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
@ -138,6 +144,10 @@ export class RemixAIPlugin extends ViewPlugin {
}
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
// HandleStreamResponse(result, (text) => {
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: text })
// })
return result
}
@ -171,6 +181,43 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
/**
 * Queue a chat request and surface a canned user message in the AI chat UI.
 * Only one request may be pending at a time; a second call is ignored until
 * ProcessChatRequestBuffer has consumed the buffered one.
 *
 * @param fn      name of the plugin method to invoke later (e.g. "code_explaining")
 * @param prompt  user prompt / code to send to the model
 * @param context optional extra context forwarded to the method
 * @param params  generation parameters (defaults to GenerationParams)
 */
chatPipe(fn, prompt: string, context?: string, params: IParams=GenerationParams){
  if (this.chatRequestBuffer != null) {
    // A request is already queued — refuse to overwrite it.
    console.log("chatRequestBuffer is not empty. First process the last request.")
    return
  }
  this.chatRequestBuffer = { fn_name: fn, prompt, params, context }
  // Push a visible message into the chat composer for the known flows.
  if (fn === "code_explaining") {
    ChatApi.composer.send("Explain the current code")
  } else if (fn === "solidity_answer") {
    ChatApi.composer.send("Answer the following question")
  }
}
/**
 * Dispatch the buffered chat request (if any) to the matching plugin method,
 * then clear the buffer. Returns the method's result, or undefined when no
 * request was pending.
 *
 * @param params generation parameters (defaults to GenerationParams)
 */
ProcessChatRequestBuffer(params:IParams=GenerationParams){
  if (this.chatRequestBuffer == null) {
    console.log("chatRequestBuffer is empty.")
    return
  }
  const { fn_name, prompt, context } = this.chatRequestBuffer
  // Dispatch first, clear afterwards — mirrors the original ordering so a
  // synchronous throw leaves the buffer intact.
  const result = this[fn_name](prompt, context, params)
  this.chatRequestBuffer = null
  return result
}
// True while a chatPipe request is buffered and awaiting ProcessChatRequestBuffer.
// Uses loose != null so both null and undefined count as "not pending".
isChatRequestPending(){
  return this.chatRequestBuffer != null
}
render() {
return (
<RemixAITab plugin={this}></RemixAITab>

@ -0,0 +1,62 @@
import { ChatHistory } from '../prompts/chat';
import { JsonStreamParser} from '../types/types'
/**
 * Parse a single (non-streamed) inference response and forward each generated
 * text fragment to the optional callback.
 *
 * Fixes over the previous version:
 *  - the if/else on `isGenerating` had byte-identical branches (dead branching)
 *    and is collapsed into one path;
 *  - `cb` is declared optional but was invoked unguarded (TypeError when
 *    omitted) — now called via optional chaining;
 *  - the accumulated text was discarded — it is now returned so callers can
 *    use the full answer.
 *
 * @param response raw response payload handed to JsonStreamParser
 * @param cb       optional callback invoked with each text fragment
 * @returns the concatenated generated text
 */
export const HandleSimpleResponse = async (response,
  cb?: (streamText: string) => void) => {
  let resultText = ''
  const parser = new JsonStreamParser();
  // safeJsonParse may yield several JSON objects from one payload
  const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
  for (const parsedData of chunk) {
    resultText += parsedData.generatedText
    cb?.(parsedData.generatedText)
  }
  return resultText
}
/**
 * Consume a streamed fetch-style response, forwarding each generated text
 * fragment to `cb` and the fully accumulated text to `done_cb` when the
 * stream ends.
 *
 * Fixes over the previous version:
 *  - the if/else on `isGenerating` had byte-identical branches and is
 *    collapsed into one path;
 *  - `cb` is declared optional but was invoked unguarded — now called via
 *    optional chaining;
 *  - leftover debug console.log removed;
 *  - the outer catch logged a misleading 'Error parsing JSON' message for
 *    what is a stream-read failure — message corrected.
 *
 * Per-chunk JSON parse errors are logged and skipped (best effort) so one
 * malformed chunk does not abort the whole stream.
 *
 * @param streamResponse response object exposing body.getReader() (Streams API)
 * @param cb             optional callback invoked with each text fragment
 * @param done_cb        optional callback invoked once with the full text
 */
export const HandleStreamResponse = async (streamResponse,
  cb?: (streamText: string) => void,
  done_cb?: (result: string) => void) => {
  try {
    let resultText = ''
    const parser = new JsonStreamParser();
    const reader = streamResponse.body!.getReader();
    const decoder = new TextDecoder();

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      try {
        // decode with { stream: true } so multi-byte chars split across
        // chunks are handled correctly
        const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
        for (const parsedData of chunk) {
          resultText += parsedData.generatedText
          cb?.(parsedData.generatedText)
        }
      }
      catch (error) {
        console.error('Error parsing JSON:', error);
      }
    }
    if (done_cb) {
      done_cb(resultText)
    }
  }
  catch (error) {
    console.error('Error reading stream response:', error);
  }
}
// Append a (user prompt, AI answer) pair to the shared chat history.
// NOTE(review): the exported name "UpdtateChatHistory" and the parameter
// "userPromptprompt" are typos ("Update", "userPrompt"); kept as-is because
// the name is exported and external callers may already depend on it.
export const UpdtateChatHistory = (userPromptprompt: string, AIAnswer: string) => {
  ChatHistory.pushHistory(userPromptprompt, AIAnswer)
}

@ -28,7 +28,6 @@ export class RemoteInferencer implements ICompletions {
this.event.emit("onInference")
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
console.log(requesURL)
try {
const options = { headers: { 'Content-Type': 'application/json', } }
const result = await axios.post(`${requesURL}/${endpoint}`, payload, options)
@ -76,6 +75,7 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
const reader = response.body!.getReader();
const decoder = new TextDecoder();
@ -112,7 +112,6 @@ export class RemoteInferencer implements ICompletions {
console.error('Error making stream request to Inference server:', error.message);
}
finally {
console.log("end streaming\n" + resultText)
this.event.emit('onInferenceDone')
}
}

@ -17,7 +17,6 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
} else {
let newPrompt = ""
for (const [question, answer] of ChatHistory.getHistory()) {
console.log("question", question)
if (question.startsWith('sol-gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
else newPrompt += PromptBuilder(question, answer, modelOP)

@ -0,0 +1,16 @@
import { StatusEvents } from "@remixproject/plugin-utils";
/**
 * Plugin API surface exposed by the RemixAI desktop inferencer plugin.
 * NOTE(review): parameter/return semantics below are inferred from names
 * only — the implementations are not visible here; confirm against the
 * desktop plugin before relying on them.
 */
export interface IRemixAIDesktop {
  events: {
    // presumably emitted with each partial text chunk while a response
    // streams — confirm against the emitter
    onStreamResult(streamText: string): Promise<void>,
  } & StatusEvents,
  methods: {
    code_completion(context: string): Promise<string>
    code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
    code_generation(prompt: string): Promise<string>,
    code_explaining(code: string, context?: string): Promise<string>,
    error_explaining(prompt: string): Promise<string>,
    solidity_answer(prompt: string): Promise<string>,
    // local=true selects a locally-hosted backend; models are optional overrides
    initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise<void>,
  }
}

@ -1 +1 @@
export { RemixAITab } from './lib/components/RemixAI'
export { RemixAITab, ChatApi } from './lib/components/RemixAI'

@ -1,35 +1,49 @@
import React, { useContext, useEffect, useState } from 'react'
import React, { useContext, useEffect, useState, useCallback} from 'react'
import '../remix-ai.css'
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse } from '@remix/remix-ai-core';
import { StreamSend, StreamingAdapterObserver } from '@nlux/react';
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
import { StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import axios from 'axios';
import { AiChat, useAsStreamAdapter, ChatItem} from '@nlux/react';
import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
import '@nlux/themes/nova.css';
import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
import {highlighter} from '@nlux/highlighter'
const demoProxyServerUrl = 'https://solcoder.remixproject.org';
let chatobserver: StreamingAdapterObserver = null
export let ChatApi = null
export const Default = (props) => {
const send: StreamSend = async (
prompt: string,
observer: StreamingAdapterObserver,
) => {
chatobserver = observer
GenerationParams.stream_result = true
GenerationParams.return_stream_response = true
GenerationParams.return_stream_response = GenerationParams.stream_result
const response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
HandleStreamResponse(response,
let response = null
if (await props.plugin.call('remixAI', 'isChatRequestPending')){
response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
}
else{
response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
}
if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
ChatHistory.pushHistory(prompt, result)
observer.complete() }
observer.complete()
}
)
else{
observer.next(response)
observer.complete()
}
};
ChatApi = useAiChatApi();
// Define initial messages
const initialMessages: ChatItem[] = [
@ -42,6 +56,7 @@ export const Default = (props) => {
return (
<AiChat
api={ChatApi}
adapter={ adapter }
personaOptions={{
assistant: {
@ -62,6 +77,7 @@ export const Default = (props) => {
messageOptions={{ showCodeBlockCopyButton: true,
streamingAnimationSpeed: 2,
waitTimeBeforeStreamCompletion: 1000,
syntaxHighlighter: highlighter
}}
/>
);

@ -1,6 +1,6 @@
import React, { useContext } from 'react'
import '../remix-ai.css'
import { Default } from './Default'
import { Default, ChatApi } from './Default'
export const RemixAITab = (props) => {
@ -12,4 +12,5 @@ export const RemixAITab = (props) => {
</div>
</>
)
}
}
export { ChatApi }

@ -106,7 +106,6 @@ export const TabsUI = (props: TabsUIProps) => {
}
const renderTab = (tab: Tab, index) => {
const classNameImg = 'my-1 mr-1 text-dark ' + tab.iconClass
const classNameTab = 'nav-item nav-link d-flex justify-content-center align-items-center px-2 py-1 tab' + (index === currentIndexRef.current ? ' active' : '')
const invert = props.themeQuality === 'dark' ? 'invert(1)' : 'invert(0)'
@ -251,7 +250,8 @@ export const TabsUI = (props: TabsUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', path)
if (tabsState.currentExt === 'sol') {
setExplaining(true)
await props.plugin.call('remixAI', 'code_explaining', content)
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
// await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
}

@ -107,6 +107,7 @@
"@metamask/eth-sig-util": "^7.0.2",
"@microlink/react-json-view": "^1.23.0",
"@nlux/core": "^2.17.1",
"@nlux/highlighter": "^2.17.1",
"@nlux/react": "^2.17.1",
"@nlux/themes": "^2.17.1",
"@openzeppelin/contracts": "^5.0.0",

@ -5316,6 +5316,14 @@
resolved "https://registry.yarnpkg.com/@nlux/core/-/core-2.17.1.tgz#18a95e21e5aafae83bf6d515651780497f0f39cc"
integrity sha512-hIvOnuENVqWaIg5Co4JtFmHph7Sp0Nj+QixOMdOW9Ou7CjU7HK+maB5koLoayNL64B+wHTtgPN7zBrB8NCSPXw==
"@nlux/highlighter@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/highlighter/-/highlighter-2.17.1.tgz#e4d0f43b5afeff2631bc118b1cc6db80afb7e99f"
integrity sha512-/ETnJPbNJWY8ZQH6XAQ5zooEMPsy44Lk2tIxMfr5Ca7+0ICpkMP0mppOmAoKCQBNVsqmKe0oczFBk8blddNDaA==
dependencies:
"@nlux/core" "2.17.1"
highlight.js "^11"
"@nlux/react@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/react/-/react-2.17.1.tgz#e4668e7cbe42dd195ea86a02350c8b15cb8f48f0"
@ -17650,6 +17658,11 @@ hey-listen@^1.0.8:
resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
highlight.js@^11:
version "11.10.0"
resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.10.0.tgz#6e3600dc4b33d6dc23d5bd94fbf72405f5892b92"
integrity sha512-SYVnVFswQER+zu1laSya563s+F8VDGt7o35d4utbamowvUNLLMovFqwCLSocpZTz3MgaSRA1IbqRWZv97dtErQ==
hmac-drbg@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"

Loading…
Cancel
Save