Cleanup and new send button on RemixAI chat

pull/5241/head
STetsing 4 months ago
parent 1743146a1c
commit 2b0f7a2cce
  1. 6
      apps/circuit-compiler/src/app/components/container.tsx
  2. 2
      apps/remix-ide/src/app/plugins/remixAIPlugin.tsx
  3. 3
      libs/remix-ai-core/src/helpers/streamHandler.ts
  4. 12
      libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
  5. 4
      libs/remix-api/src/lib/plugins/remixai-api.ts
  6. 2
      libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts
  7. 1
      libs/remix-api/src/lib/remix-api.ts
  8. 2
      libs/remix-ui/editor/src/lib/remix-ui-editor.tsx
  9. 29
      libs/remix-ui/remix-ai/src/lib/components/Default.tsx
  10. 9
      libs/remix-ui/remix-ai/src/lib/components/color.css
  11. 1
      libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx

@ -73,18 +73,14 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)} full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it. explain why the error occurred and how to fix it.
` `
// @ts-ignore
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message) await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
// await circuitApp.plugin.call('remixAI', 'error_explaining', message)
} else { } else {
const message = ` const message = `
error message: ${error} error message: ${error}
full circom error: ${JSON.stringify(report, null, 2)} full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it. explain why the error occurred and how to fix it.
` `
// @ts-ignore
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message) await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
//await circuitApp.plugin.call('remixAI', 'error_explaining', message)
} }
} else { } else {
const error = report.message const error = report.message
@ -93,8 +89,6 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)} full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it. explain why the error occurred and how to fix it.
` `
// @ts-ignore
//await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message) await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
} }
} }

@ -4,6 +4,7 @@ import { Plugin } from '@remixproject/engine';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai' import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react'; import React, { useCallback } from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core'; import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
import { CustomRemixApi } from '@remix-api'
type chatRequestBufferT<T> = { type chatRequestBufferT<T> = {
[key in keyof T]: T[key] [key in keyof T]: T[key]
@ -26,6 +27,7 @@ const profile = {
maintainedBy: 'Remix' maintainedBy: 'Remix'
} }
// add Plugin<any, CustomRemixApi>
export class RemixAIPlugin extends ViewPlugin { export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false isOnDesktop:boolean = false
aiIsActivated:boolean = false aiIsActivated:boolean = false

@ -24,9 +24,10 @@ export const HandleStreamResponse = async (streamResponse,
try { try {
let resultText = '' let resultText = ''
const parser = new JsonStreamParser(); const parser = new JsonStreamParser();
const reader = streamResponse.body!.getReader(); const reader = streamResponse.body?.getReader();
const decoder = new TextDecoder(); const decoder = new TextDecoder();
// eslint-disable-next-line no-constant-condition
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
if (done) break; if (done) break;

@ -3,13 +3,9 @@ import { GenerationParams, CompletionParams, InsertionParams } from "../../types
import { buildSolgptPromt } from "../../prompts/promptBuilder"; import { buildSolgptPromt } from "../../prompts/promptBuilder";
import EventEmitter from "events"; import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat"; import { ChatHistory } from "../../prompts/chat";
import axios, { AxiosResponse } from 'axios'; import axios from 'axios';
import { Readable } from 'stream';
import { StreamingAdapterObserver } from '@nlux/react';
const defaultErrorMessage = `Unable to get a response from AI server` const defaultErrorMessage = `Unable to get a response from AI server`
export class RemoteInferencer implements ICompletions { export class RemoteInferencer implements ICompletions {
api_url: string api_url: string
completion_url: string completion_url: string
@ -76,9 +72,10 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) { if (payload.return_stream_response) {
return response return response
} }
const reader = response.body!.getReader(); const reader = response.body?.getReader();
const decoder = new TextDecoder(); const decoder = new TextDecoder();
const parser = new JsonStreamParser(); const parser = new JsonStreamParser();
// eslint-disable-next-line no-constant-condition
while (true) { while (true) {
const { done, value } = await reader.read(); const { done, value } = await reader.read();
if (done) break; if (done) break;
@ -113,7 +110,6 @@ export class RemoteInferencer implements ICompletions {
} }
} }
async code_completion(prompt, options:IParams=CompletionParams): Promise<any> { async code_completion(prompt, options:IParams=CompletionParams): Promise<any> {
const payload = { prompt, "endpoint":"code_completion", ...options } const payload = { prompt, "endpoint":"code_completion", ...options }
return this._makeRequest(payload, AIRequestType.COMPLETION) return this._makeRequest(payload, AIRequestType.COMPLETION)
@ -149,7 +145,7 @@ export class RemoteInferencer implements ICompletions {
async error_explaining(prompt, options:IParams=GenerationParams): Promise<any> { async error_explaining(prompt, options:IParams=GenerationParams): Promise<any> {
const payload = { prompt, "endpoint":"error_explaining", ...options } const payload = { prompt, "endpoint":"error_explaining", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload , AIRequestType.GENERAL) if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL) else return this._makeRequest(payload, AIRequestType.GENERAL)
} }
} }

@ -5,9 +5,11 @@ export interface IRemixAI {
events: { events: {
onStreamResult(streamText: string): Promise<void>, onStreamResult(streamText: string): Promise<void>,
activated(): Promise<void>, activated(): Promise<void>,
onInference():void,
onInferenceDone():void,
} & StatusEvents, } & StatusEvents,
methods: { methods: {
code_completion(context: string): Promise<string> code_completion(context: string): Promise<string>
code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>, code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
code_generation(prompt: string): Promise<string | null>, code_generation(prompt: string): Promise<string | null>,
code_explaining(code: string, context?: string): Promise<string | null>, code_explaining(code: string, context?: string): Promise<string | null>,

@ -10,7 +10,7 @@ export interface IRemixAID {
} & StatusEvents, } & StatusEvents,
methods: { methods: {
code_completion(context: string): Promise<string> code_completion(context: string): Promise<string>
code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>, code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
code_generation(prompt: string): Promise<string | null>, code_generation(prompt: string): Promise<string | null>,
code_explaining(code: string, context?: string): Promise<string | null>, code_explaining(code: string, context?: string): Promise<string | null>,

@ -16,7 +16,6 @@ import { IMatomoApi } from "./plugins/matomo-api"
import { IRemixAI } from "./plugins/remixai-api" import { IRemixAI } from "./plugins/remixai-api"
import { IRemixAID } from "./plugins/remixAIDesktop-api" import { IRemixAID } from "./plugins/remixAIDesktop-api"
export interface ICustomRemixApi extends IRemixApi { export interface ICustomRemixApi extends IRemixApi {
dgitApi: IGitApi dgitApi: IGitApi
config: IConfigApi config: IConfigApi

@ -833,7 +833,6 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile') const file = await props.plugin.call('fileManager', 'getCurrentFile')
const context = await props.plugin.call('fileManager', 'readFile', file) const context = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current }) const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current })
// await props.plugin.call('remixAI', 'code_explaining', message, context)
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context) await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction']) _paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
}, },
@ -856,7 +855,6 @@ export const EditorUI = (props: EditorUIProps) => {
const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode }) const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode })
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage) await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage)
// await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction']) _paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
}, },
} }

@ -1,18 +1,13 @@
import React, { useContext, useEffect, useState, useCallback} from 'react' import React from 'react'
import '../remix-ai.css' import '../remix-ai.css'
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core'; import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react'; import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import axios from 'axios'; import { AiChat, useAsStreamAdapter, ChatItem } from '@nlux/react';
import { AiChat, useAsStreamAdapter, ChatItem, AiChatUI} from '@nlux/react';
import { JsonStreamParser } from '@remix/remix-ai-core'; import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas'; import { user, assistantAvatar } from './personas';
import {highlighter} from '@nlux/highlighter' import { highlighter } from '@nlux/highlighter'
import './color.css' import './color.css'
import '@nlux/themes/unstyled.css'; import '@nlux/themes/unstyled.css';
// import '@nlux/themes'
import { result } from 'lodash';
const demoProxyServerUrl = 'https://solcoder.remixproject.org';
export let ChatApi = null export let ChatApi = null
@ -27,28 +22,27 @@ export const Default = (props) => {
let response = null let response = null
if (await props.plugin.call('remixAI', 'isChatRequestPending')){ if (await props.plugin.call('remixAI', 'isChatRequestPending')){
response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams); response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
} } else {
else{
response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams); response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
} }
if (GenerationParams.return_stream_response) HandleStreamResponse(response, if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)}, (text) => {observer.next(text)},
(result) => { (result) => {
ChatHistory.pushHistory(prompt, result) ChatHistory.pushHistory(prompt, result)
observer.complete() observer.complete()
} }
) )
else{ else {
observer.next(response) observer.next(response)
observer.complete() observer.complete()
} }
}; };
ChatApi = useAiChatApi(); ChatApi = useAiChatApi();
const conversationStarters: ConversationStarter[] = [ const conversationStarters: ConversationStarter[] = [
{prompt: 'Explain briefly the current file in Editor', icon: <span></span>}, { prompt: 'Explain briefly the current file in Editor', icon: <span></span> },
{prompt: 'Explain what is a solidity contract!'}] { prompt: 'Explain what is a solidity contract!' }]
// Define initial messages // Define initial messages
const initialMessages: ChatItem[] = [ const initialMessages: ChatItem[] = [
@ -70,7 +64,6 @@ export const Default = (props) => {
avatar: assistantAvatar avatar: assistantAvatar
}, },
user user
}} }}
//initialConversation={initialMessages} //initialConversation={initialMessages}
conversationOptions={{ layout: 'bubbles', conversationStarters }} conversationOptions={{ layout: 'bubbles', conversationStarters }}

@ -73,6 +73,7 @@
--nlux-CodeBlock--Padding: 20px; --nlux-CodeBlock--Padding: 20px;
--nlux-CodeBlock--TextColor: var(--text); --nlux-CodeBlock--TextColor: var(--text);
--nlux-CodeBlock--FontSize: 14px; --nlux-CodeBlock--FontSize: 14px;
--nlux-cvStrt--wd: 180px
/* Conversation starter colors */ /* Conversation starter colors */
--nlux-ConversationStarter--BackgroundColor: var(--light); --nlux-ConversationStarter--BackgroundColor: var(--light);
@ -83,12 +84,6 @@
'); ');
/* Override icon for the send button */ /* Override icon for the send button */
--nlux-send-icon: url('data:image/svg+xml,\ --nlux-send-icon: url('data:image/svg+xml, <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--!Font Awesome Free 6.6.0 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free Copyright 2024 Fonticons, Inc.--><path d="M0 256a256 256 0 1 0 512 0A256 256 0 1 0 0 256zM297 385c-9.4 9.4-24.6 9.4-33.9 0s-9.4-24.6 0-33.9l71-71L120 280c-13.3 0-24-10.7-24-24s10.7-24 24-24l214.1 0-71-71c-9.4-9.4-9.4-24.6 0-33.9s24.6-9.4 33.9 0L409 239c9.4 9.4 9.4 24.6 0 33.9L297 385z"/></svg>');
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" \
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">\
<circle cx="12" cy="12" r="10"/>\
<path d="M16 12l-4-4-4 4M12 16V9"/>\
</svg>\
');
} }

@ -276,7 +276,6 @@ export const TabsUI = (props: TabsUIProps) => {
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content) await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}, 500) }, 500)
} }
// await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false) setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file']) _paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
} }

Loading…
Cancel
Save