Merge pull request #5241 from ethereum/remixai__chat

Remixai chat
Aniket authored 4 weeks ago, committed by GitHub
commit 05e025ee22
  1. apps/circuit-compiler/src/app/components/container.tsx (9 changes)
  2. apps/remix-ide/src/app.js (2 changes)
  3. apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (128 changes)
  4. apps/remix-ide/src/app/tabs/locales/en/editor.json (5 changes)
  5. apps/remix-ide/src/remixAppManager.js (1 change)
  6. apps/remixdesktop/src/lib/InferenceServerManager.ts (14 changes)
  7. apps/remixdesktop/src/plugins/remixAIDektop.ts (5 changes)
  8. apps/vyper/src/app/utils/remix-client.tsx (10 changes)
  9. libs/remix-ai-core/src/agents/codeExplainAgent.ts (29 changes)
  10. libs/remix-ai-core/src/helpers/streamHandler.ts (62 changes)
  11. libs/remix-ai-core/src/index.ts (4 changes)
  12. libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (118 changes)
  13. libs/remix-ai-core/src/prompts/promptBuilder.ts (8 changes)
  14. libs/remix-ai-core/src/types/models.ts (4 changes)
  15. libs/remix-ai-core/src/types/remix-project.code-workspace (10 changes)
  16. libs/remix-ai-core/src/types/types.ts (49 changes)
  17. libs/remix-api/src/lib/plugins/remixai-api.ts (2 changes)
  18. libs/remix-api/src/lib/plugins/remixaiDesktop-api.ts (23 changes)
  19. libs/remix-api/src/lib/remix-api.ts (1 change)
  20. libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts (54 changes)
  21. libs/remix-ui/editor/src/lib/remix-ui-editor.tsx (15 changes)
  22. libs/remix-ui/remix-ai/src/index.ts (2 changes)
  23. libs/remix-ui/remix-ai/src/lib/components/Default.tsx (141 changes)
  24. libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx (5 changes)
  25. libs/remix-ui/remix-ai/src/lib/components/color.css (89 changes)
  26. libs/remix-ui/remix-ai/src/lib/components/personas.tsx (8 changes)
  27. libs/remix-ui/remix-ai/src/lib/components/send.ts (47 changes)
  28. libs/remix-ui/renderer/src/lib/renderer.tsx (2 changes)
  29. libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx (28 changes)
  30. libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx (4 changes)
  31. package.json (4 changes)
  32. yarn.lock (30 changes)

@ -73,16 +73,14 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
} else {
const message = `
error message: ${error}
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
} else {
const error = report.message
@ -91,8 +89,7 @@ export function Container () {
full circom error: ${JSON.stringify(report, null, 2)}
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
await circuitApp.plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}
}
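
For context, a hedged sketch of the call convention this hunk introduces: instead of invoking 'error_explaining' directly, consumers route the prompt through 'chatPipe' so the answer renders in the RemixAI chat tab rather than the terminal. `plugin` stands in for any plugin client with a `call` method.

async function explainCircomError(plugin: { call: (...args: any[]) => Promise<any> }, error: string, report: unknown) {
  const message = `
  error message: ${error}
  full circom error: ${JSON.stringify(report, null, 2)}
  explain why the error occurred and how to fix it.
  `
  // chatPipe buffers the request; the chat UI picks it up and streams the answer
  await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
}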

@ -557,7 +557,7 @@ class AppComponent {
await this.appManager.activatePlugin(['solidity-script', 'remix-templates'])
if (isElectron()) {
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID'])
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat']) // 'remixAID'
}
this.appManager.on(

@ -1,9 +1,14 @@
import * as packageJson from '../../../../../package.json'
import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel } from '@remix/remix-ai-core';
import { RemixAITab, ChatApi } from '@remix-ui/remix-ai'
import React, { useCallback } from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, CodeExplainAgent } from '@remix/remix-ai-core';
import { CustomRemixApi } from '@remix-api'
type chatRequestBufferT<T> = {
[key in keyof T]: T[key]
}
const profile = {
name: 'remixAI',
@ -11,39 +16,52 @@ const profile = {
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initialize"],
"initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
kind: '',
// location: 'sidePanel',
location: 'sidePanel',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
version: packageJson.version,
maintainedBy: 'Remix'
}
export class RemixAIPlugin extends Plugin {
// add Plugin<any, CustomRemixApi>
export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
chatRequestBuffer: chatRequestBufferT<any> = null
agent: CodeExplainAgent
useRemoteInferencer:boolean = false
constructor(inDesktop:boolean) {
super(profile)
this.isOnDesktop = inDesktop
this.agent = new CodeExplainAgent(this)
// user machine doesn't use resources for remote inferencing
}
onActivation(): void {
this.initialize(null, null, null, false)
if (this.isOnDesktop) {
console.log('Activating RemixAIPlugin on desktop')
// this.on(this.remixDesktopPluginName, 'activated', () => {
this.useRemoteInferencer = true
this.initialize(null, null, null, this.useRemoteInferencer);
// })
} else {
console.log('Activating RemixAIPlugin on browser')
this.useRemoteInferencer = true
this.initialize()
}
}
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
// on desktop use remote inferencer -> false
console.log('initialize on desktop')
const res = await this.call(this.remixDesktopPluginName, 'initializeModelBackend', useRemote, model1, model2)
if (res) {
this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
@ -60,7 +78,6 @@ export class RemixAIPlugin extends Plugin {
}
} else {
// on browser
this.remoteInferencer = new RemoteInferencer(remoteModel?.apiUrl, remoteModel?.completionUrl)
this.remoteInferencer.event.on('onInference', () => {
this.isInferencing = true
@ -80,7 +97,7 @@ export class RemixAIPlugin extends Plugin {
return
}
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_generation', prompt)
} else {
return await this.remoteInferencer.code_generation(prompt)
@ -88,82 +105,109 @@ export class RemixAIPlugin extends Plugin {
}
async code_completion(prompt: string, promptAfter: string): Promise<any> {
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_completion', prompt, promptAfter)
} else {
return await this.remoteInferencer.code_completion(prompt, promptAfter)
}
}
async solidity_answer(prompt: string): Promise<any> {
async solidity_answer(prompt: string, params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
const newPrompt = await this.agent.chatCommand(prompt)
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', newPrompt)
} else {
result = await this.remoteInferencer.solidity_answer(prompt)
result = await this.remoteInferencer.solidity_answer(newPrompt)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}
async code_explaining(prompt: string): Promise<any> {
async code_explaining(prompt: string, context: string, params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt, context, params)
} else {
result = await this.remoteInferencer.code_explaining(prompt)
result = await this.remoteInferencer.code_explaining(prompt, context, params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}
async error_explaining(prompt: string): Promise<any> {
async error_explaining(prompt: string, context: string="", params: IParams=GenerationParams): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
result = await this.remoteInferencer.error_explaining(prompt)
result = await this.remoteInferencer.error_explaining(prompt, params)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
if (result && params.terminal_output) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
return result
}
async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
if (this.isOnDesktop) {
if (this.isOnDesktop && !this.useRemoteInferencer) {
return await this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
} else {
return await this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
}
}
// render() {
// return (
// <RemixAITab plugin={this}></RemixAITab>
// )
// }
chatPipe(fn, prompt: string, context?: string, pipeMessage?: string){
if (this.chatRequestBuffer == null){
this.chatRequestBuffer = {
fn_name: fn,
prompt: prompt,
context: context
}
if (pipeMessage) ChatApi.composer.send(pipeMessage)
else {
if (fn === "code_explaining") ChatApi.composer.send("Explain the current code")
else if (fn === "error_explaining") ChatApi.composer.send("Explain the error")
else if (fn === "solidity_answer") ChatApi.composer.send("Answer the following question")
else console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
else {
console.log("chatRequestBuffer is not empty. First process the last request.")
}
}
async ProcessChatRequestBuffer(params:IParams=GenerationParams){
if (this.chatRequestBuffer != null){
const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
this.chatRequestBuffer = null
return result
}
else {
console.log("chatRequestBuffer is empty.")
return ""
}
}
isChatRequestPending(){
return this.chatRequestBuffer != null
}
render() {
return (
<RemixAITab plugin={this}></RemixAITab>
)
}
}
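
Taken together, chatPipe / isChatRequestPending / ProcessChatRequestBuffer form a small hand-off protocol between plugins and the chat UI. A minimal sketch of the round trip, assuming a live RemixAIPlugin instance (names are from this file; the sequencing is illustrative):

declare const ai: RemixAIPlugin

async function handOff() {
  // 1. A caller queues a request. chatPipe stores { fn_name, prompt, context }
  //    and pushes a pipe message into the chat composer via ChatApi.composer.send.
  ai.chatPipe('code_explaining', 'contract C { uint x; }', '', 'Explain the snippet above')

  // 2. When the composer sends, the chat adapter drains the buffer instead of
  //    issuing a fresh solidity_answer call:
  if (ai.isChatRequestPending()) {
    const result = await ai.ProcessChatRequestBuffer(GenerationParams)
    // result is the (possibly streamed) answer for the buffered request
  }
}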

@ -25,8 +25,9 @@
"editor.explainFunction": "Explain this function",
"editor.explainFunctionSol": "Explain this code",
"editor.explainFunction2": "Explain the function \"{name}\"",
"editor.explainFunctionByAI": "solidity code: {content}\n Explain the function {currentFunction}",
"editor.explainFunctionByAISol": "solidity code: {content}\n Explain the function {currentFunction}",
"editor.explainFunctionByAI": "```\n{content}\n```\nExplain the function {currentFunction}",
"editor.explainFunctionByAISol": "```\n{content}\n```\nExplain the function {currentFunction}",
"editor.ExplainPipeMessage": "```\n {content}\n```\nExplain the snipped above",
"editor.executeFreeFunction": "Run a free function",
"editor.executeFreeFunction2": "Run the free function \"{name}\"",
"editor.toastText1": "This can only execute free function",

@ -78,7 +78,6 @@ let requiredModules = [
// 'doc-gen',
'remix-templates',
'remixAID',
'remixAI',
'solhint',
'dgit',
'pinnedPanel',

@ -404,8 +404,12 @@ export class InferenceManager implements ICompletions {
}
, responseType: 'stream' });
const userPrompt = payload[Object.keys(payload)[0]]
const userPrompt = payload.prompt
let resultText = ""
if (payload.return_stream_response) {
return response
}
response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
@ -449,14 +453,14 @@ export class InferenceManager implements ICompletions {
}
}
async code_completion(context: any, params:IParams=CompletionParams): Promise<any> {
async code_completion(prompt, promptAfter, params:IParams=CompletionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
// as of now no prompt required
const payload = { context_code: context, ...params }
const payload = { prompt, 'context':promptAfter, ...params }
return this._makeInferenceRequest('code_completion', payload, AIRequestType.COMPLETION)
}
@ -484,9 +488,9 @@ export class InferenceManager implements ICompletions {
return
}
if (params.stream_result) {
return this._streamInferenceRequest('code_explaining', { code, context, ...params })
return this._streamInferenceRequest('code_explaining', { prompt: code, context, ...params })
} else {
return this._makeInferenceRequest('code_explaining', { code, context, ...params }, AIRequestType.GENERAL)
return this._makeInferenceRequest('code_explaining', { prompt: code, context, ...params }, AIRequestType.GENERAL)
}
}

@ -45,6 +45,7 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
async onActivation(): Promise<void> {
this.onload(() => {
this.emit('activated')
})
}
@ -81,9 +82,9 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
return true
}
async code_completion(context: any) {
async code_completion(prompt: string, promptAfter: string) {
// use general purpose model
return this.desktopInferencer.code_completion(context)
return this.desktopInferencer.code_completion(prompt, promptAfter)
}
async code_insertion(msg_pfx: string, msg_sfx: string) {

@ -7,6 +7,7 @@ import { ExampleContract } from '../components/VyperResult'
import EventEmitter from 'events'
import { CustomRemixApi } from '@remix-api'
export type VyperComplierAddress = 'https://vyper2.remixproject.org/' | 'http://localhost:8000/'
export class RemixClient extends PluginClient<any, CustomRemixApi> {
private client = createClient<Api, Readonly<RemixApi>>(this)
@ -67,11 +68,10 @@ export class RemixClient extends PluginClient<any, CustomRemixApi> {
}
try {
// TODO: remove! no formatting required since already handled on server
const formattedMessage = `
${message}
can you explain why this error occurred and how to fix it?
`
await this.client.call('remixAI' as any, 'error_explaining', message)
const file = await this.client.call('fileManager', 'getCurrentFile')
const content = await this.client.call('fileManager', 'readFile', file)
const messageAI = `Vyper code: ${content}\n error message: ${message}\n explain why the error occurred and how to fix it.`
await this.client.call('remixAI' as any, 'chatPipe', 'error_explaining', messageAI)
} catch (err) {
console.error('unable to askGpt')
console.error(err)

@ -1,29 +1,44 @@
// interactive code explaining and highlighting of security vulnerabilities
import * as fs from 'fs';
class CodeExplainAgent {
export class CodeExplainAgent {
private codebase: string[]; // list of codebase files
public currentFile: string;
plugin
constructor(codebasePath: string) {
constructor(props) {
this.plugin = props
// git or fs
this.codebase = this.loadCodebase(codebasePath);
const codebase = this.loadCodebase("codebasePath");
}
private loadCodebase(path: string): string[] {
const files = fs.readdirSync(path);
return files
.filter(file => file.endsWith('.ts'))
.flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
return []
}
public update(currentFile, lineNumber){
}
async chatCommand(prompt:string){
// change this function with indexer or related
try {
if (prompt.includes('Explain briefly the current file')){
const file = await this.plugin.call('fileManager', 'getCurrentFile')
const content = `Explain this code:\n ${await this.plugin.call('fileManager', 'readFile', file)}`
return content
} else return prompt
} catch {
console.log('There is no file selected')
return 'There is no file selected'
}
}
public getExplanations(currentLine: string, numSuggestions: number = 3): string[] {
// process the code base explaining the current file and highlight some details
const suggestions: string[] = [];
return suggestions;
}
}
// Handle file changed (significantly)

@ -0,0 +1,62 @@
import { ChatHistory } from '../prompts/chat';
import { JsonStreamParser } from '../types/types'
export const HandleSimpleResponse = async (response,
cb?: (streamText: string) => void) => {
let resultText = ''
const parser = new JsonStreamParser();
const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(response);
for (const parsedData of chunk) {
if (parsedData.isGenerating) {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
} else {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
}
}
}
export const HandleStreamResponse = async (streamResponse,
cb: (streamText: string) => void,
done_cb?: (result: string) => void) => {
try {
let resultText = ''
const parser = new JsonStreamParser();
const reader = streamResponse.body?.getReader();
const decoder = new TextDecoder();
// eslint-disable-next-line no-constant-condition
while (true) {
const { done, value } = await reader.read();
if (done) break;
try {
const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
for (const parsedData of chunk) {
if (parsedData.isGenerating) {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
} else {
resultText += parsedData.generatedText
cb(parsedData.generatedText)
}
}
}
catch (error) {
console.error('Error parsing JSON:', error);
}
}
if (done_cb) {
done_cb(resultText)
}
}
catch (error) {
console.error('Error parsing JSON:', error);
}
}
export const UpdateChatHistory = (userPrompt: string, AIAnswer: string) => {
ChatHistory.pushHistory(userPrompt, AIAnswer)
}
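
A usage sketch for the handler above, assuming a fetch Response obtained with return_stream_response set (the endpoint and prompt are illustrative):

async function streamAnswer() {
  const response = await fetch('https://solcoder.remixproject.org', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt: 'What is a modifier?', endpoint: 'solidity_answer',
      stream_result: true, return_stream_response: true })
  })
  HandleStreamResponse(
    response,
    (streamText) => console.log(streamText),                       // incremental chunks
    (result) => UpdateChatHistory('What is a modifier?', result)   // full answer at the end
  )
}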

@ -18,3 +18,7 @@ export {
RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
}
export * from './types/types'
export * from './helpers/streamHandler'
export * from './agents/codeExplainAgent'

@ -1,50 +1,46 @@
import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel } from "../../types/types";
import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel, JsonStreamParser } from "../../types/types";
import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
import { buildSolgptPromt } from "../../prompts/promptBuilder";
import axios from "axios";
import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat";
import axios from 'axios';
const defaultErrorMessage = `Unable to get a response from AI server`
export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
max_history = 7
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
test_env=false
test_url="http://solcodertest.org"
constructor(apiUrl?:string, completionUrl?:string) {
this.api_url = apiUrl!==undefined ? apiUrl: "https://solcoder.remixproject.org"
this.completion_url = completionUrl!==undefined ? completionUrl : "https://completion.remixproject.org"
this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
this.completion_url = completionUrl!==undefined ? completionUrl : this.test_env? this.test_url : "https://completion.remixproject.org"
this.event = new EventEmitter()
}
private async _makeRequest(data, rType:AIRequestType){
private async _makeRequest(payload, rType:AIRequestType){
this.event.emit("onInference")
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
const userPrompt = data.data[0]
const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
try {
const result = await axios(requesURL, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
data: JSON.stringify(data),
})
const options = { headers: { 'Content-Type': 'application/json', } }
const result = await axios.post(`${requestURL}`, payload, options)
switch (rType) {
case AIRequestType.COMPLETION:
if (result.statusText === "OK")
return result.data.data[0]
return result.data.generatedText
else {
return defaultErrorMessage
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
const resultText = result.data.data[0]
ChatHistory.pushHistory(userPrompt, resultText)
if (result.data?.error) return result.data?.error
const resultText = result.data.generatedText
ChatHistory.pushHistory(payload.prompt, resultText)
return resultText
} else {
return defaultErrorMessage
@ -54,46 +50,57 @@ export class RemoteInferencer implements ICompletions {
} catch (e) {
ChatHistory.clearHistory()
console.error('Error making request to Inference server:', e.message)
return e
}
finally {
this.event.emit("onInferenceDone")
}
}
private async _streamInferenceRequest(data, rType:AIRequestType){
private async _streamInferenceRequest(endpoint, payload, rType:AIRequestType){
let resultText = ""
try {
this.event.emit('onInference')
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
const userPrompt = data.data[0]
const response = await axios({
method: 'post',
url: requesURL,
data: data,
headers: { 'Content-Type': 'application/json', "Accept": "text/event-stream" },
responseType: 'stream'
const requestURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
const response = await fetch(requestURL, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(payload),
});
let resultText = ""
response.data.on('data', (chunk: Buffer) => {
if (payload.return_stream_response) {
return response
}
const reader = response.body?.getReader();
const decoder = new TextDecoder();
const parser = new JsonStreamParser();
// eslint-disable-next-line no-constant-condition
while (true) {
const { done, value } = await reader.read();
if (done) break;
try {
const parsedData = JSON.parse(chunk.toString());
console.log("value" + decoder.decode(value))
const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
for (const parsedData of chunk) {
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
resultText = resultText + parsedData.generatedText
} else {
// stream generation is complete
resultText = resultText + parsedData.generatedText
ChatHistory.pushHistory(userPrompt, resultText)
ChatHistory.pushHistory(payload.prompt, resultText)
return parsedData.generatedText
}
}
} catch (error) {
console.error('Error parsing JSON:', error);
ChatHistory.clearHistory()
}
});
}
return "" // return empty string for now as handled in event
return resultText
} catch (error) {
ChatHistory.clearHistory()
console.error('Error making stream request to Inference server:', error.message);
@ -103,39 +110,38 @@ export class RemoteInferencer implements ICompletions {
}
}
async code_completion(prompt, promptAfter, options:IParams=null): Promise<any> {
const payload = !options?
{ "data": [prompt, "code_completion", promptAfter, false, 30, 0.9, 0.90, 50]} :
{ "data": [prompt, "code_completion", promptAfter, options.stream_result,
options.max_new_tokens, options.temperature, options.top_p, options.top_k]
}
async code_completion(prompt, promptAfter, options:IParams=CompletionParams): Promise<any> {
const payload = { prompt, 'context':promptAfter, "endpoint":"code_completion", ...options }
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx): Promise<any> {
const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
async code_insertion(msg_pfx, msg_sfx, options:IParams=InsertionParams): Promise<any> {
const payload = { "endpoint":"code_insertion", msg_pfx, msg_sfx, ...options, prompt: '' }
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt): Promise<any> {
const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
return this._makeRequest(payload, AIRequestType.COMPLETION)
async code_generation(prompt, options:IParams=GenerationParams): Promise<any> {
const payload = { prompt, "endpoint":"code_completion", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.COMPLETION)
else return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async solidity_answer(prompt): Promise<any> {
async solidity_answer(prompt, options:IParams=GenerationParams): Promise<any> {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string=""): Promise<any> {
const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
return this._makeRequest(payload, AIRequestType.GENERAL)
async code_explaining(prompt, context:string="", options:IParams=GenerationParams): Promise<any> {
const payload = { prompt, "endpoint":"code_explaining", context, ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
async error_explaining(prompt): Promise<any> {
const payload = { "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
async error_explaining(prompt, options:IParams=GenerationParams): Promise<any> {
const payload = { prompt, "endpoint":"error_explaining", ...options }
if (options.stream_result) return this._streamInferenceRequest(payload.endpoint, payload, AIRequestType.GENERAL)
else return this._makeRequest(payload, AIRequestType.GENERAL)
}
}
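
The net effect of this hunk is a payload schema change: the old positional "data" array becomes named fields plus an explicit endpoint, with generation options spread in from the shared defaults. A before/after sketch (values are the defaults visible above):

// old wire format (positional, order-sensitive):
const oldPayload = { data: ['What is a mapping?', 'solidity_answer', false, 2000, 0.9, 0.8, 50] }

// new wire format (named, endpoint-tagged, options spread from GenerationParams):
const newPayload = { prompt: 'What is a mapping?', endpoint: 'solidity_answer', ...GenerationParams }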

@ -2,7 +2,7 @@ import { RemoteBackendOPModel } from "../types/types"
import { ChatHistory } from "./chat"
export const PromptBuilder = (inst, answr, modelop) => {
if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|><|start_header_id|>assistant<|end_header_id|> ${answr}`
if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|eot_id|>\n<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|> ${answr}\n`
if (modelop === RemoteBackendOPModel.DEEPSEEK) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === RemoteBackendOPModel.MISTRAL) return ""
}
@ -21,8 +21,10 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
else newPrompt += PromptBuilder(question, answer, modelOP)
}
// finaly
newPrompt = "sol-gpt " + newPrompt + PromptBuilder(userPrompt.split('gpt')[1], "", modelOP)
// remove sol-gpt or gpt from the start of the prompt
const parsedPrompt = userPrompt.replace(/^sol-gpt|^gpt/gm, '')
newPrompt = "sol-gpt " + newPrompt + PromptBuilder(parsedPrompt, "", modelOP)
return newPrompt
}
}
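
For reference, what the CODELLAMA branch now emits for a single turn (a worked example of the template above):

PromptBuilder('What is a struct?', '', RemoteBackendOPModel.CODELLAMA)
// => '<|eot_id|>\n<|start_header_id|>user<|end_header_id|>What is a struct?<|eot_id|>\n' +
//    '<|start_header_id|>assistant<|end_header_id|> \n'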

@ -61,6 +61,7 @@ const CompletionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 15,
stream_result: false,
}
const InsertionParams:IParams = {
@ -68,6 +69,7 @@ const InsertionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 150,
stream_result: false,
}
const GenerationParams:IParams = {
@ -76,6 +78,8 @@ const GenerationParams:IParams = {
topP: 0.92,
max_new_tokens: 2000,
stream_result: false,
repeat_penalty: 1.2,
terminal_output: false,
}
export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
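
A hedged sketch of how a caller opts into the new terminal_output flag without mutating the shared defaults (the question and plugin handle are illustrative):

declare const plugin: { call: (...args: any[]) => Promise<any> }

async function askWithTerminalEcho() {
  // clone the shared defaults instead of mutating GenerationParams in place
  const params: IParams = { ...GenerationParams, terminal_output: true, stream_result: false }
  await plugin.call('remixAI', 'solidity_answer', 'What is re-entrancy?', params)
}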

@ -0,0 +1,10 @@
{
"folders": [
{
"path": "../../../.."
},
{
"path": "../../../../../remix-wildcard"
}
]
}

@ -58,7 +58,7 @@ export interface IParams {
temperature?: number;
max_new_tokens?: number;
repetition_penalty?: number;
repeatPenalty?:any
repeat_penalty?:any
no_repeat_ngram_size?: number;
num_beams?: number;
num_return_sequences?: number;
@ -71,6 +71,8 @@ export interface IParams {
topK?: number;
topP?: number;
temp?: number;
return_stream_response?: boolean;
terminal_output?: boolean;
}
export enum AIRequestType {
@ -85,3 +87,48 @@ export enum RemoteBackendOPModel{
CODELLAMA,
MISTRAL
}
interface GeneratedTextObject {
generatedText: string;
isGenerating: boolean;
}
export class JsonStreamParser {
buffer: string
constructor() {
this.buffer = '';
}
safeJsonParse<T>(chunk: string): T[] | null {
this.buffer += chunk;
const results = [];
let startIndex = 0;
let endIndex: number;
while ((endIndex = this.buffer.indexOf('}', startIndex)) !== -1) {
// check if next character is an opening curly bracket
let modifiedEndIndex = endIndex;
if ((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) !== -1 ) {
endIndex = modifiedEndIndex - 1;
}
if (((modifiedEndIndex = this.buffer.indexOf('{', endIndex)) === -1) &&
(this.buffer.indexOf('}', endIndex) < this.buffer.length)) {
endIndex = this.buffer.indexOf('}', endIndex+1) <0 ? this.buffer.length - 1 : this.buffer.indexOf('}', endIndex+1);
}
const jsonStr = this.buffer.slice(startIndex, endIndex + 1);
try {
const obj: GeneratedTextObject = JSON.parse(jsonStr);
results.push(obj);
} catch (error) {
console.error('Error parsing JSON:', error);
}
startIndex = endIndex + 1;
}
this.buffer = this.buffer.slice(startIndex);
return results;
}
safeJsonParseSingle<T>(chunk: string): T[] | null {
return JSON.parse(this.buffer);
}
}
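
Why a buffering parser: streamed chunks can concatenate several JSON objects, or split one object across reads. A sketch of both cases (chunk boundaries are illustrative):

const parser = new JsonStreamParser()
// one complete object plus the start of the next in a single chunk:
parser.safeJsonParse('{"generatedText":"con","isGenerating":true}{"generatedText":"tra')
// -> [{ generatedText: 'con', isGenerating: true }]   (the fragment stays buffered)
// the next chunk completes the pending object:
parser.safeJsonParse('ct","isGenerating":false}')
// -> [{ generatedText: 'tract', isGenerating: false }]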

@ -5,6 +5,8 @@ export interface IRemixAI {
events: {
onStreamResult(streamText: string): Promise<void>,
activated(): Promise<void>,
onInference():void,
onInferenceDone():void,
} & StatusEvents,
methods: {
code_completion(context: string): Promise<string>

@ -0,0 +1,23 @@
import { IParams } from "@remix/remix-ai-core";
import { StatusEvents } from "@remixproject/plugin-utils";
export interface IRemixAID {
events: {
activated():void,
onInference():void,
onInferenceDone():void,
onStreamResult(streamText: string):void,
} & StatusEvents,
methods: {
code_completion(context: string): Promise<string>
code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
code_generation(prompt: string): Promise<string | null>,
code_explaining(code: string, context?: string): Promise<string | null>,
error_explaining(prompt: string): Promise<string | null>,
solidity_answer(prompt: string): Promise<string | null>,
initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise<boolean>,
chatPipe(pipeMessage: string): Promise<void>,
ProcessChatRequestBuffer(params:IParams): Promise<void>,
}
}

@ -16,7 +16,6 @@ import { IMatomoApi } from "./plugins/matomo-api"
import { IRemixAI } from "./plugins/remixai-api"
import { IRemixAID } from "./plugins/remixAIDesktop-api"
export interface ICustomRemixApi extends IRemixApi {
dgitApi: IGitApi
config: IConfigApi

@ -1,6 +1,8 @@
/* eslint-disable no-control-regex */
import { EditorUIProps, monacoTypes } from '@remix-ui/editor';
import { JsonStreamParser } from '@remix/remix-ai-core';
import * as monaco from 'monaco-editor';
const _paq = (window._paq = window._paq || [])
export class RemixInLineCompletionProvider implements monacoTypes.languages.InlineCompletionsProvider {
@ -26,9 +28,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
async provideInlineCompletions(model: monacoTypes.editor.ITextModel, position: monacoTypes.Position, context: monacoTypes.languages.InlineCompletionContext, token: monacoTypes.CancellationToken): Promise<monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>> {
if (context.selectedSuggestionInfo) {
return { items: []};
}
const isActivate = await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
if (!isActivate) return
const currentTime = Date.now();
const timeSinceLastRequest = currentTime - this.lastRequestTime;
@ -61,17 +62,11 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
if (!word.endsWith(' ') &&
!word.endsWith('.') &&
!word.endsWith('"') &&
!word.endsWith('(')) {
return;
}
try {
const isActivate = await await this.props.plugin.call('settings', 'get', 'settings/copilot/suggest/activate')
if (!isActivate) return
} catch (err) {
return;
}
try {
const split = word.split('\n')
if (split.length < 2) return
@ -81,8 +76,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'RemixAI - generating code for following comment: ' + ask.replace('///', '') })
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
this.task = 'code_generation'
_paq.push(['trackEvent', 'ai', 'remixAI', 'code_generation'])
this.task = 'code_generation'
const parsedData = data.trimStart() //JSON.parse(data).trimStart()
const item: monacoTypes.languages.InlineCompletion = {
@ -93,7 +88,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
this.currentCompletion.item = item
return {
items: [item],
enableForwardStability: false
enableForwardStability: true
}
}
} catch (e) {
@ -110,11 +105,6 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return { items: []}; // do not do completion on single and multiline comment
}
// abort if there is a signal
if (token.isCancellationRequested) {
return
}
if (word.replace(/ +$/, '').endsWith('\n')){
// Code insertion
try {
@ -123,6 +113,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
const generatedText = output // no need to clean it. should already be
this.task = 'code_insertion'
_paq.push(['trackEvent', 'ai', 'remixAI', this.task])
const item: monacoTypes.languages.InlineCompletion = {
insertText: generatedText,
range: new monaco.Range(position.lineNumber, position.column, position.lineNumber, position.column)
@ -132,10 +123,11 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return {
items: [item],
enableForwardStability: false,
enableForwardStability: true,
}
}
catch (err){
console.log("err: " + err)
return
}
}
@ -151,8 +143,8 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
if (generatedText.indexOf('@custom:dev-run-script./') !== -1) {
clean = generatedText.replace('@custom:dev-run-script', '@custom:dev-run-script ')
}
clean = clean.replace(word, '').trimStart()
clean = this.process_completion(clean)
clean = clean.replace(word, '')
clean = this.process_completion(clean, word_after)
const item: monacoTypes.languages.InlineCompletion = {
insertText: clean,
@ -163,22 +155,30 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return {
items: [item],
enableForwardStability: true
enableForwardStability: true,
}
} catch (err) {
return
const item: monacoTypes.languages.InlineCompletion = { insertText: " " }
return {
items: [item],
enableForwardStability: true,
}
}
}
process_completion(data: any) {
let clean = data.split('\n')[0].startsWith('\n') ? [data.split('\n')[0], data.split('\n')[1]].join('\n'): data.split('\n')[0]
process_completion(data: any, word_after: any) {
let clean = data
// if clean starts with a comment, remove it
if (clean.startsWith('//') || clean.startsWith('/*') || clean.startsWith('*') || clean.startsWith('*/')){
console.log("clean starts with comment")
return ""
}
// remove comment inline
clean = clean.split('//')[0].trimEnd()
const text_after = word_after.split('\n')[0].trim()
if (clean.toLowerCase().includes(text_after.toLowerCase())){
clean = clean.replace(text_after, '') // apply regex to conserve the case
}
return clean
}
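
Illustrative behaviour of the reworked process_completion: completions that re-emit the text already to the right of the cursor are deduplicated, and comment-only completions are dropped (inputs are hypothetical):

declare const provider: RemixInLineCompletionProvider
provider.process_completion('balance + amount);', ');\n}')   // -> 'balance + amount' (trailing ');' already in editor)
provider.process_completion('// TODO refine', ')')           // -> '' (comment-only completion dropped)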

@ -776,7 +776,11 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.generateDocumentationByAI' }, { content, currentFunction: currentFunction.current })
const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
// do not stream this response
const pipeMessage = `Generate the documentation for the function **${currentFunction.current}**`
// const cm = await await props.plugin.call('remixAI', 'code_explaining', message)
const cm = await props.plugin.call('remixAI' as any, 'chatPipe', 'solidity_answer', message, '', pipeMessage)
const natSpecCom = "\n" + extractNatspecComments(cm)
const cln = await props.plugin.call('codeParser', "getLineColumnOfNode", currenFunctionNode)
@ -827,9 +831,9 @@ export const EditorUI = (props: EditorUIProps) => {
],
run: async () => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content, currentFunction: currentFunction.current })
await props.plugin.call('remixAI', 'code_explaining', message, content)
const context = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content:context, currentFunction: currentFunction.current })
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', message, context)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}
@ -848,8 +852,9 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const selectedCode = editor.getModel().getValueInRange(editor.getSelection())
const pipeMessage = intl.formatMessage({ id: 'editor.ExplainPipeMessage' }, { content:selectedCode })
await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
await props.plugin.call('remixAI' as any, 'chatPipe', 'code_explaining', selectedCode, content, pipeMessage)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}

@ -1 +1 @@
export { RemixAITab } from './lib/components/RemixAI'
export { RemixAITab, ChatApi } from './lib/components/RemixAI'

@ -1,84 +1,83 @@
import React, { useContext, useEffect, useState } from 'react'
import React from 'react'
import '../remix-ai.css'
import { DefaultModels } from '@remix/remix-ai-core';
import { DefaultModels, GenerationParams, ChatHistory, HandleStreamResponse, HandleSimpleResponse } from '@remix/remix-ai-core';
import { ConversationStarter, StreamSend, StreamingAdapterObserver, useAiChatApi } from '@nlux/react';
import { AiChat, useAsStreamAdapter, ChatItem } from '@nlux/react';
import { JsonStreamParser } from '@remix/remix-ai-core';
import { user, assistantAvatar } from './personas';
import { highlighter } from '@nlux/highlighter'
import './color.css'
import '@nlux/themes/unstyled.css';
export let ChatApi = null
export const Default = (props) => {
const [searchText, setSearchText] = useState('');
const [resultText, setResultText] = useState('');
const pluginName = 'remixAI'
const appendText = (newText) => {
setResultText(resultText => resultText + newText);
}
const send: StreamSend = async (
prompt: string,
observer: StreamingAdapterObserver,
) => {
GenerationParams.stream_result = true
GenerationParams.return_stream_response = GenerationParams.stream_result
useEffect(() => {
const handleResultReady = async (e) => {
appendText(e);
};
if (props.plugin.isOnDesktop ) {
props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
handleResultReady(value);
})
let response = null
if (await props.plugin.call('remixAI', 'isChatRequestPending')){
response = await props.plugin.call('remixAI', 'ProcessChatRequestBuffer', GenerationParams);
} else {
response = await props.plugin.call('remixAI', 'solidity_answer', prompt, GenerationParams);
}
}, [])
return (
<div>
<div className="remix_ai_plugin_search_container">
<input
type="text"
className="remix_ai_plugin_search-input"
placeholder="Search..."
value={searchText}
onChange={() => console.log('searchText not implememted')}
></input>
<button
className="remix_ai_plugin_search_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={() => console.log('searchText not implememted')}
>
<i
className="fa-solid fa-arrow-right"
style={{ color: 'black' }}
></i>
<span className="position-relative text-ai text-sm pl-1"
style={{ fontSize: "x-small", alignSelf: "end" }}>Search</span>
</button>
<button className="remix_ai_plugin_download_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={async () => {
if (props.plugin.isOnDesktop ) {
await props.plugin.call(pluginName, 'downloadModel', DefaultModels()[3]);
if (GenerationParams.return_stream_response) HandleStreamResponse(response,
(text) => {observer.next(text)},
(result) => {
observer.next(' ') // Add a space to flush the last message
ChatHistory.pushHistory(prompt, result)
observer.complete()
}
)
else {
observer.next(response)
observer.complete()
}
}}
> Download Model </button>
</div>
};
ChatApi = useAiChatApi();
const conversationStarters: ConversationStarter[] = [
{ prompt: 'Explain briefly the current file in Editor', icon: <span></span> },
{ prompt: 'Explain what a Solidity contract is!' }]
<div className="remix_ai_plugin_find_container_internal">
<textarea
className="remix_ai_plugin_search_result_textbox"
rows={10}
cols={50}
placeholder="Results..."
onChange={(e) => {
console.log('resultText changed', e.target.value)
setResultText(e.target.value)}
// Define initial messages
const initialMessages: ChatItem[] = [
{
role: 'assistant',
message: 'Welcome to Remix AI! How can I assist you today?'
}
value={resultText}
readOnly
/>
<button className="remix_ai_plugin_download_button text-ai pl-2 pr-0 py-0 d-flex"
];
const adapter = useAsStreamAdapter(send, []);
onClick={async () => {
props.plugin.call("remixAI", 'initialize', DefaultModels()[1], DefaultModels()[3]);
return (
<AiChat
api={ChatApi}
adapter={ adapter }
personaOptions={{
assistant: {
name: "Remix AI",
tagline: "Your Web3 AI Assistant",
avatar: assistantAvatar
},
user
}}
//initialConversation={initialMessages}
conversationOptions={{ layout: 'bubbles', conversationStarters }}
displayOptions={{ colorScheme: "auto", themeId: "remix_ai_theme" }}
composerOptions={{ placeholder: "Type your query",
submitShortcut: 'Enter',
hideStopButton: false,
}}
> Init Model </button>
</div>
<div className="remix_ai_plugin_find-part">
<a href="#" className="remix_ai_plugin_search_result_item_title">/fix the problems in my code</a>
<a href="#" className="remix_ai_plugin_search_result_item_title">/tests add unit tests for my code</a>
<a href="#" className="remix_ai_plugin_search_result_item_title">/explain how the selected code works</a>
</div>
</div>
messageOptions={{ showCodeBlockCopyButton: true,
streamingAnimationSpeed: 2,
waitTimeBeforeStreamCompletion: 1000,
syntaxHighlighter: highlighter
}}
/>
);
}
};
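
The chat wiring above follows the nlux streaming-adapter contract. A reduced sketch of that contract, assuming the StreamSend shape imported from @nlux/react (must run inside a React component):

const send: StreamSend = async (prompt: string, observer: StreamingAdapterObserver) => {
  // push incremental text into the open chat bubble...
  observer.next('partial answer...')
  // ...then close the bubble once generation finishes
  observer.complete()
}
// inside a component body:
const adapter = useAsStreamAdapter(send, [])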

@ -1,15 +1,16 @@
import React, { useContext } from 'react'
import '../remix-ai.css'
import { Default } from './Default'
import { Default, ChatApi } from './Default'
export const RemixAITab = (props) => {
const plugin = props.plugin
return (
<>
<div id="remixAITab pr-4 px-2 pb-4">
<div id="remixAITab" className="px-2 pb-4">
<Default plugin={plugin}></Default>
</div>
</>
)
}
export { ChatApi }

@ -0,0 +1,89 @@
.nlux-theme-remix_ai_theme[data-color-scheme='light'] {
--nlux-ChatRoom--BackgroundColor: var(--text-background);
}
.nlux-theme-remix_ai_theme[data-color-scheme='dark'] {
--nlux-ChatRoom--BackgroundColor: var(--text-background);
}
.nlux-theme-remix_ai_theme {
/* Override top-level chat room colors */
--nlux-ChatRoom--BorderColor: #24233d;
--nlux-ChatRoom-Divider--Color: var(--light);
/* --nlux-ChatRoom-Divider--BorderWidth:2px; */
--nlux-ChatRoom--TextColor: var(--text);
/* Override message bubble colors */
--nlux-AiMessage--BackgroundColor: var(--light);
--nlux-HumanMessage--BackgroundColor: var(--text-background);
/* Override border width */
--nlux-ChatRoom--BorderWidth: 0;
--nlux-SubmitButton--BorderWidth: 0;
--nlux-ChatItem-Avatar--BorderWidth: 0;
--nlux-ChatItem-Message-BubbleLayout--BorderWidth: 0;
--nlux-ConversationStarter--BorderWidth: 1;
/* Override border radius */
--nlux-ChatRoom--BorderRadius: 5px;
--nlux-SubmitButton--BorderRadius: 0 10px 10px 0;
--nlux-SubmitButton--Width: 73px;
--nlux-ChatItem-Avatar--BorderRadius: 5px;
--nlux-ChatItem-Message-BubbleLayout--BorderRadius: 5px;
--nlux-ConversationStarter--BorderRadius: 5px;
--nlux-PromptInput-Focus-Outline--Width: 10px;
--nlux-PromptInput-Max-Height: 50px;
--nlux-PromptInput--BorderWidth: 0;
.nlux-comp-composer > textarea {padding: 8px;}
--nlux-PromptInput--BorderRadius: 10px 0 0 10px;
--nlux-PromptInput-Height: 50px;
/* Override input colors */
--nlux-PromptInput--BackgroundColor: var(--light);
--nlux-PromptInput-Active--BackgroundColor: var(--light);
--nlux-PromptInput-Disabled--BackgroundColor: var(--dark);
/* Gap between submit button and input */
--nlux-Composer--Gap: 0;
/* Override submit button colors */
--nlux-SubmitButton--BackgroundColor: var(--primary);
--nlux-SubmitButton-Active--BackgroundColor:var(--primary);
--nlux-SubmitButton-Disabled--BackgroundColor: var(--dark);
--nlux-SubmitButton-Active--TextColor: var(--text);
--nlux-SubmitButton-Disabled--TextColor: var(--text);
/** Inline code in markdown */
--nlux-InlineCode--BorderRadius: 6px;
--nlux-InlineCode--BorderWidth: 0.5px;
--nlux-InlineCode--Padding: 0 2px;
--nlux-InlineCode--FontSize: 14px;
/*code block */
--nlux-CodeBlock-CopyButton--BackgroundColor: var(--bg-text);
--nlux-CodeBlock-CopyButton--TextColor: var(--text);
/*codeblock*/
/*--nlux-CodeBlock--BackgroundColor: var(--body-bg);*/
--nlux-CodeBlock--BackgroundColor: var(--bg-text);
--nlux-CodeBlock--BorderColor: var(--secondary);
--nlux-CodeBlock--Padding: 20px;
--nlux-CodeBlock--TextColor: var(--text);
--nlux-CodeBlock--FontSize: 14px;
--nlux-cvStrt--wd: var(--nlux-ConversationStarter--Width, 100px);
/* Conversation starter colors */
--nlux-ConversationStarter--BackgroundColor: var(--light);
--nlux-copy-icon: url('data:image/svg+xml,\
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor">\
<path fill-rule="evenodd" clip-rule="evenodd" d="M15 1.25H10.9436C9.10583 1.24998 7.65019 1.24997 6.51098 1.40314C5.33856 1.56076 4.38961 1.89288 3.64124 2.64124C2.89288 3.38961 2.56076 4.33856 2.40314 5.51098C2.24997 6.65019 2.24998 8.10582 2.25 9.94357V16C2.25 17.8722 3.62205 19.424 5.41551 19.7047C5.55348 20.4687 5.81753 21.1208 6.34835 21.6517C6.95027 22.2536 7.70814 22.5125 8.60825 22.6335C9.47522 22.75 10.5775 22.75 11.9451 22.75H15.0549C16.4225 22.75 17.5248 22.75 18.3918 22.6335C19.2919 22.5125 20.0497 22.2536 20.6517 21.6517C21.2536 21.0497 21.5125 20.2919 21.6335 19.3918C21.75 18.5248 21.75 17.4225 21.75 16.0549V10.9451C21.75 9.57754 21.75 8.47522 21.6335 7.60825C21.5125 6.70814 21.2536 5.95027 20.6517 5.34835C20.1208 4.81753 19.4687 4.55348 18.7047 4.41551C18.424 2.62205 16.8722 1.25 15 1.25ZM17.1293 4.27117C16.8265 3.38623 15.9876 2.75 15 2.75H11C9.09318 2.75 7.73851 2.75159 6.71085 2.88976C5.70476 3.02502 5.12511 3.27869 4.7019 3.7019C4.27869 4.12511 4.02502 4.70476 3.88976 5.71085C3.75159 6.73851 3.75 8.09318 3.75 10V16C3.75 16.9876 4.38624 17.8265 5.27117 18.1293C5.24998 17.5194 5.24999 16.8297 5.25 16.0549V10.9451C5.24998 9.57754 5.24996 8.47522 5.36652 7.60825C5.48754 6.70814 5.74643 5.95027 6.34835 5.34835C6.95027 4.74643 7.70814 4.48754 8.60825 4.36652C9.47522 4.24996 10.5775 4.24998 11.9451 4.25H15.0549C15.8297 4.24999 16.5194 4.24998 17.1293 4.27117ZM7.40901 6.40901C7.68577 6.13225 8.07435 5.9518 8.80812 5.85315C9.56347 5.75159 10.5646 5.75 12 5.75H15C16.4354 5.75 17.4365 5.75159 18.1919 5.85315C18.9257 5.9518 19.3142 6.13225 19.591 6.40901C19.8678 6.68577 20.0482 7.07435 20.1469 7.80812C20.2484 8.56347 20.25 9.56458 20.25 11V16C20.25 17.4354 20.2484 18.4365 20.1469 19.1919C20.0482 19.9257 19.8678 20.3142 19.591 20.591C19.3142 20.8678 18.9257 21.0482 18.1919 21.1469C17.4365 21.2484 16.4354 21.25 15 21.25H12C10.5646 21.25 9.56347 21.2484 8.80812 21.1469C8.07435 21.0482 7.68577 20.8678 7.40901 20.591C7.13225 20.3142 6.9518 19.9257 6.85315 19.1919C6.75159 18.4365 6.75 17.4354 6.75 16V11C6.75 9.56458 6.75159 8.56347 6.85315 7.80812C6.9518 7.07435 7.13225 6.68577 7.40901 6.40901Z" fill="currentColor"/>\
</svg>\
');
/* Override icon for the send button */
--nlux-send-icon: url('data:image/svg+xml, <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--!Font Awesome Free 6.6.0 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free Copyright 2024 Fonticons, Inc.--><path d="M0 256a256 256 0 1 0 512 0A256 256 0 1 0 0 256zM297 385c-9.4 9.4-24.6 9.4-33.9 0s-9.4-24.6 0-33.9l71-71L120 280c-13.3 0-24-10.7-24-24s10.7-24 24-24l214.1 0-71-71c-9.4-9.4-9.4-24.6 0-33.9s24.6-9.4 33.9 0L409 239c9.4 9.4 9.4 24.6 0 33.9L297 385z"/></svg>');
}

@ -0,0 +1,8 @@
import { PersonaOptions, UserPersona } from '@nlux/react';
export const user: UserPersona = {
name: 'Pipper',
avatar: 'assets/img/remix-logo-blue.png'
};
export const assistantAvatar = 'assets/img/remi-prof.webp';

@ -0,0 +1,47 @@
// const demoProxyServerUrl = 'https://solcoder.remixproject.org';
// export const send: StreamSend = async (
// prompt: string,
// observer: StreamingAdapterObserver,
// plugin: any,
// ) => {
// const body = {"data": [prompt, 'solidity_answer', false,2000,0.9,0.8,50]};
// const response = await axios(demoProxyServerUrl, {
// method: 'POST',
// headers: {'Content-Type': 'application/json'},
// data: JSON.stringify(body),
// });
// console.log(plugin);
// const result = await plugin.call('remixAI', 'solidity_answer', prompt);
// if (response.status !== 200) {
// observer.error(new Error('Failed to connect to the server'));
// return;
// }
// if (response.statusText !== "OK") {
// return;
// }
// // Read a stream of server-sent events
// // and feed them to the observer as they are being generated
// // const reader = response.body.getReader();
// // const textDecoder = new TextDecoder();
// // while (true) {
// // const {value, done} = await reader.read();
// // if (done) {
// // break;
// // }
// // const content = textDecoder.decode(value);
// // if (content) {
// // observer.next(content);
// // }
// // }
// observer.next(response.data.data[0]);
// observer.complete();
// };

@ -90,7 +90,7 @@ export const Renderer = ({ message, opt, plugin, context }: RendererProps) => {
try {
const content = await plugin.call('fileManager', 'readFile', editorOptions.errFile)
const message = intl.formatMessage({ id: `${context || 'solidity' }.openaigptMessage` }, { content, messageText })
await plugin.call('remixAI', 'error_explaining', message)
await plugin.call('remixAI' as any, 'chatPipe', 'error_explaining', message)
_paq.push(['trackEvent', 'ai', 'remixAI', 'error_explaining_SolidityError'])
} catch (err) {
console.error('unable to askGpt')

@ -106,7 +106,6 @@ export const TabsUI = (props: TabsUIProps) => {
}
const renderTab = (tab: Tab, index) => {
const classNameImg = 'my-1 mr-1 text-dark ' + tab.iconClass
const classNameTab = 'nav-item nav-link d-flex justify-content-center align-items-center px-2 py-1 tab' + (index === currentIndexRef.current ? ' active' : '')
const invert = props.themeQuality === 'dark' ? 'invert(1)' : 'invert(0)'
@ -251,7 +250,32 @@ export const TabsUI = (props: TabsUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', path)
if (tabsState.currentExt === 'sol') {
setExplaining(true)
await props.plugin.call('remixAI', 'code_explaining', content)
// if plugin is pinned,
if (await props.plugin.call('pinnedPanel', 'currentFocus') === 'remixAI'){
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}
else {
const profile = {
name: 'remixAI',
displayName: 'Remix AI',
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initialize", 'chatPipe', 'ProcessChatRequestBuffer', 'isChatRequestPending'],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
kind: '',
location: 'sidePanel',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
maintainedBy: 'Remix'
}
// await props.plugin.call('sidePanel', 'focus', 'remixAI')
await props.plugin.call('sidePanel', 'pinView', profile)
setTimeout(async () => {
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}, 500)
}
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
}

@ -238,11 +238,11 @@ export const RemixUiTerminal = (props: RemixUiTerminalProps) => {
// TODO: rm gpt or redirect gpt to sol-gpt
} else if (script.trim().startsWith('gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
await call('remixAI', 'solidity_answer', script)
await call('remixAI', 'solidity_answer', script) // No streaming supported in terminal
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else if (script.trim().startsWith('sol-gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
await call('remixAI', 'solidity_answer', script)
await call('remixAI', 'solidity_answer', script) // No streaming supported in terminal
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else {
await call('scriptRunner', 'execute', script)

@ -106,6 +106,10 @@
"@isomorphic-git/lightning-fs": "^4.4.1",
"@metamask/eth-sig-util": "^7.0.2",
"@microlink/react-json-view": "^1.23.0",
"@nlux/core": "^2.17.1",
"@nlux/highlighter": "^2.17.1",
"@nlux/react": "^2.17.1",
"@nlux/themes": "^2.17.1",
"@openzeppelin/contracts": "^5.0.0",
"@openzeppelin/upgrades-core": "^1.30.0",
"@openzeppelin/wizard": "0.4.0",

@ -5311,6 +5311,31 @@
pathval "1.1.1"
type-detect "4.0.8"
"@nlux/core@2.17.1", "@nlux/core@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/core/-/core-2.17.1.tgz#18a95e21e5aafae83bf6d515651780497f0f39cc"
integrity sha512-hIvOnuENVqWaIg5Co4JtFmHph7Sp0Nj+QixOMdOW9Ou7CjU7HK+maB5koLoayNL64B+wHTtgPN7zBrB8NCSPXw==
"@nlux/highlighter@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/highlighter/-/highlighter-2.17.1.tgz#e4d0f43b5afeff2631bc118b1cc6db80afb7e99f"
integrity sha512-/ETnJPbNJWY8ZQH6XAQ5zooEMPsy44Lk2tIxMfr5Ca7+0ICpkMP0mppOmAoKCQBNVsqmKe0oczFBk8blddNDaA==
dependencies:
"@nlux/core" "2.17.1"
highlight.js "^11"
"@nlux/react@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/react/-/react-2.17.1.tgz#e4668e7cbe42dd195ea86a02350c8b15cb8f48f0"
integrity sha512-/t6qDAHIefg1vGIthLOtkQxbI4Sh/aL7/eqVuhcoC1w/8NqnvVxwfxR0mkshcIVrKSwHI8Yjav5edZ2yeRBqMw==
dependencies:
"@nlux/core" "2.17.1"
"@nlux/themes@^2.17.1":
version "2.17.1"
resolved "https://registry.yarnpkg.com/@nlux/themes/-/themes-2.17.1.tgz#f991b1b5fcf9595e59d0abeb76f9997876b44784"
integrity sha512-spD3QJBSdkF+q45rQFFsUQcR4pTy3OEjQywEP+yc9dHcuPrxIMb0/W/whwiHn1aePGL758lKQH3E/NRHA4aSAw==
"@noble/curves@1.0.0", "@noble/curves@~1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.0.0.tgz#e40be8c7daf088aaf291887cbc73f43464a92932"
@ -17700,6 +17725,11 @@ hey-listen@^1.0.8:
resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
highlight.js@^11:
version "11.10.0"
resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.10.0.tgz#6e3600dc4b33d6dc23d5bd94fbf72405f5892b92"
integrity sha512-SYVnVFswQER+zu1laSya563s+F8VDGt7o35d4utbamowvUNLLMovFqwCLSocpZTz3MgaSRA1IbqRWZv97dtErQ==
hmac-drbg@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1"
