mv solcoder to remixAI

pull/5100/head
STetsing 2 months ago
parent 5ee1f2fc9c
commit ee852709a9
Changed files:
  1. apps/remix-ide/src/app.js (9 changed lines)
  2. apps/remix-ide/src/app/plugins/solcoderAI.tsx (283 changed lines)
  3. apps/remix-ide/src/remixAppManager.js (1 changed line)
  4. libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts (6 changed lines)
  5. libs/remix-ui/editor/src/lib/remix-ui-editor.tsx (2 changed lines)
  6. libs/remix-ui/remix-ai/src/lib/components/ModelSelection.tsx (15 changed lines)

@@ -263,7 +263,7 @@ class AppComponent {
const contractFlattener = new ContractFlattener()
// ----------------- AI --------------------------------------
const solcoder = new SolCoder()
const remixAI = new RemixAIPlugin(isElectron())
// ----------------- import content service ------------------------
const contentImport = new CompilerImports()
@@ -386,11 +386,11 @@ class AppComponent {
contractFlattener,
solidityScript,
templates,
solcoder,
git,
pluginStateLogger,
matomo,
templateSelection
templateSelection,
remixAI
])
//---- fs plugin
@@ -412,8 +412,6 @@ class AppComponent {
const remixAIDesktop = new remixAIDesktopPlugin()
this.engine.register([remixAIDesktop])
}
const remixAI = new RemixAIPlugin(isElectron())
this.engine.register([remixAI])
const compilerloader = isElectron() ? new compilerLoaderPluginDesktop() : new compilerLoaderPlugin()
this.engine.register([compilerloader])
@@ -567,7 +565,6 @@ class AppComponent {
}
)
await this.appManager.activatePlugin(['solidity-script'])
await this.appManager.activatePlugin(['solcoder'])
await this.appManager.activatePlugin(['filePanel'])
// Set workspace after initial activation
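Net effect of the app.js hunks above: RemixAIPlugin is constructed once and registered with the main plugin batch, while the standalone solcoder instance, the separate remixAI registration block, and the explicit activatePlugin(['solcoder']) call are all dropped. A minimal sketch of the resulting flow, reusing only the calls visible in this diff (anything else is illustrative):

// sketch only – mirrors the register/activate pattern shown in the hunks above
const remixAI = new RemixAIPlugin(isElectron())

this.engine.register([
  // ...rest of the main plugin batch (contractFlattener, templates, git, matomo, ...)
  templateSelection,
  remixAI
])

// no dedicated activation for the AI plugin anymore:
// await this.appManager.activatePlugin(['solcoder'])   <-- removed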

@@ -1,283 +0,0 @@
import { Plugin } from '@remixproject/engine'
import axios from 'axios'
export type SuggestOptions = {
max_new_tokens: number,
temperature: number,
do_sample:boolean
top_k: number,
top_p:number,
stream_result:boolean
}
const _paq = (window._paq = window._paq || [])
const profile = {
name: 'solcoder',
displayName: 'solcoder',
description: 'solcoder',
methods: ['code_generation', 'code_completion', "solidity_answer", "code_explaining", "code_insertion", "error_explaining"],
events: [],
maintainedBy: 'Remix',
}
type ChatEntry = [string, string];
enum BackendOPModel{
DeepSeek,
CodeLLama,
Mistral
}
const PromptBuilder = (inst, answr, modelop) => {
if (modelop === BackendOPModel.CodeLLama) return ""
if (modelop === BackendOPModel.DeepSeek) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === BackendOPModel.Mistral) return ""
}
export class SolCoder extends Plugin {
api_url: string
completion_url: string
solgpt_chat_history:ChatEntry[]
max_history = 7
model_op = BackendOPModel.DeepSeek
constructor() {
super(profile)
this.api_url = "https://solcoder.remixproject.org"
this.completion_url = "https://completion.remixproject.org"
this.solgpt_chat_history = []
}
pushChatHistory(prompt, result){
const chat:ChatEntry = [prompt, result.data[0]]
this.solgpt_chat_history.push(chat)
if (this.solgpt_chat_history.length > this.max_history){this.solgpt_chat_history.shift()}
}
async code_generation(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_generation'])
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}),
})
).json()
if ("error" in result){
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.error })
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async solidity_answer(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'answering'])
let result
try {
const main_prompt = this._build_solgpt_promt(prompt)
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[main_prompt, "solidity_answer", false,1000,0.9,0.8,50]}),
})
).json()
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
this.solgpt_chat_history = []
return
} finally {
this.emit("aiInferingDone")
}
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.pushChatHistory(prompt, result)
} else if (result.error) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "Error on request" })
}
}
async code_explaining(prompt, context:string=""): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'explaining'])
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}),
})
).json()
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.pushChatHistory(prompt, result)
}
return result.data[0]
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async code_completion(prompt, options:SuggestOptions=null): Promise<any> {
this.emit("aiInfering")
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_completion'])
let result
try {
result = await(
await fetch(this.completion_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data": !options? [
prompt, // string in 'context_code' Textbox component
"code_completion",
"", // string in 'comment' Textbox component
false, // boolean in 'stream_result' Checkbox component
30, // number (numeric value between 0 and 2000) in 'max_new_tokens' Slider component
0.9, // number (numeric value between 0.01 and 1) in 'temperature' Slider component
0.90, // number (numeric value between 0 and 1) in 'top_p' Slider component
50, // number (numeric value between 1 and 200) in 'top_k' Slider component
] : [
prompt,
"code_completion",
"",
options.stream_result,
options.max_new_tokens,
options.temperature,
options.top_p,
options.top_k
]}),
})
).json()
if ("error" in result){
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async code_insertion(msg_pfx, msg_sfx): Promise<any> {
this.emit("aiInfering")
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_insertion'])
let result
try {
result = await(
await fetch(this.completion_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[
msg_pfx, // Text before current cursor line
"code_insertion",
msg_sfx, // Text after current cursor line
1024,
0.5,
0.92,
50
]}),
})
).json()
if ("error" in result){
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async error_explaining(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'explaining'])
let result
try {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value:"explaining with axios" })
result = await axios(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
data: JSON.stringify({ "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}),
})
if (result.statusText === "OK") {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data.data[0] })
this.pushChatHistory(prompt, result)
}
return result.data.data[0]
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
_build_solgpt_promt(user_promt:string){
if (this.solgpt_chat_history.length === 0){
return user_promt
} else {
let new_promt = ""
for (const [question, answer] of this.solgpt_chat_history) {
new_promt += PromptBuilder(question.split('sol-gpt')[1], answer, this.model_op)
}
// finaly
new_promt = "sol-gpt " + new_promt + PromptBuilder(user_promt.split('sol-gpt')[1], "", this.model_op)
return new_promt
}
}
}
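With solcoderAI.tsx deleted, consumers reach the same backend through the remixAI plugin, as the inlineCompletionProvider hunk further down shows (plugin.call('remixAI', 'code_insertion', ...)). A hedged sketch of a call site, assuming RemixAIPlugin keeps the method names from the deleted profile (code_completion, code_insertion, solidity_answer, code_explaining, error_explaining); the prefix/suffix strings are made up for illustration:

// sketch only – `plugin` is any @remixproject/engine plugin host exposing call()
const msg_pfx = 'contract Token {\n    '   // text before the current cursor line
const msg_sfx = '\n}'                      // text after the current cursor line

// same method name as in the deleted SolCoder profile, now served by 'remixAI'
const data = await plugin.call('remixAI', 'code_insertion', msg_pfx, msg_sfx)

// assumed to carry over unchanged; only code_insertion is confirmed by this diff
await plugin.call('remixAI', 'solidity_answer', 'sol-gpt what is a reentrancy guard?')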

@@ -72,7 +72,6 @@ let requiredModules = [ // services + layout views + system views
'vyperCompilationDetails',
'contractflattener',
'solidity-script',
'solcoder',
'home',
'doc-viewer',
'doc-gen',

@@ -77,7 +77,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
const ask = split[split.length - 2].trimStart()
if (split[split.length - 1].trim() === '' && ask.startsWith('///')) {
// use the code generation model, only take max 1000 word as context
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'Solcoder - generating code for following comment: ' + ask.replace('///', '') })
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'RemixAI - generating code for following comment: ' + ask.replace('///', '') })
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
this.task = 'code_generation'
@@ -178,11 +178,11 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
handleItemDidShow?(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>, item: monacoTypes.languages.InlineCompletion, updatedInsertText: string): void {
this.currentCompletion.displayed = true
_paq.push(['trackEvent', 'ai', 'solcoder', this.task + '_did_show'])
_paq.push(['trackEvent', 'ai', 'remixAI', this.task + '_did_show'])
}
handlePartialAccept?(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>, item: monacoTypes.languages.InlineCompletion, acceptedCharacters: number): void {
this.currentCompletion.accepted = true
_paq.push(['trackEvent', 'ai', 'solcoder', this.task + '_partial_accept'])
_paq.push(['trackEvent', 'ai', 'remixAI', this.task + '_partial_accept'])
}
freeInlineCompletions(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>): void {
}

@@ -708,7 +708,7 @@ export const EditorUI = (props: EditorUIProps) => {
const changes = e.changes;
// Check if the change matches the current completion
if (changes.some(change => change.text === inlineCompletionProvider.currentCompletion.item.insertText)) {
_paq.push(['trackEvent', 'ai', 'solcoder', inlineCompletionProvider.currentCompletion.task + '_accepted'])
_paq.push(['trackEvent', 'ai', 'remixAI', inlineCompletionProvider.currentCompletion.task + '_accepted'])
console.log('Accepted completion', inlineCompletionProvider.currentCompletion)
}
}

@@ -8,11 +8,10 @@
// Once selected, the model is either loaded from the local storage or downloaded
// the remix ai desktop plugin provided the interface for storing the model in the local storage after downloading
import React, { useState, useEffect } from 'react';
import { Select, Input, Button, Icon } from 'antd';
import { Model } from '@remix/remix-ai-core';
import { getModels } from '../services';
import { IModel } from '@remix/remix-ai-core';
import { DefaultModels } from '@remix/remix-ai-core';
import { ModelType } from '@remix/remix-ai-core';
import { useTranslation } from 'react-i18next';
@@ -20,24 +19,24 @@ const { Option } = Select;
const { Search } = Input;
interface ModelSelectionProps {
onSelect: (model: Model) => void;
onSelect: (model: IModel) => void;
}
export const ModelSelection: React.FC<ModelSelectionProps> = ({ onSelect }) => {
const { t } = useTranslation();
const [models, setModels] = useState<Model[]>([]);
const [filteredModels, setFilteredModels] = useState<Model[]>([]);
const [models, setModels] = useState<IModel[]>([]);
const [filteredModels, setFilteredModels] = useState<IModel[]>([]);
const [search, setSearch] = useState<string>('');
const [type, setType] = useState<ModelType | 'all'>('all');
useEffect(() => {
getModels().then(setModels);
setModels(DefaultModels());
}, []);
useEffect(() => {
setFilteredModels(models.filter((model) => {
return model.name.toLowerCase().includes(search.toLowerCase()) &&
(type === 'all' || model.type === type);
(type === 'all' || model.modelType === type);
}));
}, [models, search, type]);
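After this change the component types its models as IModel, seeds them synchronously from DefaultModels() instead of the removed getModels() service, and filters on modelType. A short usage sketch, assuming only the props and fields visible in the diff (the parent component name and handler are illustrative):

// illustrative parent; ModelSelection, IModel, name and modelType come from the diff
import React from 'react';
import { IModel } from '@remix/remix-ai-core';
import { ModelSelection } from './ModelSelection';

export const RemixAISettings: React.FC = () => {
  const handleSelect = (model: IModel) => {
    // e.g. hand the chosen model to the desktop plugin for download / local loading
    console.log('selected model:', model.name, model.modelType);
  };
  return <ModelSelection onSelect={handleSelect} />;
};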
