Merge pull request #5100 from ethereum/desktope2e-remixai

Desktope2e remixai
Aniket 2 months ago committed by GitHub
commit 299a88ca78
58 changed files (changed lines in parentheses):

1. .gitignore (2)
2. apps/circuit-compiler/src/app/components/container.tsx (6)
3. apps/remix-ide/src/app.js (19)
4. apps/remix-ide/src/app/plugins/electron/remixAIDesktopPlugin.tsx (33)
5. apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (169)
6. apps/remix-ide/src/app/plugins/solcoderAI.tsx (283)
7. apps/remix-ide/src/app/tabs/locales/en/editor.json (2)
8. apps/remix-ide/src/remixAppManager.js (5)
9. apps/remix-ide/src/remixEngine.js (3)
10. apps/remixdesktop/package-lock.json (14684)
11. apps/remixdesktop/src/engine.ts (3)
12. apps/remixdesktop/src/lib/InferenceServerManager.ts (525)
13. apps/remixdesktop/src/lib/databatcher.ts (2)
14. apps/remixdesktop/src/plugins/remixAIDektop.ts (115)
15. apps/remixdesktop/src/preload.ts (2)
16. apps/remixdesktop/src/utils/config.ts (6)
17. apps/remixdesktop/tsconfig.json (3)
18. apps/vyper/src/app/utils/remix-client.tsx (2)
19. libs/remix-ai-core/.eslintrc (1)
20. libs/remix-ai-core/README.md (7)
21. libs/remix-ai-core/package-lock.json (5344)
22. libs/remix-ai-core/project.json (32)
23. libs/remix-ai-core/src/agents/codeExplainAgent.ts (29)
24. libs/remix-ai-core/src/agents/completionAgent.ts (23)
25. libs/remix-ai-core/src/agents/securityAgent.ts (29)
26. libs/remix-ai-core/src/helpers/dowload_model.ts (0)
27. libs/remix-ai-core/src/helpers/inferenceServerReleases.ts (55)
28. libs/remix-ai-core/src/index.ts (20)
29. libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (141)
30. libs/remix-ai-core/src/prompts/chat.ts (21)
31. libs/remix-ai-core/src/prompts/completionPrompts.ts (18)
32. libs/remix-ai-core/src/prompts/promptBuilder.ts (28)
33. libs/remix-ai-core/src/types/constants.ts (9)
34. libs/remix-ai-core/src/types/models.ts (81)
35. libs/remix-ai-core/src/types/types.ts (87)
36. libs/remix-ai-core/tsconfig.json (10)
37. libs/remix-ai-core/tsconfig.lib.json (15)
38. libs/remix-api/src/lib/plugins/remixAIDesktop-api.ts (23)
39. libs/remix-api/src/lib/plugins/remixai-api.ts (21)
40. libs/remix-api/src/lib/plugins/terminal-api.ts (1)
41. libs/remix-api/src/lib/remix-api.ts (5)
42. libs/remix-ui/editor/src/lib/providers/completionTimer.ts (31)
43. libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts (84)
44. libs/remix-ui/editor/src/lib/remix-ui-editor.tsx (16)
45. libs/remix-ui/remix-ai/src/index.ts (1)
46. libs/remix-ui/remix-ai/src/lib/components/Default.tsx (84)
47. libs/remix-ui/remix-ai/src/lib/components/ModelSelection.tsx (78)
48. libs/remix-ui/remix-ai/src/lib/components/RemixAI.tsx (15)
49. libs/remix-ui/remix-ai/src/lib/remix-ai.css (167)
50. libs/remix-ui/renderer/src/lib/renderer.tsx (4)
51. libs/remix-ui/settings/src/lib/remix-ui-settings.tsx (2)
52. libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx (6)
53. libs/remix-ui/terminal/src/lib/remix-ui-terminal.tsx (8)
54. libs/remix-ui/workspace/src/lib/actions/workspace.ts (11)
55. package-lock.json (69639)
56. package.json (4)
57. tsconfig.paths.json (7)
58. yarn.lock (71)

.gitignore

@@ -66,6 +66,6 @@ apps/remixdesktop/build*/
apps/remix-ide/src/assets/list.json
apps/remix-ide/src/assets/esbuild.wasm
apps/remixdesktop/build*
apps/remixdesktop/reports/
apps/remixdesktop/reports
apps/remixdesktop/logs/
logs

apps/circuit-compiler/src/app/components/container.tsx

@@ -74,7 +74,7 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('solcoder', 'error_explaining', message)
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
} else {
const message = `
error message: ${error}
@@ -82,7 +82,7 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('solcoder', 'error_explaining', message)
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
}
} else {
const error = report.message
@@ -92,7 +92,7 @@ export function Container () {
explain why the error occurred and how to fix it.
`
// @ts-ignore
await circuitApp.plugin.call('solcoder', 'error_explaining', message)
await circuitApp.plugin.call('remixAI', 'error_explaining', message)
}
}

apps/remix-ide/src/app.js

@@ -57,6 +57,8 @@ import { xtermPlugin } from './app/plugins/electron/xtermPlugin'
import { ripgrepPlugin } from './app/plugins/electron/ripgrepPlugin'
import { compilerLoaderPlugin, compilerLoaderPluginDesktop } from './app/plugins/electron/compilerLoaderPlugin'
import { appUpdaterPlugin } from './app/plugins/electron/appUpdaterPlugin'
import { remixAIDesktopPlugin } from './app/plugins/electron/remixAIDesktopPlugin'
import { RemixAIPlugin } from './app/plugins/remixAIPlugin'
import { SlitherHandleDesktop } from './app/plugins/electron/slitherPlugin'
import { SlitherHandle } from './app/files/slither-handle'
import { FoundryHandle } from './app/files/foundry-handle'
@@ -64,12 +66,9 @@ import { FoundryHandleDesktop } from './app/plugins/electron/foundryPlugin'
import { HardhatHandle } from './app/files/hardhat-handle'
import { HardhatHandleDesktop } from './app/plugins/electron/hardhatPlugin'
import { SolCoder } from './app/plugins/solcoderAI'
import { GitPlugin } from './app/plugins/git'
import { Matomo } from './app/plugins/matomo'
import { TemplatesSelectionPlugin } from './app/plugins/templates-selection/templates-selection-plugin'
const isElectron = require('is-electron')
@@ -270,7 +269,7 @@ class AppComponent {
const contractFlattener = new ContractFlattener()
// ----------------- AI --------------------------------------
const solcoder = new SolCoder()
const remixAI = new RemixAIPlugin(isElectron())
// ----------------- import content service ------------------------
const contentImport = new CompilerImports()
@@ -393,11 +392,11 @@ class AppComponent {
contractFlattener,
solidityScript,
templates,
solcoder,
git,
pluginStateLogger,
matomo,
templateSelection
templateSelection,
remixAI
])
//---- fs plugin
@@ -416,6 +415,8 @@ class AppComponent {
this.engine.register([ripgrep])
const appUpdater = new appUpdaterPlugin()
this.engine.register([appUpdater])
const remixAIDesktop = new remixAIDesktopPlugin()
this.engine.register([remixAIDesktop])
}
const compilerloader = isElectron() ? new compilerLoaderPluginDesktop() : new compilerLoaderPlugin()
@@ -547,7 +548,8 @@ class AppComponent {
'fetchAndCompile',
'contentImport',
'gistHandler',
'compilerloader'
'compilerloader',
'remixAI'
])
await this.appManager.activatePlugin(['settings'])
@@ -555,7 +557,7 @@ class AppComponent {
await this.appManager.activatePlugin(['solidity-script', 'remix-templates'])
if (isElectron()) {
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat'])
await this.appManager.activatePlugin(['isogit', 'electronconfig', 'electronTemplates', 'xterm', 'ripgrep', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID'])
}
this.appManager.on(
@@ -570,7 +572,6 @@ class AppComponent {
}
)
await this.appManager.activatePlugin(['solidity-script'])
await this.appManager.activatePlugin(['solcoder'])
await this.appManager.activatePlugin(['filePanel'])
// Set workspace after initial activation
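
The wiring above follows the engine's two-step lifecycle: register() only makes a plugin known to the engine, while activatePlugin() actually starts it and triggers onActivation(). A minimal sketch of the same pattern in isolation, assuming the @remixproject/engine API already used in this file (the boot function is illustrative, not part of this PR):

import { Engine, PluginManager } from '@remixproject/engine'
import { RemixAIPlugin } from './app/plugins/remixAIPlugin'

async function boot () {
  const engine = new Engine()
  const manager = new PluginManager()
  engine.register(manager)
  // in the browser, RemixAIPlugin(false) uses the RemoteInferencer;
  // on Electron, RemixAIPlugin(true) forwards to the 'remixAID' desktop plugin
  const remixAI = new RemixAIPlugin(false)
  engine.register([remixAI])
  await manager.activatePlugin(['remixAI'])
}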

apps/remix-ide/src/app/plugins/electron/remixAIDesktopPlugin.tsx (new file)

@@ -0,0 +1,33 @@
import { ElectronPlugin } from '@remixproject/engine-electron'
import { IModel, ModelType, DefaultModels } from '@remix/remix-ai-core';
import axios from 'axios';
import fs from 'fs';
const desktop_profile = {
name: 'remixAID',
displayName: 'RemixAI Desktop',
maintainedBy: 'Remix',
description: 'RemixAI provides AI services to Remix IDE Desktop.',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
icon: 'assets/img/remix-logo-blue.png',
methods: ['initializeModelBackend', 'code_completion', 'code_insertion', 'code_generation', 'code_explaining', 'error_explaining', 'solidity_answer'],
}
export class remixAIDesktopPlugin extends ElectronPlugin {
constructor() {
super(desktop_profile)
}
onActivation(): void {
this.on('remixAI', 'enabled', () => {} )
console.log('remixAIDesktopPlugin activated')
}
}
// class RemixAIPlugin extends ElectronPlugin {
// constructor() {
// super(dek)
// this.methods = ['downloadModel']
// }
// }

apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (new file)

@@ -0,0 +1,169 @@
import * as packageJson from '../../../../../package.json'
import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
import { ICompletions, IModel, RemoteInferencer, IRemoteModel } from '@remix/remix-ai-core';
const profile = {
name: 'remixAI',
displayName: 'Remix AI',
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initialize"],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
kind: '',
// location: 'sidePanel',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
version: packageJson.version,
maintainedBy: 'Remix'
}
export class RemixAIPlugin extends Plugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
isInferencing: boolean = false
constructor(inDesktop:boolean) {
super(profile)
this.isOnDesktop = inDesktop
// on the user's machine, don't use local resources for remote inferencing
}
onActivation(): void {
this.initialize(null, null, null, false)
}
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
// on desktop, delegate backend setup to the desktop plugin (useRemote selects remote vs local inference)
console.log('initialize on desktop')
const res = await this.call(this.remixDesktopPluginName, 'initializeModelBackend', useRemote, model1, model2)
if (res) {
this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
this.call('terminal', 'log', { type: 'log', value: value })
})
this.on(this.remixDesktopPluginName, 'onInference', () => {
this.isInferencing = true
})
this.on(this.remixDesktopPluginName, 'onInferenceDone', () => {
this.isInferencing = false
})
}
} else {
// on browser
this.remoteInferencer = new RemoteInferencer(remoteModel?.apiUrl, remoteModel?.completionUrl)
this.remoteInferencer.event.on('onInference', () => {
this.isInferencing = true
})
this.remoteInferencer.event.on('onInferenceDone', () => {
this.isInferencing = false
})
}
this.aiIsActivated = true
return true
}
async code_generation(prompt: string): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
if (this.isOnDesktop) {
return await this.call(this.remixDesktopPluginName, 'code_generation', prompt)
} else {
return await this.remoteInferencer.code_generation(prompt)
}
}
async code_completion(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return await this.call(this.remixDesktopPluginName, 'code_completion', prompt)
} else {
return await this.remoteInferencer.code_completion(prompt)
}
}
async solidity_answer(prompt: string): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
} else {
result = await this.remoteInferencer.solidity_answer(prompt)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
async code_explaining(prompt: string): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
} else {
result = await this.remoteInferencer.code_explaining(prompt)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
async error_explaining(prompt: string): Promise<any> {
if (this.isInferencing) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI is already busy!" })
return
}
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
let result
if (this.isOnDesktop) {
result = await this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
result = await this.remoteInferencer.error_explaining(prompt)
}
if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
// this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
return result
}
async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
if (this.isOnDesktop) {
return await this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
} else {
return await this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
}
}
// render() {
// return (
// <RemixAITab plugin={this}></RemixAITab>
// )
// }
}
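
Every public method above is a thin dispatcher: on desktop it forwards to the 'remixAID' Electron plugin, in the browser to the RemoteInferencer, with isInferencing guarding against concurrent chat requests. A hedged sketch of how another plugin on the same engine would consume it (the prompts are illustrative):

// from inside any plugin registered on the same engine:
const insertion = await this.call('remixAI', 'code_insertion', 'function add(uint a, uint b) {', '}')
const explanation = await this.call('remixAI', 'error_explaining', 'ParserError: expected ";" but got "}"')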

apps/remix-ide/src/app/plugins/solcoderAI.tsx (deleted)

@@ -1,283 +0,0 @@
import { Plugin } from '@remixproject/engine'
export type SuggestOptions = {
max_new_tokens: number,
temperature: number,
do_sample:boolean
top_k: number,
top_p:number,
stream_result:boolean
}
const _paq = (window._paq = window._paq || [])
const profile = {
name: 'solcoder',
displayName: 'solcoder',
description: 'solcoder',
methods: ['code_generation', 'code_completion', "solidity_answer", "code_explaining", "code_insertion", "error_explaining"],
events: [],
maintainedBy: 'Remix',
}
type ChatEntry = [string, string];
enum BackendOPModel{
DeepSeek,
CodeLLama,
Mistral
}
const PromptBuilder = (inst, answr, modelop) => {
if (modelop === BackendOPModel.CodeLLama) return ""
if (modelop === BackendOPModel.DeepSeek) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === BackendOPModel.Mistral) return ""
}
export class SolCoder extends Plugin {
api_url: string
completion_url: string
solgpt_chat_history:ChatEntry[]
max_history = 7
model_op = BackendOPModel.DeepSeek
constructor() {
super(profile)
this.api_url = "https://solcoder.remixproject.org"
this.completion_url = "https://completion.remixproject.org"
this.solgpt_chat_history = []
}
pushChatHistory(prompt, result){
const chat:ChatEntry = [prompt, result.data[0]]
this.solgpt_chat_history.push(chat)
if (this.solgpt_chat_history.length > this.max_history){this.solgpt_chat_history.shift()}
}
async code_generation(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_generation'])
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}),
})
).json()
if ("error" in result){
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.error })
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async solidity_answer(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'answering'])
let result
try {
const main_prompt = this._build_solgpt_promt(prompt)
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[main_prompt, "solidity_answer", false,1000,0.9,0.8,50]}),
})
).json()
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
this.solgpt_chat_history = []
return
} finally {
this.emit("aiInferingDone")
}
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.pushChatHistory(prompt, result)
} else if (result.error) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "Error on request" })
}
}
async code_explaining(prompt, context:string=""): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'explaining'])
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}),
})
).json()
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.pushChatHistory(prompt, result)
}
return result.data[0]
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async code_completion(prompt, options:SuggestOptions=null): Promise<any> {
this.emit("aiInfering")
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_completion'])
let result
try {
result = await(
await fetch(this.completion_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data": !options? [
prompt, // string in 'context_code' Textbox component
"code_completion",
"", // string in 'comment' Textbox component
false, // boolean in 'stream_result' Checkbox component
30, // number (numeric value between 0 and 2000) in 'max_new_tokens' Slider component
0.9, // number (numeric value between 0.01 and 1) in 'temperature' Slider component
0.90, // number (numeric value between 0 and 1) in 'top_p' Slider component
50, // number (numeric value between 1 and 200) in 'top_k' Slider component
] : [
prompt,
"code_completion",
"",
options.stream_result,
options.max_new_tokens,
options.temperature,
options.top_p,
options.top_k
]}),
})
).json()
if ("error" in result){
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async code_insertion(msg_pfx, msg_sfx): Promise<any> {
this.emit("aiInfering")
_paq.push(['trackEvent', 'ai', 'solcoder', 'code_insertion'])
let result
try {
result = await(
await fetch(this.completion_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[
msg_pfx, // Text before current cursor line
"code_insertion",
msg_sfx, // Text after current cursor line
1024,
0.5,
0.92,
50
]}),
})
).json()
if ("error" in result){
return result
}
return result.data
} catch (e) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
async error_explaining(prompt): Promise<any> {
this.emit("aiInfering")
this.call('layout', 'maximizeTerminal')
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
_paq.push(['trackEvent', 'ai', 'solcoder', 'explaining'])
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}),
})
).json()
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.pushChatHistory(prompt, result)
}
return result.data[0]
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
} finally {
this.emit("aiInferingDone")
}
}
_build_solgpt_promt(user_promt:string){
if (this.solgpt_chat_history.length === 0){
return user_promt
} else {
let new_promt = ""
for (const [question, answer] of this.solgpt_chat_history) {
new_promt += PromptBuilder(question.split('sol-gpt')[1], answer, this.model_op)
}
// finaly
new_promt = "sol-gpt " + new_promt + PromptBuilder(user_promt.split('sol-gpt')[1], "", this.model_op)
return new_promt
}
}
}
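
Although the plugin is deleted, its positional payload convention survives unchanged in RemoteInferencer later in this PR. A sketch of that layout as a labelled TypeScript tuple, reconstructed from the inline comments above (the type name is descriptive, not part of any API):

type CompletionPayload = [
  prompt: string,         // 'context_code' textbox value
  task: string,           // e.g. 'code_completion'
  comment: string,        // unused here, sent as ''
  stream_result: boolean,
  max_new_tokens: number, // 0..2000
  temperature: number,    // 0.01..1
  top_p: number,          // 0..1
  top_k: number           // 1..200
]

const body: { data: CompletionPayload } = { data: ['contract C {', 'code_completion', '', false, 30, 0.9, 0.9, 50] }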

apps/remix-ide/src/app/tabs/locales/en/editor.json

@@ -21,7 +21,7 @@
"editor.formatCode": "Format Code",
"editor.generateDocumentation": "Generate documentation for this function",
"editor.generateDocumentation2": "Generate documentation for the function \"{name}\"",
"editor.generateDocumentationByAI": "solidity code: {content}\n Generate the natspec documentation for the function {currentFunction} using the docstring style syntax. Only use docstring supported tags",
"editor.generateDocumentationByAI": "```solidity\n {content}\n```\n You only generate the natspec documentation for the function {currentFunction} using the docstring style syntax. Only use docstring supported tags",
"editor.explainFunction": "Explain this function",
"editor.explainFunctionSol": "Explain this code",
"editor.explainFunction2": "Explain the function \"{name}\"",

apps/remix-ide/src/remixAppManager.js

@@ -72,11 +72,12 @@ let requiredModules = [ // services + layout views + system views
'vyperCompilationDetails',
'contractflattener',
'solidity-script',
'solcoder',
'home',
'doc-viewer',
'doc-gen',
'remix-templates',
'remixAID',
'remixAI',
'solhint',
'dgit',
'pinnedPanel',
@@ -165,7 +166,7 @@ export class RemixAppManager extends PluginManager {
this.pluginsDirectory = 'https://raw.githubusercontent.com/ethereum/remix-plugins-directory/master/build/metadata.json'
this.pluginLoader = new PluginLoader()
if (Registry.getInstance().get('platform').api.isDesktop()) {
requiredModules = [...requiredModules, 'fs', 'electronTemplates', 'isogit', 'remix-templates', 'electronconfig', 'xterm', 'compilerloader', 'ripgrep', 'slither']
requiredModules = [...requiredModules, 'fs', 'electronTemplates', 'isogit', 'remix-templates', 'electronconfig', 'xterm', 'compilerloader', 'ripgrep', 'slither', 'remixAID']
}
}

apps/remix-ide/src/remixEngine.js

@@ -26,7 +26,8 @@ export class RemixEngine extends Engine {
if (name === 'compilerloader') return { queueTimeout: 60000 * 4 }
if (name === 'filePanel') return { queueTimeout: 60000 * 20 }
if (name === 'fileManager') return { queueTimeout: 60000 * 20 }
if (name === 'solcoder') return { queueTimeout: 60000 * 2 }
if (name === 'remixAID') return { queueTimeout: 60000 * 20 }
if (name === 'remixAI') return { queueTimeout: 60000 * 20 }
if (name === 'cookbookdev') return { queueTimeout: 60000 * 3 }
if (name === 'contentImport') return { queueTimeout: 60000 * 3 }

apps/remixdesktop/package-lock.json

(File diff suppressed because it is too large)

apps/remixdesktop/src/engine.ts

@@ -11,6 +11,7 @@ import { RipgrepPlugin } from './plugins/ripgrepPlugin';
import { CompilerLoaderPlugin } from './plugins/compilerLoader';
import { SlitherPlugin } from './plugins/slitherPlugin';
import { AppUpdaterPlugin } from './plugins/appUpdater';
import { RemixAIDesktopPlugin } from './plugins/remixAIDektop';
import { FoundryPlugin } from './plugins/foundryPlugin';
import { HardhatPlugin } from './plugins/hardhatPlugin';
import { isE2E } from './main';
@@ -28,6 +29,7 @@ const slitherPlugin = new SlitherPlugin()
const appUpdaterPlugin = new AppUpdaterPlugin()
const foundryPlugin = new FoundryPlugin()
const hardhatPlugin = new HardhatPlugin()
const remixAIDesktopPlugin = new RemixAIDesktopPlugin()
engine.register(appManager)
engine.register(fsPlugin)
@@ -41,6 +43,7 @@ engine.register(slitherPlugin)
engine.register(foundryPlugin)
engine.register(appUpdaterPlugin)
engine.register(hardhatPlugin)
engine.register(remixAIDesktopPlugin)
appManager.activatePlugin('electronconfig')
appManager.activatePlugin('fs')

apps/remixdesktop/src/lib/InferenceServerManager.ts (new file)

@@ -0,0 +1,525 @@
import path, { resolve } from 'path';
const { spawn } = require('child_process'); // eslint-disable-line
import fs from 'fs';
import axios from "axios";
import { EventEmitter } from 'events';
import { ICompletions, IModel, IParams, InsertionParams,
CompletionParams, GenerationParams, ModelType, AIRequestType,
IStreamResponse, ChatHistory, downloadLatestReleaseExecutable,
buildSolgptPromt } from "@remix/remix-ai-core"
import { platform } from 'os';
class ServerStatusTimer {
private intervalId: NodeJS.Timeout | null = null;
public interval: number;
private task: () => void;
constructor(task: () => void, interval: number) {
this.task = task;
this.interval = interval;
}
start(): void {
if (this.intervalId === null) {
this.intervalId = setInterval(() => {
this.task();
}, this.interval);
}
}
stop(): void {
if (this.intervalId !== null) {
clearInterval(this.intervalId);
this.intervalId = null;
}
}
isRunning(): boolean {
return this.intervalId !== null;
}
}
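// Usage sketch (illustrative): ServerStatusTimer is a restartable wrapper around
// setInterval, and start() is a no-op while a timer is pending, so repeated calls are safe:
//   const timer = new ServerStatusTimer(() => console.log('poll state'), 20000)
//   timer.start()
//   timer.start() // ignored: already running
//   timer.stop()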
export class InferenceManager implements ICompletions {
isReady: boolean = false
selectedModels: IModel[] = []
event: EventEmitter
modelCacheDir: string = undefined
serverCacheDir: string = undefined
private inferenceProcess: any=null
port = 5501
inferenceURL = 'http://127.0.0.1:' + this.port
private static instance=null
stateTimer: ServerStatusTimer
private constructor(modelDir:string) {
this.event = new EventEmitter()
this.modelCacheDir = path.join(modelDir, 'models')
this.serverCacheDir = path.join(modelDir, 'inferenceServer')
this.stateTimer= new ServerStatusTimer(() => { this._processStatus()}, 20000)
}
static getInstance(modelDir:string){
if (!InferenceManager.instance) {
// check if there is a process already running
if (!modelDir) {
console.error('model directory is required to create InferenceManager instance')
return null
}
console.log('Creating new InferenceManager instance')
InferenceManager.instance = new InferenceManager(modelDir)
}
return InferenceManager.instance
}
// init the backend with a new model
async init(model:IModel) {
try {
await this._downloadModel(model)
if (model.downloadPath === undefined) {
console.log('Model not downloaded or not found')
return
}
console.log('Model downloaded at', model.downloadPath)
if (this.inferenceProcess === null) await this._startServer()
// check if resources are met before initializing the models
this._handleResources(true)
console.log('Initializing model request', model.modelType)
switch (model.modelType) {
case ModelType.CODE_COMPLETION_INSERTION: // fall through: both completion model types share this path
case ModelType.CODE_COMPLETION: {
console.log('Initializing Completion Model')
const res = await this._makeRequest('init_completion', { model_path: model.downloadPath })
console.log('code completion res is', res?.data?.status)
if (res?.data?.status === "success") {
this.isReady = true
console.log('Completion Model initialized successfully')
} else {
this.isReady = false
console.error('Error initializing the model', res.data?.error)
}
break;
}
case ModelType.GENERAL:{
const res = await this._makeRequest('init', { model_path: model.downloadPath })
if (res.data?.status === "success") {
this.isReady = true
console.log('General Model initialized successfully')
} else {
this.isReady = false
console.error('Error initializing the model', res.data?.error)
}
break;
}
}
this.stateTimer.start() // called once per model init (completion and general); start() is idempotent
this.selectedModels.push(model)
} catch (error) {
console.error('Error initializing the model', error)
this.isReady = false
InferenceManager.instance = null
}
}
async _processStatus() {
// check if the server is running
const options = { headers: { 'Content-Type': 'application/json', } }
const state = await axios.get(this.inferenceURL+"/state", options)
if (!state.data?.status) {
console.log('Inference server not running')
InferenceManager.instance = null
this.stateTimer.interval += this.stateTimer.interval
if (this.stateTimer.interval >= 60000) {
// attempt to restart the server
console.log('Attempting to restart the server')
this.stopInferenceServer()
this._startServer()
this.stateTimer.interval = 20000
}
} else {
// server is running and responded successfully
// console.log('Inference server is running')
// console.log('completion is running', state.data?.completion)
// console.log('general is running', state.data?.general)
}
// this._handleResources()
}
async _handleResources(logger:boolean=false) {
// check resource usage
const options = { headers: { 'Content-Type': 'application/json', } }
const res = await axios.get(this.inferenceURL+"/sys", options)
if (res.data?.status) {
const max_memory = res.data.memory.total
const used_memory = res.data.memory.used
const memory_usage = res.data.memory.percent * 100
const gpu_available = res.data.gpus
for (const model of this.selectedModels) {
if (model.modelReqs.minSysMemory > max_memory) {
if (logger) console.warn('Insufficient total system memory for the model')
}
if (model.modelReqs.minSysMemory > (max_memory - used_memory)) {
if (logger) console.warn('Insufficient free memory for the model')
}
if (model.modelReqs.GPURequired) {
if (gpu_available.length < 1) {
if (logger) console.warn('GPU required for desktop inference but not available')
}
}
}
}
}
async _downloadModel(model:IModel): Promise<string> {
if (this.modelCacheDir === undefined) {
console.log('Model cache directory not provided')
return
} else {
const outputLocationPath = path.join(this.modelCacheDir, model.modelName);
console.log('output location path is', outputLocationPath)
if (fs.existsSync(outputLocationPath)) {
model.downloadPath = outputLocationPath
console.log('Model already exists in the output location', outputLocationPath);
return;
}
console.log('Downloading model from', model.downloadUrl);
// Make a HEAD request to get the file size
const { headers } = await axios.head(model.downloadUrl);
const totalSize = parseInt(headers['content-length'], 10);
// Create a write stream to save the file
const writer = fs.createWriteStream(outputLocationPath);
// Start the file download
const response = await axios({
method: 'get',
url: model.downloadUrl,
responseType: 'stream'
});
let downloadedSize = 0;
response.data.on('data', (chunk: Buffer) => {
downloadedSize += chunk.length;
const progress = (Number((downloadedSize / totalSize) * 100).toFixed(2));
console.log(`Downloaded ${progress}%`);
this.event.emit('download', progress);
});
response.data.pipe(writer);
this.event.emit('ready')
model.downloadPath = outputLocationPath
console.log('Model download complete');
return new Promise((resolve, reject) => {
writer.on('finish', resolve);
writer.on('error', reject);
});
}
}
private async _downloadInferenceServer() {
const execPath = this._getServerPath()
try {
if (fs.existsSync(execPath)) {
console.log('Inference server already downloaded')
return true
} else {
await downloadLatestReleaseExecutable(process.platform, this.serverCacheDir)
if (fs.existsSync(execPath)) {return true } else {return false}
}
} catch (error) {
console.error('Error downloading Inference server:', error)
return false
}
}
private _getServerPath() {
// get cpu arch
const arch = process.arch
let exec_suffix = ''
if (arch === 'x64') {
exec_suffix = 'x64'
} else if (arch === 'arm' || arch === 'arm64') {
exec_suffix = 'arm'
} else {
throw new Error('Unsupported CPU architecture')
}
// get the platform name and return the path to the server executable
let exec_name = ''
if (process.platform === 'win32') {
exec_name = 'InferenceServer-' + process.platform + '.exe'
} else if (process.platform === 'linux') {
exec_name = 'InferenceServer-' + process.platform + '_' + exec_suffix
} else if (process.platform === 'darwin') {
exec_name = 'InferenceServer-' + 'mac'
} else {
throw new Error('Unsupported platform')
}
return path.join(this.serverCacheDir, exec_name);
}
private async _handleExistingServer() {
// if the server is already running, kill it
try {
const options = { headers: { 'Content-Type': 'application/json', } }
const state = await axios.get(this.inferenceURL+"/state", options)
if (state.data?.status) {
console.log('Found existing Inference server running')
this.stopInferenceServer()
await axios.post(this.inferenceURL+"/kill", options)
}
} catch (error) {
// catch connection refused
console.log('No existing Inference server running')
}
}
private async _startServer() {
const serverAvailable = await this._downloadInferenceServer()
if (!serverAvailable) {
console.error('Inference server not available for this platform')
return
}
// kill existing server if running
await this._handleExistingServer()
return new Promise<void>((resolve, reject) => {
let serverPath = ""
try {
serverPath = this._getServerPath();
fs.chmodSync(serverPath, '755')
} catch (error) {
console.error('Error resolving server executable path:', error);
return reject(error)
}
// Check if the file exists
if (!fs.existsSync(serverPath)) {
return reject(new Error(`Inference server not found at ${serverPath}`));
}
// Check file permissions
try {
fs.accessSync(serverPath, fs.constants.X_OK);
} catch (err) {
return reject(new Error(`No execute permission on ${serverPath}`));
}
const spawnArgs = [this.port];
// console.log(`Spawning process: ${serverPath} ${spawnArgs.join(' ')}`);
this.inferenceProcess = spawn(serverPath, spawnArgs);
this.inferenceProcess.stdout.on('data', (data) => {
console.log(`Inference server output: ${data}`);
if (data.includes('Running on http://')) {
console.log('Inference server started successfully');
resolve();
}
});
this.inferenceProcess.stderr.on('data', (data) => {
console.error(`Inference log: ${data}`);
if (data.includes('Address already in use')) {
console.error(`Port ${this.port} is already in use. Please stop the existing server and try again`);
reject(new Error(`Port ${this.port} is already in use`));
}
resolve();
});
this.inferenceProcess.on('error', (err) => {
console.error('Failed to start Inference server:', err);
reject(err);
});
this.inferenceProcess.on('close', (code) => {
console.log(`Inference server process exited with code ${code}`);
if (code !== 0) {
reject(new Error(`Inference server exited with code ${code}`));
}
});
});
}
stopInferenceServer() {
if (this.inferenceProcess) {
this.inferenceProcess.kill();
this.inferenceProcess = null;
}
}
private async _makeInferenceRequest(endpoint, payload, rType:AIRequestType){
try {
this.event.emit('onInference')
const options = { headers: { 'Content-Type': 'application/json', } }
const response = await axios.post(`${this.inferenceURL}/${endpoint}`, payload, options)
const userPrompt = payload[Object.keys(payload)[0]]
this.event.emit('onInferenceDone')
if (response.data?.generatedText) {
if (rType === AIRequestType.GENERAL) {
ChatHistory.pushHistory(userPrompt, response.data.generatedText)
}
return response.data.generatedText
} else { return "" }
} catch (error) {
ChatHistory.clearHistory()
console.error('Error making request to Inference server:', error.message);
}
}
private async _streamInferenceRequest(endpoint, payload){
try {
this.event.emit('onInference')
const options = { headers: { 'Content-Type': 'application/json', } }
const response = await axios({
method: 'post',
url: `${this.inferenceURL}/${endpoint}`,
data: payload,
headers: {
"Content-Type": "application/json",
"Accept": "text/event-stream",
}
, responseType: 'stream' });
const userPrompt = payload[Object.keys(payload)[0]]
let resultText = ""
response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
resultText = resultText + parsedData.generatedText
} else {
resultText = resultText + parsedData.generatedText
// no additional check for streamed results
ChatHistory.pushHistory(userPrompt, resultText)
return parsedData.generatedText
}
} catch (error) {
ChatHistory.clearHistory()
console.error('Error parsing JSON:', error);
}
});
return "" // return empty string for now as payload is/will be handled in event
} catch (error) {
ChatHistory.clearHistory()
console.error('Error making stream request to Inference server:', error.message);
}
finally {
this.event.emit('onInferenceDone')
}
}
private async _makeRequest(endpoint, payload){
// makes a simple request to the inference server
try {
const options = { headers: { 'Content-Type': 'application/json', } }
const response = await axios.post(`${this.inferenceURL}/${endpoint}`, payload, options)
this.event.emit('onInferenceDone')
return response
} catch (error) {
console.error('Error making request to Inference server:', error.message);
}
}
async code_completion(context: any, params:IParams=CompletionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
// as of now no prompt required
const payload = { context_code: context, ...params }
return this._makeInferenceRequest('code_completion', payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx: string, msg_sfx: string, params:IParams=InsertionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
const payload = { code_pfx:msg_pfx, code_sfx:msg_sfx, ...params }
return this._makeInferenceRequest('code_insertion', payload, AIRequestType.COMPLETION)
}
async code_generation(prompt: string, params:IParams=GenerationParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
return this._makeInferenceRequest('code_generation', { prompt, ...params }, AIRequestType.GENERAL)
}
async code_explaining(code:string, context:string, params:IParams=GenerationParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
if (params.stream_result) {
return this._streamInferenceRequest('code_explaining', { code, context, ...params })
} else {
return this._makeInferenceRequest('code_explaining', { code, context, ...params }, AIRequestType.GENERAL)
}
}
async error_explaining(prompt: string, params:IParams=GenerationParams): Promise<any>{
if (!this.isReady) {
console.log('model not ready yet')
return ""
}
if (params.stream_result) {
return this._streamInferenceRequest('error_explaining', { prompt, ...params })
} else {
return this._makeInferenceRequest('error_explaining', { prompt, ...params }, AIRequestType.GENERAL)
}
}
async solidity_answer(userPrompt: string, params:IParams=GenerationParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
let modelOP = undefined
for (const model of this.selectedModels) {
if (model.modelType === ModelType.GENERAL) {
modelOP = model.modelOP
}
}
const prompt = buildSolgptPromt(userPrompt, modelOP)
if (params.stream_result) {
return this._streamInferenceRequest('solidity_answer', { prompt, ...params })
} else {
return this._makeInferenceRequest('solidity_answer', { prompt, ...params }, AIRequestType.GENERAL)
}
}
}
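
End to end, the manager is driven as getInstance → init(model) → request methods: init downloads the gguf file, spawns the inference server on port 5501 and POSTs the matching init endpoint. A hedged sketch, assuming the DefaultModels list from remix-ai-core and an illustrative cache path:

import { DefaultModels, ModelType } from '@remix/remix-ai-core'

const manager = InferenceManager.getInstance('/home/user/.cache/remixdesktop') // process-wide singleton
const completionModel = DefaultModels().find(m => m.modelType === ModelType.CODE_COMPLETION_INSERTION)!
await manager.init(completionModel)                            // download + server start + /init_completion
const text = await manager.code_completion('contract Token {') // POST /code_completion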

apps/remixdesktop/src/lib/databatcher.ts

@@ -1,4 +1,4 @@
import {EventEmitter} from 'events';
import { EventEmitter } from 'events';
import { StringDecoder } from 'string_decoder';
// Max duration to batch session data before sending it to the renderer process.
const BATCH_DURATION_MS = 16;

apps/remixdesktop/src/plugins/remixAIDektop.ts (new file)

@@ -0,0 +1,115 @@
import { ElectronBasePlugin, ElectronBasePluginClient } from "@remixproject/plugin-electron"
import { Profile } from "@remixproject/plugin-utils"
// use remix ai core
import { InferenceManager } from "../lib/InferenceServerManager"
import { cacheDir } from "../utils/config"
import { RemoteInferencer } from "@remix/remix-ai-core"
// import { isE2E } from "../main";
const profile = {
name: 'remixAID',
displayName: 'RemixAI Desktop',
maintainedBy: 'Remix',
description: 'RemixAI provides AI services to Remix IDE Desktop.',
kind: '',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
}
export class RemixAIDesktopPlugin extends ElectronBasePlugin {
clients: RemixAIDesktopPluginClient[] = []
constructor() {
super(profile, clientProfile, RemixAIDesktopPluginClient)
this.methods = [...super.methods]
}
}
const clientProfile: Profile = {
name: 'remixAID',
displayName: 'RemixAI Desktop',
maintainedBy: 'Remix',
description: 'RemixAI provides AI services to Remix IDE Desktop.',
kind: '',
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
methods: ['initializeModelBackend', 'code_completion', 'code_insertion', 'code_generation', 'code_explaining', 'error_explaining', 'solidity_answer']
}
class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
readonly modelCacheDir: string = cacheDir
desktopInferencer:InferenceManager | RemoteInferencer = null
constructor (webContentsId: number, profile: Profile){
super(webContentsId, profile)
}
async onActivation(): Promise<void> {
this.onload(() => {
})
}
async enable (){
console.log('Remix AI desktop plugin enabled')
this.emit('enabled')
}
async initializeModelBackend(local, generalModel?, completionModel?){
if (!local){
this.desktopInferencer = new RemoteInferencer()
} else if (generalModel || completionModel){
if (!this.desktopInferencer){
this.desktopInferencer = InferenceManager.getInstance(this.modelCacheDir)
if (this.desktopInferencer instanceof InferenceManager && generalModel) await this.desktopInferencer.init(generalModel)
if (this.desktopInferencer instanceof InferenceManager && completionModel) await this.desktopInferencer.init(completionModel)
} else {
return false // do not set event listener twice
}
} else {
throw new Error('No model provided')
}
// set event listeners
this.desktopInferencer.event.on('onStreamResult', (data) => {
this.emit('onStreamResult', data)
})
this.desktopInferencer.event.on('onInference', () => {
this.emit('onInference')
})
this.desktopInferencer.event.on('onInferenceDone', () => {
this.emit('onInferenceDone')
})
return true
}
async code_completion(context: any) {
// uses whichever inferencer was configured in initializeModelBackend
return this.desktopInferencer.code_completion(context)
}
async code_insertion(msg_pfx: string, msg_sfx: string) {
return this.desktopInferencer.code_insertion(msg_pfx, msg_sfx)
}
async code_generation(prompt: string) {
return this.desktopInferencer.code_generation(prompt)
}
async code_explaining(code:string, context?:string) {
return this.desktopInferencer.code_explaining(code, context)
}
async error_explaining(prompt: string) {
return this.desktopInferencer.error_explaining(prompt)
}
async solidity_answer(prompt: string) {
return this.desktopInferencer.solidity_answer(prompt)
}
changemodel(newModel: any){
// TODO: dereference the current static inference object and set the new one
}
}
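
From the renderer, RemixAIPlugin reaches this client over the Electron plugin bridge; initializeModelBackend is the only branching entry point. A hedged sketch of the two paths, assuming model descriptors taken from DefaultModels():

// remote path: no models needed, a RemoteInferencer is created behind the scenes
await this.call('remixAID', 'initializeModelBackend', false)
// local path: pass one or both gguf model descriptors
await this.call('remixAID', 'initializeModelBackend', true, generalModel, completionModel)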

apps/remixdesktop/src/preload.ts

@@ -6,7 +6,7 @@ console.log('preload.ts', new Date().toLocaleTimeString())
/* preload script needs statically defined API for each plugin */
const exposedPLugins = ['fs', 'git', 'xterm', 'isogit', 'electronconfig', 'electronTemplates', 'ripgrep', 'compilerloader', 'appUpdater', 'slither', 'foundry', 'hardhat']
const exposedPLugins = ['fs', 'git', 'xterm', 'isogit', 'electronconfig', 'electronTemplates', 'ripgrep', 'compilerloader', 'appUpdater', 'slither', 'foundry', 'hardhat', 'remixAID']
let webContentsId: number | undefined

apps/remixdesktop/src/utils/config.ts

@@ -14,6 +14,12 @@ export const createDefaultConfigLocations = async() => {
if (!fs.existsSync(cacheDir + '/compilers')) {
fs.mkdirSync(cacheDir + '/compilers')
}
if (!fs.existsSync(cacheDir + '/models')) {
fs.mkdirSync(cacheDir + '/models')
}
if (!fs.existsSync(cacheDir + '/inferenceServer')) {
fs.mkdirSync(cacheDir + '/inferenceServer')
}
if (!fs.existsSync(cacheDir + '/remixdesktop.json')) {
console.log('create config file')
fs.writeFileSync(cacheDir + '/remixdesktop.json', JSON.stringify({}))

apps/remixdesktop/tsconfig.json

@@ -21,6 +21,9 @@
"@remix-git": [
"../../libs/remix-git/"
],
"@remix/remix-ai-core": [
"../../libs/remix-ai-core/src/index"
]
},
"typeRoots": [
"src/**/*.d.ts",

apps/vyper/src/app/utils/remix-client.tsx

@@ -72,7 +72,7 @@ export class RemixClient extends PluginClient<any, CustomRemixApi> {
${message}
can you explain why this error occurred and how to fix it?
`
await this.client.call('solcoder' as any, 'error_explaining', message)
await this.client.call('remixAI' as any, 'error_explaining', message)
} catch (err) {
console.error('unable to askGpt')
console.error(err)

libs/remix-ai-core/.eslintrc (new file)

@@ -0,0 +1 @@
{ "extends": "../../.eslintrc", "rules": {}, "ignorePatterns": ["!**/*"] }

libs/remix-ai-core/README.md (new file)

@@ -0,0 +1,7 @@
# remix-ai-core
[![npm version](https://badge.fury.io/js/%40remix-project%2Fremixd.svg)](https://www.npmjs.com/package/@remix-project/remixd)
[![npm](https://img.shields.io/npm/dt/@remix-project/remixd.svg?label=Total%20Downloads&logo=npm)](https://www.npmjs.com/package/@remix-project/remixd)
[![npm](https://img.shields.io/npm/dw/@remix-project/remixd.svg?logo=npm)](https://www.npmjs.com/package/@remix-project/remixd)

libs/remix-ai-core/package-lock.json

(File diff suppressed because it is too large)

libs/remix-ai-core/project.json (new file)

@@ -0,0 +1,32 @@
{
"name": "remix-ai-core",
"$schema": "../../node_modules/nx/schemas/project-schema.json",
"sourceRoot": "libs/remix-ai-core/src",
"projectType": "library",
"implicitDependencies": [
],
"targets": {
"build": {
"executor": "@nrwl/js:tsc",
"outputs": ["{options.outputPath}"],
"options": {
"outputPath": "dist/libs/remix-ai-core",
"main": "libs/remix-ai-core/src/index.ts",
"tsConfig": "libs/remix-ai-core/tsconfig.lib.json",
"updateBuildableProjectDepsInPackageJson": false,
"assets": [
"libs/remix-ai-core/*.md"
]
}
},
"lint": {
"executor": "@nrwl/linter:eslint",
"outputs": ["{options.outputFile}"],
"options": {
"lintFilePatterns": ["libs/remix-ai-core/**/*.ts"],
"eslintConfig": "libs/remix-ai-core/.eslintrc"
}
}
},
"tags": []
}

libs/remix-ai-core/src/agents/codeExplainAgent.ts (new file)

@@ -0,0 +1,29 @@
// interactive code explanation and highlighting of security vulnerabilities
import * as fs from 'fs';
class CodeExplainAgent {
private codebase: string[]; // flattened lines of all codebase files
public currentFile: string;
constructor(codebasePath: string) {
// git or fs
this.codebase = this.loadCodebase(codebasePath);
}
private loadCodebase(path: string): string[] {
const files = fs.readdirSync(path);
return files
.filter(file => file.endsWith('.ts'))
.flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
}
public update(currentFile, lineNumber){
}
public getExplanations(currentLine: string, numSuggestions: number = 3): string[] {
// process the code base explaining the current file and highlight some details
const suggestions: string[] = [];
return suggestions;
}
}

libs/remix-ai-core/src/agents/completionAgent.ts (new file)

@@ -0,0 +1,23 @@
import * as fs from 'fs';
class CodeCompletionAgent {
private codebase: string[];
constructor(codebasePath: string) {
// git or fs
this.codebase = this.loadCodebase(codebasePath);
}
private loadCodebase(path: string): string[] {
const files = fs.readdirSync(path);
return files
.filter(file => file.endsWith('.ts'))
.flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
}
public getSuggestions(currentLine: string, numSuggestions: number = 3): string[] {
const suggestions: string[] = [];
// get `numSuggestions` from the llm
return suggestions;
}
}

libs/remix-ai-core/src/agents/securityAgent.ts (new file)

@@ -0,0 +1,29 @@
// security checks
import * as fs from 'fs';
class SecurityAgent {
private codebase: string[]; // flattened lines of all codebase files
public currentFile: string;
constructor(codebasePath: string) {
// git or fs
this.codebase = this.loadCodebase(codebasePath);
}
private loadCodebase(path: string): string[] {
const files = fs.readdirSync(path);
return files
.filter(file => file.endsWith('.ts'))
.flatMap(file => fs.readFileSync(`${path}/${file}`, 'utf-8').split('\n'));
}
public update(currentFile, lineNumber){
}
public getRecommendations(currentLine: string, numSuggestions: number = 3): string[] {
// process the codebase, highlighting security vulnerabilities, and deliver recommendations
const suggestions: string[] = [];
return suggestions;
}
}

libs/remix-ai-core/src/helpers/inferenceServerReleases.ts (new file)

@@ -0,0 +1,55 @@
import axios from 'axios';
import fs from 'fs';
import path from 'path';
interface Asset {
name: string;
browser_download_url: string;
}
interface Release {
assets: Asset[];
}
const owner = 'remix-project-org'
const repo = 'remix_ai_tools'
async function getLatestRelease(owner: string, repo: string): Promise<Release> {
const url = `https://api.github.com/repos/${owner}/${repo}/releases/latest`;
const response = await axios.get(url);
return response.data;
}
async function downloadFile(url: string, filePath: string): Promise<void> {
const writer = fs.createWriteStream(filePath);
const response = await axios({
url,
method: 'GET',
responseType: 'stream'
});
response.data.pipe(writer);
return new Promise((resolve, reject) => {
writer.on('finish', resolve);
writer.on('error', reject);
});
}
export async function downloadLatestReleaseExecutable(platform: string, outputDir: string): Promise<void> {
try {
const release = await getLatestRelease(owner, repo);
const executables = release.assets.filter(asset =>
asset.name.includes(platform)
);
console.log(`Downloading ${executables.length} executable(s) for ${platform}...`);
for (const executable of executables) {
const filePath = path.join(outputDir, executable.name);
console.log(`Downloading ${executable.name}...`);
await downloadFile(executable.browser_download_url, filePath);
console.log(`Downloaded ${executable.name}`);
}
} catch (error) {
console.error('Error downloading executables:', error);
}
}
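
The downloader filters release assets by a platform substring, which lines up with the InferenceServer-&lt;platform&gt;_&lt;arch&gt; naming that InferenceManager._getServerPath expects. A usage sketch with an illustrative output directory:

// e.g. fetches InferenceServer-linux_x64 from the latest remix_ai_tools release
await downloadLatestReleaseExecutable(process.platform, '/home/user/.cache/remixdesktop/inferenceServer')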

libs/remix-ai-core/src/index.ts (new file)

@@ -0,0 +1,20 @@
'use strict'
import { IModel, IModelResponse, IModelRequest, InferenceModel, ICompletions,
IParams, ChatEntry, AIRequestType, IRemoteModel,
RemoteBackendOPModel, IStreamResponse } from './types/types'
import { ModelType } from './types/constants'
import { DefaultModels, InsertionParams, CompletionParams, GenerationParams } from './types/models'
import { getCompletionPrompt, getInsertionPrompt } from './prompts/completionPrompts'
import { buildSolgptPromt, PromptBuilder } from './prompts/promptBuilder'
import { RemoteInferencer } from './inferencers/remote/remoteInference'
import { ChatHistory } from './prompts/chat'
import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
export {
IModel, IModelResponse, IModelRequest, InferenceModel,
ModelType, DefaultModels, ICompletions, IParams, IRemoteModel,
getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPromt,
RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
}

libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (new file)

@@ -0,0 +1,141 @@
import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel } from "../../types/types";
import { buildSolgptPromt } from "../../prompts/promptBuilder";
import axios from "axios";
import EventEmitter from "events";
import { ChatHistory } from "../../prompts/chat";
const defaultErrorMessage = `Unable to get a response from AI server`
export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
max_history = 7
model_op = RemoteBackendOPModel.CODELLAMA // default model operation; change if necessary
event: EventEmitter
constructor(apiUrl?:string, completionUrl?:string) {
this.api_url = apiUrl!==undefined ? apiUrl: "https://solcoder.remixproject.org"
this.completion_url = completionUrl!==undefined ? completionUrl : "https://completion.remixproject.org"
this.event = new EventEmitter()
}
private async _makeRequest(data, rType:AIRequestType){
this.event.emit("onInference")
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
const userPrompt = data.data[0]
try {
const result = await axios(requesURL, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
data: JSON.stringify(data),
})
switch (rType) {
case AIRequestType.COMPLETION:
if (result.statusText === "OK")
return result.data.data[0]
else {
return defaultErrorMessage
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
const resultText = result.data.data[0]
ChatHistory.pushHistory(userPrompt, resultText)
return resultText
} else {
return defaultErrorMessage
}
}
} catch (e) {
ChatHistory.clearHistory()
console.error('Error making request to Inference server:', e.message)
return e
}
finally {
this.event.emit("onInferenceDone")
}
}
private async _streamInferenceRequest(data, rType:AIRequestType){
try {
this.event.emit('onInference')
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
const userPrompt = data.data[0]
const response = await axios({
method: 'post',
url: requesURL,
data: data,
headers: { 'Content-Type': 'application/json', "Accept": "text/event-stream" },
responseType: 'stream'
});
let resultText = ""
response.data.on('data', (chunk: Buffer) => {
try {
const parsedData = JSON.parse(chunk.toString());
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
resultText = resultText + parsedData.generatedText
} else {
// stream generation is complete
resultText = resultText + parsedData.generatedText
ChatHistory.pushHistory(userPrompt, resultText)
return parsedData.generatedText
}
} catch (error) {
console.error('Error parsing JSON:', error);
ChatHistory.clearHistory()
}
});
return "" // return empty string for now as handled in event
} catch (error) {
ChatHistory.clearHistory()
console.error('Error making stream request to Inference server:', error.message);
}
finally {
this.event.emit('onInferenceDone')
}
}
async code_completion(prompt, options:IParams=null): Promise<any> {
const payload = !options?
{ "data": [prompt, "code_completion", "", false, 30, 0.9, 0.90, 50]} :
{ "data": [prompt, "code_completion", "", options.stream_result,
options.max_new_tokens, options.temperature, options.top_p, options.top_k]
}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx): Promise<any> {
const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt): Promise<any> {
const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async solidity_answer(prompt): Promise<any> {
const main_prompt = buildSolgptPromt(prompt, this.model_op)
const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string=""): Promise<any> {
const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
async error_explaining(prompt): Promise<any> {
const payload = { "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
}
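
A minimal usage sketch with the default endpoints; the two events are exactly what RemixAIPlugin subscribes to on the browser path:

const inferencer = new RemoteInferencer() // defaults to the hosted solcoder endpoints
inferencer.event.on('onInference', () => console.log('request started'))
inferencer.event.on('onInferenceDone', () => console.log('request finished'))
const answer = await inferencer.solidity_answer('sol-gpt what does a fallback function do?')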

libs/remix-ai-core/src/prompts/chat.ts (new file)

@@ -0,0 +1,21 @@
import { ChatEntry } from "../types/types"
export abstract class ChatHistory{
private static chatEntries:ChatEntry[] = []
static queuSize:number = 7 // adjust the queue size with respect to available GPU memory
public static pushHistory(prompt, result){
const chat:ChatEntry = [prompt, result]
this.chatEntries.push(chat)
if (this.chatEntries.length > this.queuSize){this.chatEntries.shift()}
}
public static getHistory(){
return this.chatEntries
}
public static clearHistory(){
this.chatEntries = []
}
}
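
ChatHistory is a static FIFO capped at queuSize entries, so the prompt builder below never sees more than the seven most recent exchanges. A usage sketch:

ChatHistory.pushHistory('sol-gpt what is a modifier?', 'A modifier wraps a function body...')
console.log(ChatHistory.getHistory()) // [['sol-gpt what is a modifier?', 'A modifier wraps a function body...']]
ChatHistory.clearHistory()            // called on request errors so a failed exchange cannot poison later prompts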

libs/remix-ai-core/src/prompts/completionPrompts.ts (new file)

@@ -0,0 +1,18 @@
import { COMPLETION_SYSTEM_PROMPT } from "../types/constants";
import { IModel } from "../types/types";
export const getInsertionPrompt = (model:IModel, msg_pfx, msg_sfx) => {
if ((model.modelType === 'code_completion_insertion') && (model.modelName.toLocaleLowerCase().includes('deepseek'))){
return `'<|fim▁begin|>' ${msg_pfx} '<|fim▁hole|>' ${msg_sfx} '<|fim▁end|>'`
}
else {
// TODO: return an error; model not supported yet
}
}
export const getCompletionPrompt = (model:IModel, context) => {
if ((model.modelType === 'code_completion') && (model.modelName.toLocaleLowerCase().includes('deepseek'))){
return `${COMPLETION_SYSTEM_PROMPT} \n### Instruction:\n${context}\n ### Response: `
}
}
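
With the deepseek completion model, getInsertionPrompt wraps the cursor context in DeepSeek's fill-in-the-middle tokens; a worked example (inputs illustrative):

// getInsertionPrompt(deepseekModel, 'contract C {', '}') returns roughly:
// '<|fim▁begin|>' contract C { '<|fim▁hole|>' } '<|fim▁end|>'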

libs/remix-ai-core/src/prompts/promptBuilder.ts (new file)

@@ -0,0 +1,28 @@
import { RemoteBackendOPModel } from "../types/types"
import { ChatHistory } from "./chat"
export const PromptBuilder = (inst, answr, modelop) => {
if (modelop === RemoteBackendOPModel.CODELLAMA) return `<|start_header_id|>user<|end_header_id|>${inst}<|eot_id|><|start_header_id|>assistant<|end_header_id|> ${answr}`
if (modelop === RemoteBackendOPModel.DEEPSEEK) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === RemoteBackendOPModel.MISTRAL) return ""
}
export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel) => {
if (modelOP === undefined) {
console.log('WARNING: modelOP is undefined. Provide a valid model OP for chat history')
return userPrompt
}
if (ChatHistory.getHistory().length === 0){
return userPrompt
} else {
let newPrompt = ""
for (const [question, answer] of ChatHistory.getHistory()) {
if (question.startsWith('sol-gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
else newPrompt += PromptBuilder(question, answer, modelOP)
}
// finally, append the new user prompt (strip the leading command token when present)
newPrompt = "sol-gpt " + newPrompt + PromptBuilder(userPrompt.includes('gpt') ? userPrompt.split('gpt')[1] : userPrompt, "", modelOP)
return newPrompt
}
}
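A sketch of the composed prompt for the DeepSeek backend after one stored exchange (output shown approximately):
ChatHistory.pushHistory('gpt what is a struct?', 'A struct groups related variables.')
const prompt = buildSolgptPromt('sol-gpt and what is an enum?', RemoteBackendOPModel.DEEPSEEK)
// prompt ≈ 'sol-gpt \n### INSTRUCTION:\n what is a struct?\n### RESPONSE:\nA struct groups related variables.\n### INSTRUCTION:\n and what is an enum?\n### RESPONSE:\n'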

@ -0,0 +1,9 @@
/// constants for model selection
export enum ModelType {
CODE_COMPLETION = 'code_completion',
GENERAL = 'general',
CODE_COMPLETION_INSERTION = 'code_completion_insertion',
}
export const COMPLETION_SYSTEM_PROMPT = "You are a Solidity AI Assistant that completes user code using the provided context. You provide accurate solutions and always answer as helpfully as possible, while being safe. You only provide code using this context:\n"

@ -0,0 +1,81 @@
// Registry of supported local models.
// DefaultModels() returns the list of all supported models;
// getModel() looks a model up by its name.
import { IModel, IParams, RemoteBackendOPModel } from './types';
import { ModelType } from './constants';
const DefaultModels = (): IModel[] => {
const model1:IModel = {
name: 'DeepSeek',
modelOP: RemoteBackendOPModel.DEEPSEEK,
task: 'text-generation',
modelName: 'deepseek-coder-6.7b-instruct-q4.gguf',
downloadUrl: 'https://drive.usercontent.google.com/download?id=13sz7lnEhpQ6EslABpAKl2HWZdtX3d9Nh&confirm=xxx',
modelType: ModelType.GENERAL,
modelReqs: { backend: 'llamacpp', minSysMemory: 8, GPURequired: false, MinGPUVRAM: 8 }
};
const model2: IModel = {
name: 'DeepSeek',
modelOP: RemoteBackendOPModel.DEEPSEEK,
task: 'text-generation',
modelName: 'deepseek-coder-1.3b-base-q4.gguf',
downloadUrl: 'https://drive.usercontent.google.com/download?id=13UNJuB908kP0pWexrT5n8i2LrhFaWo92&confirm=xxx',
modelType: ModelType.CODE_COMPLETION_INSERTION,
modelReqs: { backend: 'llamacpp', minSysMemory: 2, GPURequired: false, MinGPUVRAM: 2 }
};
const model3: IModel = {
name: 'llama3.1_8B',
modelOP: RemoteBackendOPModel.CODELLAMA,
task: 'text-generation',
modelName: 'llama3_1_8B-q4_0.gguf',
downloadUrl: 'https://drive.usercontent.google.com/download?id=1I376pl8uORDnUIjfNuqhExK4NCiH3F12&confirm=xxx',
modelType: ModelType.GENERAL,
modelReqs: { backend: 'llamacpp', minSysMemory: 8, GPURequired: false, MinGPUVRAM: 8 }
};
const model4: IModel = {
name: 'llama3.1_8B_instruct',
modelOP: RemoteBackendOPModel.CODELLAMA,
task: 'text-generation',
modelName: 'llama3_1_8B-q4_0_instruct.gguf',
downloadUrl: 'https://drive.usercontent.google.com/download?id=1P-MEH7cPxaR20v7W1qbOEPBzgiY2RDLx&confirm=xxx',
modelType: ModelType.GENERAL,
modelReqs: { backend: 'llamacpp', minSysMemory: 8, GPURequired: false, MinGPUVRAM: 8 }
};
return [model1, model2, model3, model4];
}
const getModel = async (name: string): Promise<IModel | undefined> => {
return DefaultModels().find(model => model.name === name);
}
const loadModel = async (modelname: string): Promise<void> => {
console.log(`Loading model ${modelname}`);
}
const CompletionParams:IParams = {
temperature: 0.8,
topK: 40,
topP: 0.92,
max_new_tokens: 15,
}
const InsertionParams:IParams = {
temperature: 0.8,
topK: 40,
topP: 0.92,
max_new_tokens: 150,
}
const GenerationParams:IParams = {
temperature: 0.5,
topK: 40,
topP: 0.92,
max_new_tokens: 2000,
stream_result: false,
}
export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
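A brief sketch of how these defaults are consumed (illustrative):
// pick the small insertion model and pair it with the matching sampling parameters
const insertion = DefaultModels().find(m => m.modelType === ModelType.CODE_COMPLETION_INSERTION)
console.log(insertion?.modelName) // 'deepseek-coder-1.3b-base-q4.gguf'
console.log(InsertionParams.max_new_tokens) // 150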

@ -0,0 +1,87 @@
// model implementation for the model selection component
import { ModelType } from './constants';
export interface IModelRequirements{
backend: string,
minSysMemory: number,
GPURequired: boolean,
MinGPUVRAM: number,
}
export interface IModel {
name: string;
task: string;
downloadUrl: string;
modelName: string;
modelType: ModelType;
modelReqs: IModelRequirements;
downloadPath?: string;
modelOP?: RemoteBackendOPModel;
}
export interface IRemoteModel {
completionUrl: string;
apiUrl: string;
}
export interface IModelResponse {
output: string;
error: string;
success: boolean;
model: IModel;
}
export interface IStreamResponse {
generatedText: string;
isGenerating: boolean;
}
export interface IModelRequest {
input: string;
model: IModel;
}
export interface InferenceModel {
model: IModel;
location: string;
isRemote: boolean;
}
export interface ICompletions {
code_completion(context: string, params: IParams): Promise<any>;
code_insertion(msg_pfx: string, msg_sfx: string, params: IParams): Promise<any>;
}
export interface IParams {
temperature?: number;
max_new_tokens?: number;
repetition_penalty?: number;
repeatPenalty?:any
no_repeat_ngram_size?: number;
num_beams?: number;
num_return_sequences?: number;
top_k?: number;
top_p?: number;
stream_result?: boolean;
return_full_text?: boolean;
nThreads?: number;
nTokPredict?: number;
topK?: number;
topP?: number;
temp?: number;
}
export enum AIRequestType {
COMPLETION,
GENERAL
}
export type ChatEntry = [string, string];
export enum RemoteBackendOPModel{
DEEPSEEK,
CODELLAMA,
MISTRAL
}
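For reference, a minimal stub satisfying ICompletions, e.g. for tests (the echo bodies are placeholders):
const stubCompleter: ICompletions = {
async code_completion(context: string, params: IParams) { return context.slice(-16) },
async code_insertion(msg_pfx: string, msg_sfx: string, params: IParams) { return '/* filled */' },
}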

@ -0,0 +1,10 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"types": ["node"],
"module": "commonjs",
"esModuleInterop": true,
"outDir": "./dist",
},
"include": ["**/*.ts"]
}

@ -0,0 +1,15 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "commonjs",
"outDir": "../../dist/out-tsc",
"declaration": true,
"rootDir": "./src",
"types": ["node"]
},
"exclude": [
"**/*.spec.ts",
"test/"
],
"include": ["**/*.ts"]
}

@ -0,0 +1,23 @@
import { IParams } from "@remix/remix-ai-core";
import { StatusEvents } from "@remixproject/plugin-utils";
export interface IRemixAID {
events: {
activated():void,
onInference():void,
onInferenceDone():void,
onStreamResult(streamText: string):void,
} & StatusEvents,
methods: {
code_completion(context: string): Promise<string>
code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
code_generation(prompt: string): Promise<string | null>,
code_explaining(code: string, context?: string): Promise<string | null>,
error_explaining(prompt: string): Promise<string | null>,
solidity_answer(prompt: string): Promise<string | null>,
initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise<boolean>,
chatPipe(pipeMessage: string): Promise<void>,
ProcessChatRequestBuffer(params:IParams): Promise<void>,
}
}

@ -0,0 +1,21 @@
import { IModel, IParams, IRemoteModel } from "@remix/remix-ai-core";
import { StatusEvents } from "@remixproject/plugin-utils";
export interface IRemixAI {
events: {
onStreamResult(streamText: string): Promise<void>,
activated(): Promise<void>,
} & StatusEvents,
methods: {
code_completion(context: string): Promise<string>
code_insertion(msg_pfx: string, msg_sfx: string): Promise<string>,
code_generation(prompt: string): Promise<string | null>,
code_explaining(code: string, context?: string): Promise<string | null>,
error_explaining(prompt: string): Promise<string | null>,
solidity_answer(prompt: string): Promise<string | null>,
initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise<void>,
chatPipe(pipeMessage: string): Promise<void>,
ProcessChatRequestBuffer(params:IParams): Promise<void>,
initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean): Promise<void>,
}
}
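For orientation, a sketch of calling this API from another plugin through the engine (the plugin object and async context are assumed):
// inside an async method of a plugin whose engine has remixAI registered
const explanation = await plugin.call('remixAI', 'code_explaining', 'contract A { uint x; }')
const completed = await plugin.call('remixAI', 'code_completion', 'function transfer(')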

@ -6,5 +6,6 @@ export interface IExtendedTerminalApi extends ITerminal {
} & StatusEvents
methods: ITerminal['methods'] & {
logHtml(html: string): void
log(message: any): void
}
}

@ -13,6 +13,9 @@ import { ISidePanelApi } from "./plugins/sidePanel-api"
import { IPinnedPanelApi } from "./plugins/pinned-panel-api"
import { ILayoutApi } from "./plugins/layout-api"
import { IMatomoApi } from "./plugins/matomo-api"
import { IRemixAI } from "./plugins/remixai-api"
import { IRemixAID } from "./plugins/remixAIDesktop-api"
export interface ICustomRemixApi extends IRemixApi {
dgitApi: IGitApi
@ -29,6 +32,8 @@ export interface ICustomRemixApi extends IRemixApi {
pinnedPanel: IPinnedPanelApi
layout: ILayoutApi
matomo: IMatomoApi
remixAI: IRemixAI,
remixAID: IRemixAID
}
export declare type CustomRemixApi = Readonly<ICustomRemixApi>

@ -1,31 +0,0 @@
export class CompletionTimer {
private duration: number;
private timerId: NodeJS.Timeout | null = null;
private callback: () => void;
constructor(duration: number, callback: () => void) {
this.duration = duration;
this.callback = callback;
}
start() {
if (this.timerId) {
console.error("Timer is already running.");
return;
}
this.timerId = setTimeout(() => {
this.callback();
this.timerId = null;
}, this.duration);
}
stop() {
if (this.timerId) {
clearTimeout(this.timerId);
this.timerId = null;
} else {
console.error("Timer is not running.");
}
}
}

@ -1,33 +1,42 @@
/* eslint-disable no-control-regex */
import { EditorUIProps, monacoTypes } from '@remix-ui/editor';
import { CompletionTimer } from './completionTimer';
import axios, { AxiosResponse } from 'axios'
import { slice } from 'lodash';
import { activateService } from '@remixproject/plugin-utils';
const _paq = (window._paq = window._paq || [])
const controller = new AbortController();
const { signal } = controller;
const result: string = ''
export class RemixInLineCompletionProvider implements monacoTypes.languages.InlineCompletionsProvider {
props: EditorUIProps
monaco: any
completionEnabled: boolean
task: string
currentCompletion
currentCompletion: any
private lastRequestTime: number = 0;
private readonly minRequestInterval: number = 200;
constructor(props: any, monaco: any) {
this.props = props
this.monaco = monaco
this.completionEnabled = true
this.currentCompletion = {
text: '',
item: [],
task : this.task,
displayed: false,
accepted: false
}
}
async provideInlineCompletions(model: monacoTypes.editor.ITextModel, position: monacoTypes.Position, context: monacoTypes.languages.InlineCompletionContext, token: monacoTypes.CancellationToken): Promise<monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>> {
if (context.selectedSuggestionInfo) {
return;
return { items: []};
}
const currentTime = Date.now();
const timeSinceLastRequest = currentTime - this.lastRequestTime;
if (timeSinceLastRequest < this.minRequestInterval) {
return { items: []}; // dismiss the request
}
this.lastRequestTime = Date.now();
const getTextAtLine = (lineNumber) => {
const lineRange = model.getFullModelRange().setStartPosition(lineNumber, 1).setEndPosition(lineNumber + 1, 1);
return model.getValueInRange(lineRange);
@ -68,18 +77,20 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
const ask = split[split.length - 2].trimStart()
if (split[split.length - 1].trim() === '' && ask.startsWith('///')) {
// use the code generation model, only take max 1000 word as context
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'Solcoder - generating code for following comment: ' + ask.replace('///', '') })
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'RemixAI - generating code for the following comment: ' + ask.replace('///', '') })
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
this.task = 'code_generation'
const data = await this.props.plugin.call('solcoder', 'code_generation', word)
const parsedData = data[0].trimStart() //JSON.parse(data).trimStart()
const parsedData = data.trimStart()
const item: monacoTypes.languages.InlineCompletion = {
insertText: parsedData
};
this.currentCompletion.text = parsedData
this.currentCompletion.item = item
return {
items: [item],
enableForwardStability: true
enableForwardStability: false
}
}
} catch (e) {
@ -93,7 +104,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
word.split('\n').at(-1).trimStart().startsWith('*/') ||
word.split('\n').at(-1).endsWith(';')
){
return; // do not do completion on single and multiline comment
return { items: []}; // do not complete inside single- or multi-line comments
}
// abort if there is a signal
@ -101,28 +112,22 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
return
}
// abort if the completion is not enabled
if (!this.completionEnabled) {
return
}
if (word.replace(/ +$/, '').endsWith('\n')){
// Code insertion
try {
const output = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
const generatedText = output // no need to clean it; it should already be clean
this.task = 'code_insertion'
const output = await this.props.plugin.call('solcoder', 'code_insertion', word, word_after)
const generatedText = output[0] // no need to clean it. should already be
const item: monacoTypes.languages.InlineCompletion = {
insertText: generatedText
};
this.completionEnabled = false
const handleCompletionTimer = new CompletionTimer(100, () => { this.completionEnabled = true });
handleCompletionTimer.start()
this.currentCompletion.text = generatedText
this.currentCompletion.item = item
return {
items: [item],
enableForwardStability: true
enableForwardStability: false,
}
}
catch (err){
@ -130,12 +135,11 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
}
let result
try {
// Code completion
this.task = 'code_completion'
const output = await this.props.plugin.call('solcoder', 'code_completion', word)
const generatedText = output[0]
const output = await this.props.plugin.call('remixAI', 'code_completion', word)
const generatedText = output
let clean = generatedText
if (generatedText.indexOf('@custom:dev-run-script./') !== -1) {
@ -145,13 +149,10 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
clean = this.process_completion(clean)
const item: monacoTypes.languages.InlineCompletion = {
insertText: clean
insertText: clean,
};
// handle the completion timer by locking suggestions request for 2 seconds
this.completionEnabled = false
const handleCompletionTimer = new CompletionTimer(100, () => { this.completionEnabled = true });
handleCompletionTimer.start()
this.currentCompletion.text = clean
this.currentCompletion.item = item
return {
items: [item],
@ -175,11 +176,14 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
}
handleItemDidShow?(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>, item: monacoTypes.languages.InlineCompletion, updatedInsertText: string): void {
this.currentCompletion = { 'item':item, 'task':this.task }
_paq.push(['trackEvent', 'ai', 'solcoder', this.task + '_did_show'])
this.currentCompletion.displayed = true
this.currentCompletion.task = this.task
_paq.push(['trackEvent', 'ai', 'remixAI', this.task + '_did_show'])
}
handlePartialAccept?(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>, item: monacoTypes.languages.InlineCompletion, acceptedCharacters: number): void {
_paq.push(['trackEvent', 'ai', 'solcoder', this.task + '_partial_accept'])
this.currentCompletion.accepted = true
this.currentCompletion.task = this.task
_paq.push(['trackEvent', 'ai', 'remixAI', this.task + '_partial_accept'])
}
freeInlineCompletions(completions: monacoTypes.languages.InlineCompletions<monacoTypes.languages.InlineCompletion>): void {
}
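For orientation, the provider is registered with Monaco roughly as below (a sketch; the actual wiring lives in remix-ui-editor.tsx, and monacoRef is assumed):
// register the inline completion provider for Solidity buffers
const inlineProvider = new RemixInLineCompletionProvider(props, monacoRef.current)
monacoRef.current.languages.registerInlineCompletionsProvider('solidity', inlineProvider)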

@ -708,8 +708,7 @@ export const EditorUI = (props: EditorUIProps) => {
const changes = e.changes;
// Check if the change matches the current completion
if (changes.some(change => change.text === inlineCompletionProvider.currentCompletion.item.insertText)) {
_paq.push(['trackEvent', 'ai', 'solcoder', inlineCompletionProvider.currentCompletion.task + '_accepted'])
inlineCompletionProvider.currentCompletion = null;
_paq.push(['trackEvent', 'ai', 'remixAI', inlineCompletionProvider.currentCompletion.task + '_accepted'])
}
}
});
@ -777,12 +776,11 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.generateDocumentationByAI' }, { content, currentFunction: currentFunction.current })
const cm = await props.plugin.call('solcoder', 'code_explaining', message)
const cm = await props.plugin.call('remixAI', 'code_explaining', message)
const natSpecCom = "\n" + extractNatspecComments(cm)
const cln = await props.plugin.call('codeParser', "getLineColumnOfNode", currenFunctionNode)
const range = new monacoRef.current.Range(cln.start.line, cln.start.column, cln.start.line, cln.start.column)
const lines = natSpecCom.split('\n')
const newNatSpecCom = []
@ -813,7 +811,7 @@ export const EditorUI = (props: EditorUIProps) => {
},
]);
_paq.push(['trackEvent', 'ai', 'solcoder', 'generateDocumentation'])
_paq.push(['trackEvent', 'ai', 'remixAI', 'generateDocumentation'])
},
}
@ -831,8 +829,8 @@ export const EditorUI = (props: EditorUIProps) => {
const file = await props.plugin.call('fileManager', 'getCurrentFile')
const content = await props.plugin.call('fileManager', 'readFile', file)
const message = intl.formatMessage({ id: 'editor.explainFunctionByAI' }, { content, currentFunction: currentFunction.current })
await props.plugin.call('solcoder', 'code_explaining', message, content)
_paq.push(['trackEvent', 'ai', 'solcoder', 'explainFunction'])
await props.plugin.call('remixAI', 'code_explaining', message, content)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}
@ -851,8 +849,8 @@ export const EditorUI = (props: EditorUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', file)
const selectedCode = editor.getModel().getValueInRange(editor.getSelection())
await props.plugin.call('solcoder', 'code_explaining', selectedCode, content)
_paq.push(['trackEvent', 'ai', 'solcoder', 'explainFunction'])
await props.plugin.call('remixAI', 'code_explaining', selectedCode, content)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explainFunction'])
},
}

@ -0,0 +1 @@
export { RemixAITab } from './lib/components/RemixAI'

@ -0,0 +1,84 @@
import React, { useContext, useEffect, useState } from 'react'
import '../remix-ai.css'
import { DefaultModels } from '@remix/remix-ai-core';
export const Default = (props) => {
const [searchText, setSearchText] = useState('');
const [resultText, setResultText] = useState('');
const pluginName = 'remixAI'
const appendText = (newText) => {
setResultText(resultText => resultText + newText);
}
useEffect(() => {
const handleResultReady = async (e) => {
appendText(e);
};
if (props.plugin.isOnDesktop ) {
props.plugin.on(props.plugin.remixDesktopPluginName, 'onStreamResult', (value) => {
handleResultReady(value);
})
}
}, [])
return (
<div>
<div className="remix_ai_plugin_search_container">
<input
type="text"
className="remix_ai_plugin_search-input"
placeholder="Search..."
value={searchText}
onChange={(e) => setSearchText(e.target.value)}
></input>
<button
className="remix_ai_plugin_search_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={() => console.log('search not implemented yet')}
>
<i
className="fa-solid fa-arrow-right"
style={{ color: 'black' }}
></i>
<span className="position-relative text-ai text-sm pl-1"
style={{ fontSize: "x-small", alignSelf: "end" }}>Search</span>
</button>
<button className="remix_ai_plugin_download_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={async () => {
if (props.plugin.isOnDesktop ) {
await props.plugin.call(pluginName, 'downloadModel', DefaultModels()[3]);
}
}}
> Download Model </button>
</div>
<div className="remix_ai_plugin_find_container_internal">
<textarea
className="remix_ai_plugin_search_result_textbox"
rows={10}
cols={50}
placeholder="Results..."
value={resultText}
readOnly
/>
<button className="remix_ai_plugin_download_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={async () => {
props.plugin.call("remixAI", 'initialize', DefaultModels()[1], DefaultModels()[3]);
}}
> Init Model </button>
</div>
<div className="remix_ai_plugin_find-part">
<a href="#" className="remix_ai_plugin_search_result_item_title">/fix the problems in my code</a>
<a href="#" className="remix_ai_plugin_search_result_item_title">/tests add unit tests for my code</a>
<a href="#" className="remix_ai_plugin_search_result_item_title">/explain how the selected code works</a>
</div>
</div>
);
}

@ -0,0 +1,78 @@
// UI interface for selecting a model from a list of models
// This component is used in the ModelSelectionModal component
// It is a dropdown list of models that the user can select from
// The user can also search for a specific model by typing in the search bar
// The user can also filter the models by type
// The user can select a model from the dropdown list
// the panel controlling the model selection can be hidden or shown
// Once selected, the model is either loaded from the local storage or downloaded
// the remix ai desktop plugin provided the interface for storing the model in the local storage after downloading
import React, { useState, useEffect } from 'react';
import { Select, Input } from 'antd';
import { IModel } from '@remix/remix-ai-core';
import { DefaultModels } from '@remix/remix-ai-core';
import { ModelType } from '@remix/remix-ai-core';
import { useTranslation } from 'react-i18next';
const { Option } = Select;
const { Search } = Input;
interface ModelSelectionProps {
onSelect: (model: IModel) => void;
}
export const ModelSelection: React.FC<ModelSelectionProps> = ({ onSelect }) => {
const { t } = useTranslation();
const [models, setModels] = useState<IModel[]>([]);
const [filteredModels, setFilteredModels] = useState<IModel[]>([]);
const [search, setSearch] = useState<string>('');
const [type, setType] = useState<ModelType | 'all'>('all');
useEffect(() => {
setModels(DefaultModels());
}, []);
useEffect(() => {
setFilteredModels(models.filter((model) => {
return model.name.toLowerCase().includes(search.toLowerCase()) &&
(type === 'all' || model.modelType === type);
}));
}, [models, search, type]);
return (
<div>
<Search
placeholder={t('search_models')}
onChange={(e) => setSearch(e.target.value)}
style={{ width: 200, marginBottom: 10 }}
/>
<Select
defaultValue="all"
style={{ width: 200, marginBottom: 10 }}
onChange={(value) => setType(value)}
>
<Option value="all">{t('all_models')}</Option>
{/* options map to the ModelType members declared in constants.ts; i18n keys are placeholders */}
<Option value={ModelType.CODE_COMPLETION}>{t('completion_models')}</Option>
<Option value={ModelType.GENERAL}>{t('general_models')}</Option>
<Option value={ModelType.CODE_COMPLETION_INSERTION}>{t('insertion_models')}</Option>
</Select>
<Select
showSearch
style={{ width: 200 }}
placeholder={t('select_model')}
optionFilterProp="children"
onChange={(value) => onSelect(models.find((model) => model.name === value))}
filterOption={(input, option) =>
option.props.children.toLowerCase().indexOf(input.toLowerCase()) >= 0
}
>
{filteredModels.map((model) => (
<Option key={model.name} value={model.name}>
{model.name}
</Option>
))}
</Select>
</div>
);
};
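Typical usage of the component (sketch):
// the parent renders the selector and receives the chosen model
<ModelSelection onSelect={(model) => console.log('selected model:', model.modelName)} />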

@ -0,0 +1,15 @@
import React, { useContext } from 'react'
import '../remix-ai.css'
import { Default } from './Default'
export const RemixAITab = (props) => {
const plugin = props.plugin
return (
<>
<div id="remixAITab pr-4 px-2 pb-4">
<Default plugin={plugin}></Default>
</div>
</>
)
}

@ -0,0 +1,167 @@
/* Existing CSS */
.remix_ai_plugin_search_result_item_title {
display: flex;
-webkit-user-select: none; /* Safari */
-moz-user-select: none; /* Firefox */
-ms-user-select: none; /* IE10+/Edge */
user-select: none; /* Standard */
cursor: pointer;
align-items: center;
color: #58a6ff;
text-decoration: none;
font-size: 1.2em;
margin: 10px 0;
}
.remix_ai_plugin_search_result_item_title:hover {
text-decoration: underline;
}
.remix_ai_plugin_wrap_summary {
overflow: hidden;
white-space: nowrap;
-webkit-user-select: none; /* Safari */
-moz-user-select: none; /* Firefox */
-ms-user-select: none; /* IE10+/Edge */
user-select: none; /* Standard */
cursor: pointer;
}
.remix_ai_plugin_find-part {
display: flex;
flex-direction: column;
padding-top: 5px;
}
.remix_ai_plugin_controls {
display: flex;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_line_container {
display: flex;
flex-direction: row;
position: relative;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_line {
width: 100%;
overflow: hidden;
display: flex;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_control {
flex-grow: 0;
position: absolute;
right: 0px;
top: 0px;
}
.remix_ai_plugin_summary_right {
min-width: 0;
white-space: pre;
text-overflow: ellipsis;
overflow: hidden;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_replace_strike {
text-decoration: line-through;
}
.remix_ai_plugin_summary_left {
white-space: pre;
}
.remix_ai_plugin_search_tab mark {
padding: 0;
white-space: pre;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_line_container .remix_ai_plugin_search_control {
display: none;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_line_container:hover .remix_ai_plugin_search_control {
display: block;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_line_container:hover .remix_ai_plugin_search_line {
width: 93%;
}
.remix_ai_plugin_search-input {
display: flex;
flex-direction: row;
align-items: center;
padding: 10px;
margin: 10px 0;
width: 100%;
max-width: 500px;
border: 1px solid #ccc;
border-radius: 4px;
}
.remix_ai_plugin_search_tab .checked {
background-color: var(--secondary);
}
.remix_ai_plugin_search_tab .remix_ai_plugin_search_file_name {
text-overflow: ellipsis;
overflow: hidden;
text-transform: uppercase;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_result_count {
flex-grow: 1;
text-align: right;
display: flex;
justify-content: flex-end;
}
.remix_ai_plugin_search_tab .remix_ai_plugin_result_count_number {
font-size: x-small;
}
.remix_ai_plugin_search_container {
display: flex;
flex-direction: row;
justify-content: center;
margin-top: 20px;
}
.remix_ai_plugin_search_container_internal {
display: flex;
flex-direction: column;
flex-grow: 1;
margin-top: 20px;
align-items: center;
}
.remix_ai_plugin_search_container_arrow {
display: flex !important;
align-items: center;
cursor: pointer !important;
}
.remix_ai_plugin_wrap_summary_replace {
display: flex;
flex-direction: row;
justify-content: flex-end;
}
.remix_ai_plugin_search_indicator {
white-space: pre;
text-overflow: ellipsis;
overflow: hidden;
}
.remix_ai_plugin_search_result_textbox {
width: 100%;
max-width: 500px;
padding: 10px;
border: 1px solid #ccc;
border-radius: 4px;
resize: none;
margin: 10px 0;
color: #333;
}

@ -75,8 +75,8 @@ export const Renderer = ({ message, opt = {}, plugin }: RendererProps) => {
try {
const content = await plugin.call('fileManager', 'readFile', editorOptions.errFile)
const message = intl.formatMessage({ id: 'solidity.openaigptMessage' }, { content, messageText })
await plugin.call('solcoder', 'error_explaining', message)
_paq.push(['trackEvent', 'ai', 'solcoder', 'error_explaining_SolidityError'])
await plugin.call('remixAI', 'error_explaining', message)
_paq.push(['trackEvent', 'ai', 'remixAI', 'error_explaining_SolidityError'])
} catch (err) {
console.error('unable to ask RemixAI')
console.error(err)

@ -477,7 +477,7 @@ export const RemixUiSettings = (props: RemixUiSettingsProps) => {
role='link'
onClick={()=>{
window.open("https://remix-ide.readthedocs.io/en/latest/ai.html")
_paq.push(['trackEvent', 'ai', 'solcoder', 'documentation'])
_paq.push(['trackEvent', 'ai', 'remixAI', 'documentation'])
}}
>
<i aria-hidden="true" className="fas fa-book"></i>

@ -251,9 +251,9 @@ export const TabsUI = (props: TabsUIProps) => {
const content = await props.plugin.call('fileManager', 'readFile', path)
if (tabsState.currentExt === 'sol') {
setExplaining(true)
await props.plugin.call('solcoder', 'code_explaining', content)
await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'solcoder', 'explain_file'])
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
}
}}
>
@ -283,7 +283,7 @@ export const TabsUI = (props: TabsUIProps) => {
onClick={async () => {
await props.plugin.call('settings', 'updateCopilotChoice', !ai_switch)
setAI_switch(!ai_switch)
ai_switch ? _paq.push(['trackEvent', 'ai', 'solcoder', 'copilot_enabled']) : _paq.push(['trackEvent', 'ai', 'solcoder', 'copilot_disabled'])
ai_switch ? _paq.push(['trackEvent', 'ai', 'remixAI', 'copilot_enabled']) : _paq.push(['trackEvent', 'ai', 'remixAI', 'copilot_disabled'])
}}
>
<i className={ai_switch ? "fas fa-toggle-on fa-lg" : "fas fa-toggle-off fa-lg"}></i>

@ -238,12 +238,12 @@ export const RemixUiTerminal = (props: RemixUiTerminalProps) => {
// TODO: rm gpt or redirect gpt to sol-gpt
} else if (script.trim().startsWith('gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
await call('solcoder', 'solidity_answer', script)
_paq.push(['trackEvent', 'ai', 'solcoder', 'askFromTerminal'])
await call('remixAI', 'solidity_answer', script)
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else if (script.trim().startsWith('sol-gpt')) {
call('terminal', 'log',{ type: 'warn', value: `> ${script}` })
await call('solcoder', 'solidity_answer', script)
_paq.push(['trackEvent', 'ai', 'solcoder', 'askFromTerminal'])
await call('remixAI', 'solidity_answer', script)
_paq.push(['trackEvent', 'ai', 'remixAI', 'askFromTerminal'])
} else {
await call('scriptRunner', 'execute', script)
}

@ -193,6 +193,7 @@ export const createWorkspace = async (
}
}
}
await populateWorkspace(workspaceTemplateName, opts, isEmpty, (err: Error) => { cb && cb(err, workspaceName) }, isGitRepo, createCommit)
// this call needs to be here after the callback because it calls dGitProvider which also calls this function and that would cause an infinite loop
await plugin.setWorkspaces(await getWorkspaces())
@ -545,7 +546,6 @@ export const switchToWorkspace = async (name: string) => {
await plugin.fileProviders.workspace.setWorkspace(name)
await plugin.setWorkspace({ name, isLocalhost: false })
const isGitRepo = await plugin.fileManager.isGitRepo()
dispatch(setMode('browser'))
dispatch(setCurrentWorkspace({ name, isGitRepo }))
dispatch(setReadOnlyMode(false))
@ -703,9 +703,10 @@ export const cloneRepository = async (url: string) => {
dispatch(cloneRepositoryRequest())
promise
.then(async () => {
const isActive = await plugin.call('manager', 'isActive', 'dgit')
if (!isActive) await plugin.call('manager', 'activatePlugin', 'dgit')
if (!plugin.registry.get('platform').api.isDesktop()) {
const isActive = await plugin.call('manager', 'isActive', 'dgit')
if (!isActive) await plugin.call('manager', 'activatePlugin', 'dgit')
}
await fetchWorkspaceDirectory(ROOT_PATH)
const workspacesPath = plugin.fileProviders.workspace.workspacesPath
const branches = await getGitRepoBranches(workspacesPath + '/' + repoName)
@ -790,7 +791,7 @@ export const getGitRepoCurrentBranch = async (workspaceName: string) => {
}
export const showAllBranches = async () => {
if (plugin.registry.get('platform').api.isDesktop()) return
const isActive = await plugin.call('manager', 'isActive', 'dgit')
if (!isActive) await plugin.call('manager', 'activatePlugin', 'dgit')
plugin.call('menuicons', 'select', 'dgit')

69639
package-lock.json generated

File diff suppressed because it is too large

@ -122,7 +122,6 @@
"@ricarso/react-image-magnifiers": "^1.9.0",
"@types/nightwatch": "^2.3.1",
"@web3modal/ethers5": "^4.0.1",
"@xenova/transformers": "^2.7.0",
"ansi-gray": "^0.1.1",
"assert": "^2.1.0",
"async": "^2.6.2",
@ -170,7 +169,6 @@
"merge": "^2.1.1",
"npm-install-version": "^6.0.2",
"octokit": "^3.1.2",
"openai": "^3.3.0",
"path-browserify": "^1.0.1",
"prettier": "^2.8.4",
"prettier-plugin-solidity": "^1.0.0-beta.24",
@ -278,6 +276,7 @@
"@uniswap/v2-core": "^1.0.1",
"@uniswap/v3-core": "^1.0.1",
"@vercel/webpack-asset-relocator-loader": "^1.7.3",
"@xenova/transformers": "^2.17.2",
"ace-mode-lexon": "^1.*.*",
"ace-mode-move": "0.0.1",
"ace-mode-solidity": "^0.1.0",
@ -351,6 +350,7 @@
"nx": "15.7.1",
"nyc": "^13.3.0",
"onchange": "^3.2.1",
"onnxruntime-web": "^1.18.0",
"os-browserify": "^0.3.0",
"process": "^0.11.10",
"react-refresh": "^0.14.0",

@ -175,6 +175,13 @@
"@remix-ui/xterm": [
"libs/remix-ui/xterm/src/index.ts"
],
"@remix-ui/remix-ai": [
"libs/remix-ui/remix-ai/src/index.ts"
],
"@remix/remix-ai-core": [
"libs/remix-ai-core/src/index.ts"
],
"@remix-ui/git": [
"libs/remix-ui/git/src/index.ts"
],

@ -3980,6 +3980,11 @@
resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861"
integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==
"@huggingface/jinja@^0.2.2":
version "0.2.2"
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
integrity sha512-/KPde26khDUIPkTGU82jdtTW9UAuvUTumCAbFs/7giR0SxsvZC4hru51PBvpijH6BVkHcROcvZM/lpy5h1jRRA==
"@humanwhocodes/config-array@^0.11.10":
version "0.11.10"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2"
@ -8689,11 +8694,12 @@
dependencies:
tslib "^2.3.0"
"@xenova/transformers@^2.7.0":
version "2.7.0"
resolved "https://registry.yarnpkg.com/@xenova/transformers/-/transformers-2.7.0.tgz#0aabc8700d32ed8e28f6aa61abf8653f62fb1678"
integrity sha512-py5RqZt9lL/FFUT5X6St+TOSBoVaEmDETI98lK9ApEOvlWeX4bTS2nMQDFe3nFMpv24+wllhmPw2Www/f/ubJA==
"@xenova/transformers@^2.17.2":
version "2.17.2"
resolved "https://registry.yarnpkg.com/@xenova/transformers/-/transformers-2.17.2.tgz#7448d73b90f67bced66f39fe2dd656adc891fde5"
integrity sha512-lZmHqzrVIkSvZdKZEx7IYY51TK0WDrC8eR0c5IMnBsO8di8are1zzw8BlLhyO2TklZKLN5UffNGs1IJwT6oOqQ==
dependencies:
"@huggingface/jinja" "^0.2.2"
onnxruntime-web "1.14.0"
sharp "^0.32.0"
optionalDependencies:
@ -9733,13 +9739,6 @@ axios@^0.21.1:
dependencies:
follow-redirects "^1.14.0"
axios@^0.26.0:
version "0.26.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
dependencies:
follow-redirects "^1.14.8"
axobject-query@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be"
@ -16076,7 +16075,7 @@ fnjson@^1.3.0:
resolved "https://registry.yarnpkg.com/fnjson/-/fnjson-1.3.0.tgz#a03f7a80cff29c82dee59ac9c77a62e82b876459"
integrity sha1-oD96gM/ynILe5ZrJx3pi6CuHZFk=
follow-redirects@^1.0.0, follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.8, follow-redirects@^1.15.0:
follow-redirects@^1.0.0, follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
@ -21164,6 +21163,11 @@ long@^4.0.0:
resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==
long@^5.0.0, long@^5.2.3:
version "5.2.3"
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
longest-streak@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4"
@ -24067,6 +24071,11 @@ onnx-proto@^4.0.4:
dependencies:
protobufjs "^6.8.8"
onnxruntime-common@1.19.2:
version "1.19.2"
resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.19.2.tgz#39447d703aef6499f71487cb8970f58752234523"
integrity sha512-a4R7wYEVFbZBlp0BfhpbFWqe4opCor3KM+5Wm22Az3NGDcQMiU2hfG/0MfnBs+1ZrlSGmlgWeMcXQkDk1UFb8Q==
onnxruntime-common@~1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz#2bb5dac5261269779aa5fb6536ca379657de8bf6"
@ -24091,6 +24100,18 @@ onnxruntime-web@1.14.0:
onnxruntime-common "~1.14.0"
platform "^1.3.6"
onnxruntime-web@^1.18.0:
version "1.19.2"
resolved "https://registry.yarnpkg.com/onnxruntime-web/-/onnxruntime-web-1.19.2.tgz#b0291df867a9892230f98a6d90b4fc6d29b8c099"
integrity sha512-r0ok6KpTUXR4WA+rHvUiZn7JoH02e8iS7XE1p5bXk7q3E0UaRFfYvpMNUHqEPiTBMuIssfBxDCQjUihV8dDFPg==
dependencies:
flatbuffers "^1.12.0"
guid-typescript "^1.0.9"
long "^5.2.3"
onnxruntime-common "1.19.2"
platform "^1.3.6"
protobufjs "^7.2.4"
open@^8.0.9, open@^8.4.0:
version "8.4.0"
resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8"
@ -24100,14 +24121,6 @@ open@^8.0.9, open@^8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
openai@^3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-3.3.0.tgz#a6408016ad0945738e1febf43f2fccca83a3f532"
integrity sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==
dependencies:
axios "^0.26.0"
form-data "^4.0.0"
opener@^1.5.1, opener@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
@ -25629,6 +25642,24 @@ protobufjs@^6.8.8:
"@types/node" ">=13.7.0"
long "^4.0.0"
protobufjs@^7.2.4:
version "7.4.0"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.4.0.tgz#7efe324ce9b3b61c82aae5de810d287bc08a248a"
integrity sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==
dependencies:
"@protobufjs/aspromise" "^1.1.2"
"@protobufjs/base64" "^1.1.2"
"@protobufjs/codegen" "^2.0.4"
"@protobufjs/eventemitter" "^1.1.0"
"@protobufjs/fetch" "^1.1.0"
"@protobufjs/float" "^1.0.2"
"@protobufjs/inquire" "^1.1.0"
"@protobufjs/path" "^1.1.2"
"@protobufjs/pool" "^1.1.0"
"@protobufjs/utf8" "^1.1.0"
"@types/node" ">=13.7.0"
long "^5.0.0"
protocol-buffers-schema@^3.3.1:
version "3.6.0"
resolved "https://registry.yarnpkg.com/protocol-buffers-schema/-/protocol-buffers-schema-3.6.0.tgz#77bc75a48b2ff142c1ad5b5b90c94cd0fa2efd03"
