added inference server for Mac

pull/5098/head
Stéphane Tetsing 4 months ago
parent 66cca1f491
commit 09fbfdfa71
  1. apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (79 changed lines)
  2. apps/remix-ide/src/app/plugins/solcoderAI.tsx (26 changed lines)
  3. apps/remixdesktop/src/lib/InferenceServerManager.ts (218 changed lines)
  4. apps/remixdesktop/src/plugins/remixAIDektop.ts (104 changed lines)
  5. libs/remix-ai-core/src/index.ts (10 changed lines)
  6. libs/remix-ai-core/src/inferencers/local/completionTransformer.ts (181 changed lines)
  7. libs/remix-ai-core/src/inferencers/local/llamaInferencer.ts (155 changed lines)
  8. libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (125 changed lines)
  9. libs/remix-ai-core/src/prompts/completionPrompts.ts (5 changed lines)
  10. libs/remix-ai-core/src/prompts/promptBuilder.ts (7 changed lines)
  11. libs/remix-ai-core/src/types/models.ts (11 changed lines)
  12. libs/remix-ai-core/src/types/types.ts (19 changed lines)
  13. libs/remix-ui/editor/src/lib/providers/inlineCompletionProvider.ts (17 changed lines)
  14. libs/remix-ui/remix-ai/src/lib/components/Default.tsx (29 changed lines)
  15. package.json (4 changed lines)
  16. yarn.lock (476 changed lines)

@ -3,11 +3,15 @@ import { ViewPlugin } from '@remixproject/engine-web'
import { Plugin } from '@remixproject/engine';
import { RemixAITab } from '@remix-ui/remix-ai'
import React from 'react';
import { ICompletions, IModel, RemoteInferencer } from '@remix/remix-ai-core';
const profile = {
name: 'remixAI',
displayName: 'Remix AI',
methods: [''],
methods: ['code_generation', 'code_completion',
"solidity_answer", "code_explaining",
"code_insertion", "error_explaining",
"initializeRemixAI"],
events: [],
icon: 'assets/img/remix-logo-blue.png',
description: 'RemixAI provides AI services to Remix IDE.',
@ -18,20 +22,89 @@ const profile = {
maintainedBy: 'Remix'
}
export class RemixAIPlugin extends ViewPlugin {
isOnDesktop:boolean = false
aiIsActivated:boolean = false
selectedModel:IModel = null
readonly remixDesktopPluginName = 'remixAID'
remoteInferencer:RemoteInferencer = null
constructor(inDesktop:boolean) {
console.log('remixAIPlugin loaded')
super(profile)
this.isOnDesktop = inDesktop
// do not use the user's machine resources for remote inferencing
}
onActivation(): void {
if (this.isOnDesktop) {
console.log('Activating RemixAIPlugin on desktop')
} else {
console.log('Activating RemixAIPlugin on browser')
}
}
async initializeRemixAI(model: IModel) {
this.selectedModel = model
if (this.isOnDesktop) {
this.call(this.remixDesktopPluginName, 'initializeModelBackend', this.selectedModel)
} else {
// on browser
console.log('Initializing RemixAIPlugin on browser')
this.remoteInferencer = new RemoteInferencer(this)
}
this.aiIsActivated = true
return true
}
async code_generation(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'code_generation', prompt)
} else {
return this.remoteInferencer.code_generation(prompt)
}
}
async code_completion(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'code_completion', prompt)
} else {
return this.remoteInferencer.code_completion(prompt)
}
}
async solidity_answer(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'solidity_answer', prompt)
} else {
return this.remoteInferencer.solidity_answer(prompt)
}
}
async code_explaining(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'code_explaining', prompt)
} else {
return this.remoteInferencer.code_explaining(prompt)
}
}
async error_explaining(prompt: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'error_explaining', prompt)
} else {
return this.remoteInferencer.error_explaining(prompt)
}
}
async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
if (this.isOnDesktop) {
return this.call(this.remixDesktopPluginName, 'code_insertion', msg_pfx, msg_sfx)
} else {
return this.remoteInferencer.code_insertion(msg_pfx, msg_sfx)
}
}
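Every method above follows the same dispatch: on desktop the call is forwarded to the `remixAID` plugin, otherwise it goes through the `RemoteInferencer`. A minimal sketch of how another plugin could consume this surface through the engine; the consumer class and its profile literal are illustrative and not part of this commit:

import { Plugin } from '@remixproject/engine'

// Illustrative consumer plugin targeting the 'remixAI' profile declared above.
class AIConsumerPlugin extends Plugin {
  constructor() {
    super({ name: 'aiConsumer', methods: [], events: [] })
  }
  async explainSelection(selectedCode: string): Promise<string> {
    // Routed to RemoteInferencer in the browser, or relayed to the 'remixAID' desktop plugin.
    return this.call('remixAI', 'code_explaining', selectedCode)
  }
}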

@ -1,4 +1,5 @@
import { Plugin } from '@remixproject/engine'
import axios from 'axios'
export type SuggestOptions = {
max_new_tokens: number,
@ -243,21 +244,20 @@ export class SolCoder extends Plugin {
let result
try {
result = await(
await fetch(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({ "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}),
})
).json()
if (result) {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data[0] })
this.call('terminal', 'log', { type: 'aitypewriterwarning', value:"explaining with axios" })
result = await axios(this.api_url, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
data: JSON.stringify({ "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}),
})
if (result.statusText === "OK") {
this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result.data.data[0] })
this.pushChatHistory(prompt, result)
}
return result.data[0]
return result.data.data[0]
} catch (e) {
this.call('terminal', 'log', { type: 'typewritererror', value: `Unable to get a response ${e.message}` })
return
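The error_explaining path now goes through axios instead of fetch, so the answer sits one level deeper in the response (`result.data.data[0]`). A standalone sketch of the same request shape; the endpoint URL and positional payload come from the code above, everything else is illustrative:

import axios from 'axios'

// Sketch only: posts the same positional payload and reads the nested answer field.
async function explainError(apiUrl: string, prompt: string): Promise<string | undefined> {
  const result = await axios.post(apiUrl, { data: [prompt, 'error_explaining', false, 2000, 0.9, 0.8, 50] })
  // axios resolves with { status, statusText, data }; the service wraps its reply in data.data[0]
  if (result.status === 200) return result.data.data[0]
}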

@ -0,0 +1,218 @@
import path from 'path';
const { spawn } = require('child_process'); // eslint-disable-line
import fs from 'fs';
import axios from "axios";
import { EventEmitter } from 'events';
import { ICompletions, IModel, IParams } from "@remix/remix-ai-core";
import { getInsertionPrompt } from "@remix/remix-ai-core";
const completionParams:IParams = {
temperature: 0.8,
topK: 40,
topP: 0.92,
max_new_tokens: 15,
}
const insertionParams:IParams = {
temperature: 0.8,
topK: 40,
topP: 0.92,
max_new_tokens: 150,
}
export class InferenceManager implements ICompletions {
isReady: boolean = false
selectedModel: any
modelPath: string
event: EventEmitter
modelCacheDir: string = undefined
isInferencing: boolean = false
inferenceProcess: any=null
inferenceURL = 'http://127.0.0.1:5501'
static instance=null
private constructor(model:IModel, modelDir:string) {
this.selectedModel = model
this.event = new EventEmitter()
this.modelCacheDir = path.join(modelDir, 'models')
}
static getInstance(model:IModel, modelDir:string){
if (!InferenceManager.instance) {
// check if there is a process already running
if (!model || !modelDir) {
console.error('Model and model directory are required to create InferenceManager instance')
return null
}
console.log('Creating new InferenceManager instance')
InferenceManager.instance = new InferenceManager(model, modelDir)
}
return InferenceManager.instance
}
async init() {
try {
await this._downloadModel(this.selectedModel)
if (this.modelPath === undefined) {
console.log('Model not downloaded or not found')
return
}
console.log('Model downloaded at', this.modelPath)
this._startServer()
this.isReady = true
} catch (error) {
console.error('Error initializing the model', error)
this.isReady = false
}
}
async _downloadModel(model): Promise<void> {
if (this.modelCacheDir === undefined) {
console.log('Model cache directory not provided')
return
} else {
const outputLocationPath = path.join(this.modelCacheDir, model.modelName);
console.log('output location path is', outputLocationPath)
if (fs.existsSync(outputLocationPath)) {
this.modelPath = outputLocationPath
console.log('Model already exists in the output location', outputLocationPath);
return;
}
// Make a HEAD request to get the file size
const { headers } = await axios.head(model.downloadUrl);
const totalSize = parseInt(headers['content-length'], 10);
// Create a write stream to save the file
const writer = fs.createWriteStream(outputLocationPath);
// Start the file download
const response = await axios({
method: 'get',
url: model.downloadUrl,
responseType: 'stream'
});
let downloadedSize = 0;
response.data.on('data', (chunk: Buffer) => {
downloadedSize += chunk.length;
const progress = (Number((downloadedSize / totalSize) * 100).toFixed(2));
console.log(`Downloaded ${progress}%`);
this.event.emit('download', progress);
});
response.data.pipe(writer);
this.event.emit('ready')
this.modelPath = outputLocationPath
console.log('LLama Download complete');
return new Promise((resolve, reject) => {
writer.on('finish', resolve);
writer.on('error', reject);
});
}
}
getPythonScriptPath() {
return path.join(process.cwd(), 'dist', 'InferenceServer');
}
private _startServer() {
return new Promise<void>((resolve, reject) => {
const serverPath = this.getPythonScriptPath();
// Check if the file exists
if (!fs.existsSync(serverPath)) {
return reject(new Error(`Python script not found at ${serverPath}`));
}
// Check file permissions
try {
fs.accessSync(serverPath, fs.constants.X_OK);
} catch (err) {
return reject(new Error(`No execute permission on ${serverPath}`));
}
console.log('Running in non-pkg environment');
const spawnArgs = ['5501', this.modelPath];
console.log(`Spawning process: ${serverPath} ${spawnArgs.join(' ')}`);
this.inferenceProcess = spawn(serverPath, spawnArgs);
this.inferenceProcess.stdout.on('data', (data) => {
console.log(`Inference server output: ${data}`);
if (data.includes('Running on http://')) {
console.log('Inference server started successfully');
resolve();
}
});
this.inferenceProcess.stderr.on('data', (data) => {
console.error(`Inference server: ${data}`);
resolve();
});
this.inferenceProcess.on('error', (err) => {
console.error('Failed to start Inference server:', err);
reject(err);
});
this.inferenceProcess.on('close', (code) => {
console.log(`Inference server process exited with code ${code}`);
if (code !== 0) {
reject(new Error(`Inference server exited with code ${code}`));
}
});
});
}
stopInferenceServer() {
if (this.inferenceProcess) {
this.inferenceProcess.kill();
this.inferenceProcess = null;
}
}
private async _makeRequest(endpoint, payload){
try {
this.event.emit('onInference')
const options = { headers: { 'Content-Type': 'application/json', } }
const response = await axios.post(`${this.inferenceURL}/${endpoint}`, payload, options)
this.event.emit('onInferenceDone')
if (response?.data?.generatedText) {
return response.data.generatedText
} else { return "" }
} catch (error) {
console.error('Error making request to Inference server:', error.message);
}
}
async code_completion(context: any, params:IParams=completionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
// as of now no prompt required
const payload = { context_code: context, ...params }
return this._makeRequest('code_completion', payload)
}
async code_insertion(msg_pfx: string, msg_sfx: string, params:IParams=insertionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
const payload = { code_pfx:msg_pfx, code_sfx:msg_sfx, ...params }
return this._makeRequest('code_insertion', payload)
}
}
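`InferenceManager` is a process-wide singleton: it downloads the selected GGUF model into `<modelDir>/models`, spawns the bundled `InferenceServer` executable on port 5501 and forwards requests to it over HTTP. A usage sketch, assuming a llamacpp entry from `DefaultModels()` and the Electron `cacheDir`; the import paths are the ones the desktop plugin below uses and would depend on where such a file lives:

import { DefaultModels } from '@remix/remix-ai-core'
import { cacheDir } from '../utils/config'                    // same config helper the desktop plugin uses
import { InferenceManager } from '../lib/InferenceServerManager'

// Hypothetical wiring; the desktop plugin client below does the equivalent of this.
async function startLocalInference() {
  const manager = InferenceManager.getInstance(DefaultModels()[3], cacheDir)
  await manager.init()   // downloads the model if needed, then spawns the local inference server
  // Resolves to the generatedText field of POST http://127.0.0.1:5501/code_completion
  return manager.code_completion('pragma solidity ^0.8.0;\ncontract Storage {')
}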

@ -1,10 +1,9 @@
import { ElectronBasePlugin, ElectronBasePluginClient } from "@remixproject/plugin-electron"
import { Profile } from "@remixproject/plugin-utils"
// import { IModel, ModelType, DefaultModels } from '@remix/remix-ai-core';
//import { pipeline, env } from '@xenova/transformers';
// use remix ai core
import { InlineCompletionServiceTransformer, LLamaInferencer } from '../../../../libs/remix-ai-core/src/index'
import { InferenceManager } from "../lib/InferenceServerManager"
import { cacheDir } from "../utils/config"
// import { isE2E } from "../main";
@ -20,7 +19,7 @@ const profile = {
export class RemixAIDesktopPlugin extends ElectronBasePlugin {
clients: RemixAIDesktopPluginClient[] = []
constructor() {
console.log("loading the remix plugin")
console.log("loading the remix plugin main plugin ")
super(profile, clientProfile, RemixAIDesktopPluginClient)
this.methods = [...super.methods]
@ -42,9 +41,8 @@ const clientProfile: Profile = {
}
class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
multitaskModel: LLamaInferencer| InlineCompletionServiceTransformer = null
completionModel: LLamaInferencer| InlineCompletionServiceTransformer = null
readonly modelCacheDir: string = cacheDir
InferenceModel:InferenceManager = null
constructor (webContentsId: number, profile: Profile){
console.log("loading the remix plugin client ........................")
@ -64,103 +62,25 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
this.emit('enabled')
}
async initializeModelBackend(multitaskModel: any, completionModel?: any){
// console.log("Initializing backend with model ", multitaskModel, completionModel)
// if (completionModel && completionModel.modelType === 'CODE_COMPLETION'){
// switch (completionModel.modelReqs.backend) {
// case 'llamacpp':
// this.completionModel = new LLamaInferencer(completionModel, this.modelCacheDir)
// break;
// case 'transformerjs':
// this.completionModel = new InlineCompletionServiceTransformer(completionModel, this.modelCacheDir)
// break;
// default:
// console.log("Backend not supported")
// break;
// }
// }
console.log("Initializing backend with model ", multitaskModel)
switch (multitaskModel.modelReqs.backend) {
case 'llamacpp':
this.multitaskModel = new LLamaInferencer(multitaskModel, this.modelCacheDir)
break;
case 'transformerjs':
this.multitaskModel = new InlineCompletionServiceTransformer(multitaskModel, this.modelCacheDir)
break;
default:
console.log("Backend not supported")
break;
}
// init the models
if (this.multitaskModel){
await this.multitaskModel.init()
}
if (this.completionModel){
await this.completionModel.init()
async initializeModelBackend(multitaskModel: any){
if (this.InferenceModel === null) {
console.log('Initializing Inference model')
this.InferenceModel = InferenceManager.getInstance(multitaskModel, this.modelCacheDir)
if (!this.InferenceModel.isReady) this.InferenceModel.init()
} else {
console.log('Inference model already initialized')
}
}
async code_completion(context: any) {
console.log("Code completion called")
if (this.completionModel){
return this.completionModel.code_completion(context)
}
// use general purpose model
return this.multitaskModel.code_completion(context)
return this.InferenceModel.code_completion(context)
}
async code_insertion(msg_pfx: string, msg_sfx: string) {
console.log("Code insertion called")
if (this.completionModel){
return this.completionModel.code_insertion(msg_pfx, msg_sfx)
}
// use general purpose model
return this.multitaskModel.code_insertion(msg_pfx, msg_sfx)
return this.InferenceModel.code_insertion(msg_pfx, msg_sfx)
}
// async _loadLocalModel(): Promise<LlamaChatSession> {
// if (!this.SelectedModelPath) {
// console.log('No model selected yet');
// return;
// }
// console.log('Loading model at ', this.SelectedModelPath);
// const model = new LlamaModel(this._getModelOptions());
// const context = new LlamaContext({model});
// const session = new LlamaChatSession({context});
// return session;
// }
// _getModelOptions(): LlamaModelOptions {
// const options: LlamaModelOptions = {
// modelPath: this.SelectedModelPath? this.SelectedModelPath: null,
// contextSize: 1024,
// batchSize: 1,
// gpuLayers: this.selectedModel.modelReqs.GPURequired? -1: 0,
// threads: 1,
// temperature: 0.9,
// topK: 0,
// topP: 1,
// logitsAll: false,
// vocabOnly: false,
// useMmap: false,
// useMlock: false,
// embedding: false,
// };
// return options;
// }
// async getInferenceModel(): Promise<LlamaChatSession> {
// return this._loadLocalModel();
// }
changemodel(newModel: any){
/// dereference the current static inference object
/// set new one

@ -1,15 +1,17 @@
'use strict'
import { IModel, IModelResponse, IModelRequest, InferenceModel, ICompletions, IParams } from './types/types'
import { IModel, IModelResponse, IModelRequest, InferenceModel, ICompletions,
IParams, ChatEntry, AIRequestType, RemoteBackendOPModel } from './types/types'
import { ModelType } from './types/constants'
import { DefaultModels } from './types/models'
import { getCompletionPrompt, getInsertionPrompt } from './prompts/completionPrompts'
import { InlineCompletionServiceTransformer } from './inferencers/local/completionTransformer'
import { LLamaInferencer } from './inferencers/local/llamaInferencer'
import { PromptBuilder } from './prompts/promptBuilder'
import { RemoteInferencer } from './inferencers/remote/remoteInference'
export {
IModel, IModelResponse, IModelRequest, InferenceModel,
ModelType, DefaultModels, ICompletions, IParams,
getCompletionPrompt, getInsertionPrompt,
InlineCompletionServiceTransformer, LLamaInferencer
RemoteInferencer,
ChatEntry, AIRequestType, RemoteBackendOPModel, PromptBuilder
}
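The barrel now exports the remote inferencer and the chat/prompt types in place of the removed local inferencers, so consumers pull everything from the package root, e.g. (using the workspace alias seen elsewhere in this diff):

import { RemoteInferencer, DefaultModels, AIRequestType, PromptBuilder } from '@remix/remix-ai-core'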

@ -1,181 +0,0 @@
import { EventEmitter } from 'events';
import path from 'path';
import { ICompletions, IModel, IParams } from '../../types/types';
import { getInsertionPrompt } from '../../prompts/completionPrompts';
const insertionParams:IParams = {
temperature: 0.9,
max_new_tokens: 1024,
return_full_text: false,
// repetition_penalty: 1.5,
// num_beams: 1,
// num_return_sequences: 1,
}
const completionParams:IParams = {
temperature: 0.3,
max_new_tokens: 15,
return_full_text: false,
top_p: 0.9,
top_k: 50
}
class InlineCompletionTransformer {
static task = null
static model = null
static instance = null;
static defaultModels = null
// getting the instance of the model for the first time will download the model to the cache
static async getInstance(progress_callback = null, modelCacheDir:string) {
if (InlineCompletionTransformer.instance === null) {
const TransformersApi = Function('return import("@xenova/transformers")')();
const { pipeline, env } = await TransformersApi;
if (InlineCompletionTransformer.model.modelReqs.backend !== 'transformerjs') {
console.log('model not supported')
return
}
console.log('loading model', InlineCompletionTransformer.model)
InlineCompletionTransformer.instance = pipeline(InlineCompletionTransformer.task, InlineCompletionTransformer.model.modelName, { progress_callback, quantized: true, cache_dir: modelCacheDir, return_full_text: false });
}
return this.instance;
}
}
class DownloadManager {
// eslint-disable-next-line @typescript-eslint/ban-types
responses: { [key: number]: Function }
events: EventEmitter
current: number
constructor() {
this.events = new EventEmitter()
this.responses = {}
this.current
}
onMessageReceived = (e) => {
switch (e.status) {
case 'initiate':
this.events.emit(e.status, e)
// Model file start load: add a new progress item to the list.
break;
case 'progress':
this.events.emit(e.status, e)
// Model file progress: update one of the progress items.
break;
case 'done':
this.events.emit(e.status, e)
// Model file loaded: remove the progress item from the list.
break;
case 'ready':
this.events.emit(e.status, e)
// Pipeline ready: the worker is ready to accept messages.
break;
case 'update':
this.events.emit(e.status, e)
// Generation update: update the output text.
break;
case 'complete':
this.events.emit(e.status, e)
if (this.responses[e.id]) {
if (this.current === e.id) {
this.responses[e.id](null, e)
} else {
this.responses[e.id]('aborted')
}
delete this.responses[e.id]
this.current = null
}
// Generation complete: re-enable the "Generate" button
break;
}
}
}
export class InlineCompletionServiceTransformer implements ICompletions{
dMng = new DownloadManager()
isReady = false
event = new EventEmitter()
selectedModel: any
inferencer = null
modelCacheDir: string = undefined
constructor(model:IModel, modelDir:string) {
this.selectedModel = model
this.modelCacheDir = path.join(modelDir, 'models')
this.dMng.events.on('progress', (data) => {
// log progress percentage
const loaded = ((Number(data.loaded * 100 / data.total)).toFixed(2)).toString()
console.log('download progress:', loaded + '%')
if (loaded === '100.00') {
this.dMng.events.emit('done', data)
this.isReady = true
}
})
this.dMng.events.on('done', (data) => {
})
this.dMng.events.on('ready', (data) => {
console.log('model ready')
this.isReady = true
})
this.dMng.events.on('complete', (data) => {
})
}
async init() {
InlineCompletionTransformer.model = this.selectedModel
InlineCompletionTransformer.task = InlineCompletionTransformer.model.task
// create inference instance
this.inferencer = await InlineCompletionTransformer.getInstance(this.dMng.onMessageReceived, this.modelCacheDir);
console.log('inference instance created', this)
}
async code_completion(context: any, params:IParams=completionParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
console.log('in transformer code_completion')
// as of now no prompt required
this.event.emit('onInference')
const result = await this.inferencer(context, params)
this.event.emit('onInferenceDone')
console.log('result', result)
return result
}
async code_insertion(msg_pfx: string, msg_sfx: string, params:IParams=insertionParams): Promise<any> {
console.log('in transformer code_insertion')
if (!this.isReady) {
console.log('model not ready yet')
return
}
this.event.emit('onInference')
const prompt = getInsertionPrompt(InlineCompletionTransformer.model, msg_pfx, msg_sfx)
console.log('prompt', prompt)
const result = this.inferencer(prompt, insertionParams)
this.event.emit('onInferenceDone')
console.log('result', result)
return result
}
}
module.exports = {
InlineCompletionServiceTransformer
}

@ -1,155 +0,0 @@
import path from 'path';
import fs from 'fs';
import axios from "axios";
import { EventEmitter } from 'events';
import { ICompletions, IModel, IParams } from '../../types/types';
import { getInsertionPrompt } from '../../prompts/completionPrompts';
class LLamaBackend {
static instance: any
static model: any
static modelPath: string
static async getInstance() {
if (this.instance === null || this.instance === undefined) {
const LlamaApi = Function('return import("node-llama-cpp")')();
const { LlamaModel, LlamaContext, LlamaChatSession, LlamaModelOptions } = await LlamaApi;
const getModelOptions = () => {
const options = {
modelPath: this.modelPath? this.modelPath: null,
threads: 1,
temperature: 0.6,
topK: 40,
topP: 0.92,
logitsAll: false,
vocabOnly: false,
useMmap: false,
useMlock: false,
embedding: false,
};
return options;
}
console.log('loading model with options', getModelOptions())
const m = new LlamaModel(getModelOptions());
console.log("system infos\n", LlamaModel.systemInfo)
const context = new LlamaContext({ model: m });
const session = new LlamaChatSession({ context });
this.instance = session
return this.instance
}
return this.instance
}
}
export class LLamaInferencer implements ICompletions {
isReady: boolean = false
selectedModel: any
modelPath: string
event: EventEmitter
inferencer: any
modelCacheDir: string = undefined
constructor(model:IModel, modelDir:string) {
this.selectedModel = model
this.event = new EventEmitter()
this.modelCacheDir = path.join(modelDir, 'models')
}
async init() {
try {
await this._downloadModel(this.selectedModel)
if (this.modelPath === undefined) {
console.log('Model not downloaded or not found')
return
}
console.log('Model downloaded at', this.modelPath)
LLamaBackend.model = this.selectedModel
LLamaBackend.modelPath = this.modelPath
this.inferencer = await LLamaBackend.getInstance()
this.inferencer.init()
this.isReady = this.inferencer.initialized
} catch (error) {
console.log('Error initializing the model', error)
}
}
async _downloadModel(model): Promise<void> {
if (this.modelCacheDir === undefined) {
console.log('Model cache directory not provided')
return
} else {
const outputLocationPath = path.join(this.modelCacheDir, model.modelName);
console.log('output location path is', outputLocationPath)
if (fs.existsSync(outputLocationPath)) {
this.modelPath = outputLocationPath
console.log('Model already exists in the output location', outputLocationPath);
return;
}
// Make a HEAD request to get the file size
const { headers } = await axios.head(model.downloadUrl);
const totalSize = parseInt(headers['content-length'], 10);
// Create a write stream to save the file
const writer = fs.createWriteStream(outputLocationPath);
// Start the file download
const response = await axios({
method: 'get',
url: model.downloadUrl,
responseType: 'stream'
});
let downloadedSize = 0;
response.data.on('data', (chunk: Buffer) => {
downloadedSize += chunk.length;
const progress = (Number((downloadedSize / totalSize) * 100).toFixed(2));
console.log(`Downloaded ${progress}%`);
this.event.emit('download', progress);
});
response.data.pipe(writer);
this.event.emit('ready')
this.modelPath = outputLocationPath
console.log('LLama Download complete');
return new Promise((resolve, reject) => {
writer.on('finish', resolve);
writer.on('error', reject);
});
}
}
async code_completion(context: any, params?:IParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
// as of now no prompt required
this.event.emit('onInference')
const result = params? this.inferencer.prompt(context, params): this.inferencer.prompt(context)
this.event.emit('onInferenceDone')
console.log('result', await result)
return result
}
async code_insertion(msg_pfx: string, msg_sfx: string, params?:IParams): Promise<any> {
if (!this.isReady) {
console.log('model not ready yet')
return
}
this.event.emit('onInference')
const prompt = getInsertionPrompt(this.selectedModel, msg_pfx, msg_sfx)
const result = params? this.inferencer.prompt(prompt, params): this.inferencer.prompt(prompt)
this.event.emit('onInferenceDone')
return result
}
}

@ -0,0 +1,125 @@
import { ICompletions, IParams, ChatEntry, AIRequestType, RemoteBackendOPModel } from "../../types/types";
import { PromptBuilder } from "../../prompts/promptBuilder";
import axios from "axios";
const defaultErrorMessage = `Unable to get a response from AI server`
export class RemoteInferencer implements ICompletions {
api_url: string
completion_url: string
solgpt_chat_history:ChatEntry[]
max_history = 7
model_op = RemoteBackendOPModel.DEEPSEEK
mainPlugin = null
constructor(plugin, apiUrl?:string, completionUrl?:string) {
this.api_url = apiUrl!==undefined ? apiUrl: "https://solcoder.remixproject.org"
this.completion_url = completionUrl!==undefined ? completionUrl : "https://completion.remixproject.org"
this.solgpt_chat_history = []
this.mainPlugin = plugin
}
private pushChatHistory(prompt, result){
const chat:ChatEntry = [prompt, result.data[0]]
this.solgpt_chat_history.push(chat)
if (this.solgpt_chat_history.length > this.max_history){this.solgpt_chat_history.shift()}
}
private async _makeRequest(data, rType:AIRequestType){
this.mainPlugin.emit("aiInfering")
const requesURL = rType === AIRequestType.COMPLETION ? this.completion_url : this.api_url
console.log("requesting on ", requesURL, rType, data.data[1])
try {
const result = await axios(requesURL, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
data: JSON.stringify(data),
})
switch (rType) {
case AIRequestType.COMPLETION:
if (result.statusText === "OK")
return result.data.data[0]
else {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: defaultErrorMessage })
return ""
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
const resultText = result.data.data[0]
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: resultText })
this.pushChatHistory(prompt, resultText)
} else {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: defaultErrorMessage })
}
break
}
} catch (e) {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: defaultErrorMessage })
this.solgpt_chat_history = []
return ""
}
finally {
this.mainPlugin.emit("aiInferingDone")
}
}
async code_completion(prompt, options:IParams=null): Promise<any> {
const payload = !options?
{ "data": [prompt, "code_completion", "", false, 30, 0.9, 0.90, 50]} :
{ "data": [prompt, "code_completion", "", options.stream_result,
options.max_new_tokens, options.temperature, options.top_p, options.top_k]
}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_insertion(msg_pfx, msg_sfx): Promise<any> {
const payload = { "data":[msg_pfx, "code_insertion", msg_sfx, 1024, 0.5, 0.92, 50]}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async code_generation(prompt): Promise<any> {
const payload = { "data":[prompt, "code_completion", "", false,1000,0.9,0.92,50]}
return this._makeRequest(payload, AIRequestType.COMPLETION)
}
async solidity_answer(prompt): Promise<any> {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
const main_prompt = this._build_solgpt_promt(prompt)
const payload = { "data":[main_prompt, "solidity_answer", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
async code_explaining(prompt, context:string=""): Promise<any> {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
const payload = { "data":[prompt, "code_explaining", false,2000,0.9,0.8,50, context]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
async error_explaining(prompt): Promise<any> {
this.mainPlugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: `\n\nWaiting for RemixAI answer...` })
const payload = { "data":[prompt, "error_explaining", false,2000,0.9,0.8,50]}
return this._makeRequest(payload, AIRequestType.GENERAL)
}
private _build_solgpt_promt(user_promt:string){
if (this.solgpt_chat_history.length === 0){
return user_promt
} else {
let new_promt = ""
for (const [question, answer] of this.solgpt_chat_history) {
new_promt += PromptBuilder(question.split('sol-gpt')[1], answer, this.model_op)
}
// finally
new_promt = "sol-gpt " + new_promt + PromptBuilder(user_promt.split('sol-gpt')[1], "", this.model_op)
return new_promt
}
}
}
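`RemoteInferencer` mirrors the plugin's method surface but targets the hosted solcoder/completion endpoints and keeps at most `max_history` sol-gpt turns. A minimal standalone sketch; the host object only needs `emit` and `call('terminal', 'log', ...)`, so the stub below is illustrative and not part of this commit:

// Minimal host stub covering the two plugin capabilities RemoteInferencer actually uses.
const hostStub = {
  emit: (_event: string) => {},
  call: (_target: string, _method: string, payload: any) => console.log(payload?.value ?? payload),
}

async function demo() {
  const remote = new RemoteInferencer(hostStub)   // defaults to the solcoder / completion endpoints
  // COMPLETION requests return the first element of the service's data array, or "" on failure.
  return remote.code_completion('contract Storage {')
}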

@ -1,10 +1,9 @@
import { COMPLETION_SYSTEM_PROMPT } from "../types/constants";
import { IModel } from "../types/types";
export const getInsertionPrompt = (model:IModel, msg_pfx, msg_sfx) => {
if ((model.modelType === 'code_completion_insertion') && (model.modelName.toLocaleLowerCase().includes('deepseek'))){
return "<|fim▁begin|>" + msg_pfx + "<|fim▁hole|> " + msg_sfx + "<|fim▁end|>"
return `'<|fim▁begin|>' ${msg_pfx} '<|fim▁hole|>' ${msg_sfx} '<|fim▁end|>'`
}
else {
// return error model not supported yet
@ -14,6 +13,6 @@ export const getInsertionPrompt = (model:IModel, msg_pfx, msg_sfx) => {
export const getCompletionPrompt = (model:IModel, context) => {
if ((model.modelType === 'code_completion') && (model.modelName.toLocaleLowerCase().includes('deepseek'))){
return `{COMPLETION_SYSTEM_PROMPT} \n### Instruction:\n{context}\n ### Response: `
return `{COMPLETION_SYSTEM_PROMPT} \n### Instruction:\n{context}\n ### Response: `
}
}
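For a DeepSeek insertion model the prompt is now a single fill-in-the-middle string. A worked example of what `getInsertionPrompt` returns for a small prefix/suffix pair; the model literal is a stand-in carrying only the fields the function reads:

import { getInsertionPrompt, IModel } from '@remix/remix-ai-core'

// Stand-in model object; only modelType and modelName are inspected above.
const deepseek = { modelType: 'code_completion_insertion', modelName: 'deepseek-coder-6.7B-base-GGUF' } as IModel
const prompt = getInsertionPrompt(deepseek, 'function add(a, b) {', '}')
// -> "'<|fim▁begin|>' function add(a, b) { '<|fim▁hole|>' } '<|fim▁end|>'"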

@ -0,0 +1,7 @@
import { RemoteBackendOPModel } from "../types/types"
export const PromptBuilder = (inst, answr, modelop) => {
if (modelop === RemoteBackendOPModel.CODELLAMA) return ""
if (modelop === RemoteBackendOPModel.DEEPSEEK) return "\n### INSTRUCTION:\n" + inst + "\n### RESPONSE:\n" + answr
if (modelop === RemoteBackendOPModel.MISTRAL) return ""
}
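Each DeepSeek turn is wrapped in an INSTRUCTION/RESPONSE block; `_build_solgpt_promt` in the remote inferencer concatenates one of these per chat entry before appending the new question. A worked example:

import { PromptBuilder, RemoteBackendOPModel } from '@remix/remix-ai-core'

const turn = PromptBuilder('What is a modifier?', 'A modifier wraps a function with extra checks.', RemoteBackendOPModel.DEEPSEEK)
// turn === "\n### INSTRUCTION:\nWhat is a modifier?\n### RESPONSE:\nA modifier wraps a function with extra checks."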

@ -38,7 +38,16 @@ const DefaultModels = (): IModel[] => {
modelType: ModelType.CODE_COMPLETION_INSERTION,
modelReqs: { backend: 'llamacpp', minSysMemory: 2, GPURequired: false, MinGPUVRAM: 2 }
};
return [model1, model2, model3, model4];
const model5: IModel = {
name: 'DeepSeek',
task: 'text-generation',
modelName: 'deepseek-coder-6.7B-base-GGUF',
downloadUrl: 'https://huggingface.co/TheBloke/deepseek-coder-6.7B-base-GGUF/resolve/main/deepseek-coder-6.7b-base.Q4_K_M.gguf?download=true',
modelType: ModelType.CODE_COMPLETION_INSERTION,
modelReqs: { backend: 'llamacpp', minSysMemory: 2, GPURequired: false, MinGPUVRAM: 2 }
};
return [model1, model2, model3, model4, model5];
}
const getModel = async (name: string): Promise<IModel | undefined> => {

@ -46,6 +46,7 @@ export interface IParams {
temperature?: number;
max_new_tokens?: number;
repetition_penalty?: number;
repeatPenalty?:any
no_repeat_ngram_size?: number;
num_beams?: number;
num_return_sequences?: number;
@ -53,4 +54,22 @@ export interface IParams {
top_p?: number;
stream_result?: boolean;
return_full_text?: boolean;
nThreads?: number;
nTokPredict?: number;
topK?: number;
topP?: number;
temp?: number;
}
export enum AIRequestType {
COMPLETION,
GENERAL
}
export type ChatEntry = [string, string];
export enum RemoteBackendOPModel{
DEEPSEEK,
CODELLAMA,
MISTRAL
}
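`IParams` now carries both the HF-style fields (`max_new_tokens`, `top_p`, `top_k`) and the llama.cpp-style ones (`topK`, `topP`, `temp`, `nThreads`, `nTokPredict`), all optional, so one options object can be handed to either backend. An illustrative override for the local completion path (values are examples, not defaults from this commit):

import { IParams } from '@remix/remix-ai-core'

// Illustrative values; usable as the optional params argument of the local code_completion methods.
const quickCompletion: IParams = { temperature: 0.2, topK: 20, topP: 0.9, max_new_tokens: 8 }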

@ -61,9 +61,9 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
// use the code generation model, only take max 1000 word as context
this.props.plugin.call('terminal', 'log', { type: 'aitypewriterwarning', value: 'Solcoder - generating code for following comment: ' + ask.replace('///', '') })
const data = await this.props.plugin.call('remixAID', 'code_insertion', word, word_after)
const data = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
const parsedData = data[0].trimStart() //JSON.parse(data).trimStart()
const parsedData = data.trimStart() //JSON.parse(data).trimStart()
const item: monacoTypes.languages.InlineCompletion = {
insertText: parsedData
};
@ -99,15 +99,15 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
if (word.replace(/ +$/, '').endsWith('\n')){
// Code insertion
try {
const output = await this.props.plugin.call('remixAID', 'code_insertion', word, word_after)
const generatedText = output[0].generated_text // no need to clean it. should already be
const output = await this.props.plugin.call('remixAI', 'code_insertion', word, word_after)
const generatedText = output // no need to clean it. should already be
const item: monacoTypes.languages.InlineCompletion = {
insertText: generatedText
};
this.completionEnabled = false
const handleCompletionTimer = new CompletionTimer(5000, () => { this.completionEnabled = true });
const handleCompletionTimer = new CompletionTimer(50, () => { this.completionEnabled = true });
handleCompletionTimer.start()
return {
@ -122,8 +122,9 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
try {
// Code completion
const output = await this.props.plugin.call('remixAID', 'code_completion', word)
const generatedText = output[0].generated_text
const output = await this.props.plugin.call('remixAI', 'code_completion', word)
console.log('code output', output)
const generatedText = output
let clean = generatedText
if (generatedText.indexOf('@custom:dev-run-script./') !== -1) {
@ -138,7 +139,7 @@ export class RemixInLineCompletionProvider implements monacoTypes.languages.Inli
// handle the completion timer by locking suggestions request for 2 seconds
this.completionEnabled = false
const handleCompletionTimer = new CompletionTimer(2000, () => { this.completionEnabled = true });
const handleCompletionTimer = new CompletionTimer(20, () => { this.completionEnabled = true });
handleCompletionTimer.start()
return {

@ -5,7 +5,7 @@ import { DefaultModels } from '@remix/remix-ai-core';
export const Default = (props) => {
const [searchText, setSearchText] = useState('');
const [resultText, setResultText] = useState('');
const pluginName = props.plugin.isOnDesktop ? 'remixAID' : 'remixAI'
const pluginName = 'remixAI'
return (
<div>
@ -33,7 +33,7 @@ export const Default = (props) => {
onClick={async () => {
if (props.plugin.isOnDesktop ) {
await props.plugin.call(pluginName, 'downloadModel', DefaultModels()[0]);
await props.plugin.call(pluginName, 'downloadModel', DefaultModels()[3]);
}
}}
> Download Model </button>
@ -52,24 +52,13 @@ export const Default = (props) => {
<button className="remix_ai_plugin_download_button text-ai pl-2 pr-0 py-0 d-flex"
onClick={async () => {
if (props.plugin.isOnDesktop ) {
// const completer = new InlineCompletionServiceTransformer();
// if (!completer.ready) {
// await completer.init();
// }
await props.plugin.call(pluginName, 'initializeModelBackend', DefaultModels()[3]);
// // const code = completer.code_completion("pragma solidity ^0.8.0;\n")
console.log("Got transformer model completion ");
const result = await props.plugin.call(pluginName, 'code_completion', "pragma solidity ^0.8.0;\n contract Storage");
console.log("Got code completion\n",result);
// const inferenceModel = await props.plugin.call(pluginName, 'getInferenceModel');
// console.log("Got inference model ",inferenceModel);
// const result = await inferenceModel.prompt("What is the meaning of life?");
// console.log("Got result ",result);
}
props.plugin.call(pluginName, 'initializeRemixAI', DefaultModels()[3]);
// if (props.plugin.isOnDesktop ) {
// console.log(Date.now(), "Init model backend");
// props.plugin.call(pluginName, 'initializeModelBackend', DefaultModels()[3]);
// console.log(Date.now(), "after Init model backend");
// console.log("Got transformer model completion ");
// }
}}
> Init Model </button>
</div>

@ -163,8 +163,8 @@
"latest-version": "^5.1.0",
"llama-node": "^0.1.6",
"merge": "^2.1.1",
"npm-install-version": "^6.0.2",
"node-llama-cpp": "^2.8.11",
"npm-install-version": "^6.0.2",
"octokit": "^3.1.2",
"openai": "^3.3.0",
"path-browserify": "^1.0.1",
@ -339,8 +339,8 @@
"npm-run-all": "^4.0.2",
"nx": "15.7.1",
"nyc": "^13.3.0",
"onnxruntime-web": "^1.18.0",
"onchange": "^3.2.1",
"onnxruntime-web": "^1.18.0",
"os-browserify": "^0.3.0",
"process": "^0.11.10",
"react-refresh": "^0.14.0",

@ -2980,6 +2980,11 @@
semiver "^1.1.0"
ws "^8.13.0"
"@huggingface/jinja@^0.2.2":
version "0.2.2"
resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
integrity sha512-/KPde26khDUIPkTGU82jdtTW9UAuvUTumCAbFs/7giR0SxsvZC4hru51PBvpijH6BVkHcROcvZM/lpy5h1jRRA==
"@humanwhocodes/config-array@^0.11.10":
version "0.11.10"
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2"
@ -4032,6 +4037,29 @@
dependencies:
"@lit-labs/ssr-dom-shim" "^1.1.2"
"@llama-node/cli@0.1.6":
version "0.1.6"
resolved "https://registry.yarnpkg.com/@llama-node/cli/-/cli-0.1.6.tgz#ab743f865ca9442b9c28ad0cbb859059391956e3"
integrity sha512-cTlSHfGgBBTPtieX5bd6SvtY0Xp/IcXTzaCKWoMgCQprnwpe/UyMadW15lOPjByXf/Fxo6O+hH5l/QucxJK6dQ==
dependencies:
"@llama-node/core" "0.1.6"
yargs "^17.7.1"
"@llama-node/core@0.1.6":
version "0.1.6"
resolved "https://registry.yarnpkg.com/@llama-node/core/-/core-0.1.6.tgz#e7f2a5c22bccf5bf5b4d3e64af755e4e6ce06002"
integrity sha512-Mg6lZ0jjOhK2AhvUOPZ4WDb0Rap02gwUbcm4Q4gIV1WsLpB/bGlgxq0z7uXF+3NExAvkcv3/bb1MujrE6re34g==
"@llama-node/llama-cpp@0.1.6":
version "0.1.6"
resolved "https://registry.yarnpkg.com/@llama-node/llama-cpp/-/llama-cpp-0.1.6.tgz#206d515d5b93f9908657039b94f5540b50b689f9"
integrity sha512-AzU2Gc5bX5V74SnO9E17IA5N/Rbd8trd7qkzp/H4zmmz/Chh7MmaiUhAewUt7kPuk3/LahGiDxxAOjiKtMJiNQ==
"@llama-node/rwkv-cpp@0.1.6":
version "0.1.6"
resolved "https://registry.yarnpkg.com/@llama-node/rwkv-cpp/-/rwkv-cpp-0.1.6.tgz#04f459dfe26f548742d6375642ed4126910d99e0"
integrity sha512-kaB01kfwthz/euHE67MWDzrZacFL9CSPsDcQx4ZLJii98HQCiuQ24g6CU8cVFYRm45a/o0qCjR4G/j1TyPIl8g==
"@malept/cross-spawn-promise@^1.1.0":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@malept/cross-spawn-promise/-/cross-spawn-promise-1.1.1.tgz#504af200af6b98e198bce768bc1730c6936ae01d"
@ -5389,6 +5417,59 @@
resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.8.tgz#6b79032e760a0899cd4204710beede972a3a185f"
integrity sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==
"@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf"
integrity sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==
"@protobufjs/base64@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735"
integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==
"@protobufjs/codegen@^2.0.4":
version "2.0.4"
resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb"
integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==
"@protobufjs/eventemitter@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70"
integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==
"@protobufjs/fetch@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45"
integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==
dependencies:
"@protobufjs/aspromise" "^1.1.1"
"@protobufjs/inquire" "^1.1.0"
"@protobufjs/float@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1"
integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==
"@protobufjs/inquire@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089"
integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==
"@protobufjs/path@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d"
integrity sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==
"@protobufjs/pool@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54"
integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==
"@protobufjs/utf8@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570"
integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==
"@redux-saga/core@^1.3.0":
version "1.3.0"
resolved "https://registry.yarnpkg.com/@redux-saga/core/-/core-1.3.0.tgz#2ce08b73d407fc6ea9e7f7d83d2e97d981a3a8b8"
@ -5473,13 +5554,6 @@
"@remixproject/plugin-api" "0.3.42"
"@remixproject/plugin-utils" "0.3.42"
"@remixproject/plugin-api@0.3.33":
version "0.3.33"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-api/-/plugin-api-0.3.33.tgz#29699f980ea00bebf720961cc0e78887e03903ec"
integrity sha512-fBEbRr6/mgQdfNdRqYQL3yewsPfTxV41F509CngbD6YdY5YKBihJhfGFHbd2rKSyXOgBiHIbe0SsV3OXpFdWnw==
dependencies:
"@remixproject/plugin-utils" "0.3.33"
"@remixproject/plugin-api@0.3.42":
version "0.3.42"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-api/-/plugin-api-0.3.42.tgz#c64d8b75a139d4e5cc342861d288d638818a8081"
@ -5497,13 +5571,6 @@
"@remixproject/plugin-api" "0.3.42"
"@remixproject/plugin-utils" "0.3.42"
"@remixproject/plugin-utils@0.3.33":
version "0.3.33"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-utils/-/plugin-utils-0.3.33.tgz#7b697403031598276baaf16bb82d6c62062053fc"
integrity sha512-cAo21ot4/G5BkN8ypDwg8MMCrEmLdXwMd3lQZUeB5enPC3KxmzQz71+OgEYl718Hwy+GtHaLq17FEXCHC5YV9w==
dependencies:
tslib "2.0.1"
"@remixproject/plugin-utils@0.3.42":
version "0.3.42"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-utils/-/plugin-utils-0.3.42.tgz#4ac4b4aaa15e14f1a905236645a4813a63b00c9c"
@ -5521,15 +5588,6 @@
"@remixproject/plugin-utils" "0.3.42"
axios "^0.21.1"
"@remixproject/plugin-ws@0.3.33":
version "0.3.33"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-ws/-/plugin-ws-0.3.33.tgz#98a003e83ffafb5a7a35ca4e8c59d849ecb017cf"
integrity sha512-Zkp8MK8jxnNm3uruu0dF8vqeh90JsLXttJP4LZF0HaStRRK4d2XG6CgE5mBiC2J4uTEwGP26H/vmqi+POBPTEg==
dependencies:
"@remixproject/plugin" "0.3.33"
"@remixproject/plugin-api" "0.3.33"
"@remixproject/plugin-utils" "0.3.33"
"@remixproject/plugin-ws@0.3.42":
version "0.3.42"
resolved "https://registry.yarnpkg.com/@remixproject/plugin-ws/-/plugin-ws-0.3.42.tgz#5c93112445de3bfbaddd3ce04e177d66e2ebd08a"
@ -5539,15 +5597,6 @@
"@remixproject/plugin-api" "0.3.42"
"@remixproject/plugin-utils" "0.3.42"
"@remixproject/plugin@0.3.33":
version "0.3.33"
resolved "https://registry.yarnpkg.com/@remixproject/plugin/-/plugin-0.3.33.tgz#2939cdb6a1231743d7f00c10f5ea47eddd49b602"
integrity sha512-ia6LevsWYPkcRwOBl3umA2fPCgYt2TmB437Pafs9BE6fD9judEvlvEqXjBy9GLBsZzZWSiyYenOnW8HIiwqfMA==
dependencies:
"@remixproject/plugin-api" "0.3.33"
"@remixproject/plugin-utils" "0.3.33"
events "3.2.0"
"@remixproject/plugin@0.3.42":
version "0.3.42"
resolved "https://registry.yarnpkg.com/@remixproject/plugin/-/plugin-0.3.42.tgz#26709eedf53a7fe13717fa909eebebfd757f74bf"
@ -6494,6 +6543,11 @@
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.176.tgz#641150fc1cda36fbfa329de603bbb175d7ee20c0"
integrity sha512-xZmuPTa3rlZoIbtDUyJKZQimJV3bxCmzMIO2c9Pz9afyDro6kr7R79GwcB6mRhuoPmV2p1Vb66WOJH7F886WKQ==
"@types/long@^4.0.1":
version "4.0.2"
resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.2.tgz#b74129719fc8d11c01868010082d483b7545591a"
integrity sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==
"@types/lru-cache@5.1.1", "@types/lru-cache@^5.1.0":
version "5.1.1"
resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.1.tgz#c48c2e27b65d2a153b19bfc1a317e30872e01eef"
@ -6550,6 +6604,13 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-8.9.5.tgz#162b864bc70be077e6db212b322754917929e976"
integrity sha512-jRHfWsvyMtXdbhnz5CVHxaBgnV6duZnPlQuRSo/dm/GnmikNcmZhxIES4E9OZjUmQ8C+HCl4KJux+cXN/ErGDQ==
"@types/node@>=13.7.0":
version "20.14.10"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.10.tgz#a1a218290f1b6428682e3af044785e5874db469a"
integrity sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==
dependencies:
undici-types "~5.26.4"
"@types/normalize-package-data@^2.4.0":
version "2.4.1"
resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301"
@ -7543,6 +7604,17 @@
resolved "https://registry.yarnpkg.com/@webpack-cli/serve/-/serve-1.7.0.tgz#e1993689ac42d2b16e9194376cfb6753f6254db1"
integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q==
"@xenova/transformers@^2.17.2":
version "2.17.2"
resolved "https://registry.yarnpkg.com/@xenova/transformers/-/transformers-2.17.2.tgz#7448d73b90f67bced66f39fe2dd656adc891fde5"
integrity sha512-lZmHqzrVIkSvZdKZEx7IYY51TK0WDrC8eR0c5IMnBsO8di8are1zzw8BlLhyO2TklZKLN5UffNGs1IJwT6oOqQ==
dependencies:
"@huggingface/jinja" "^0.2.2"
onnxruntime-web "1.14.0"
sharp "^0.32.0"
optionalDependencies:
onnxruntime-node "1.14.0"
"@xtuc/ieee754@^1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790"
@ -8592,6 +8664,11 @@ b4a@^1.0.1:
resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.4.tgz#ef1c1422cae5ce6535ec191baeed7567443f36c9"
integrity sha512-fpWrvyVHEKyeEvbKZTVOeZF3VSKKWtJxFIxX/jaVPf+cLbGUSitjb49pHLqPV2BUNNZ0LcoeEGfE/YCpyDYHIw==
b4a@^1.6.4:
version "1.6.6"
resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.6.tgz#a4cc349a3851987c3c4ac2d7785c18744f6da9ba"
integrity sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==
babel-code-frame@^6.26.0:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
@ -9290,6 +9367,39 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
bare-events@^2.0.0, bare-events@^2.2.0:
version "2.4.2"
resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.4.2.tgz#3140cca7a0e11d49b3edc5041ab560659fd8e1f8"
integrity sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==
bare-fs@^2.1.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-2.3.1.tgz#cdbd63dac7a552dfb2b87d18c822298d1efd213d"
integrity sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==
dependencies:
bare-events "^2.0.0"
bare-path "^2.0.0"
bare-stream "^2.0.0"
bare-os@^2.1.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-2.4.0.tgz#5de5e3ba7704f459c9656629edca7cc736e06608"
integrity sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==
bare-path@^2.0.0, bare-path@^2.1.0:
version "2.1.3"
resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-2.1.3.tgz#594104c829ef660e43b5589ec8daef7df6cedb3e"
integrity sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==
dependencies:
bare-os "^2.1.0"
bare-stream@^2.0.0:
version "2.1.3"
resolved "https://registry.yarnpkg.com/bare-stream/-/bare-stream-2.1.3.tgz#070b69919963a437cc9e20554ede079ce0a129b2"
integrity sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==
dependencies:
streamx "^2.18.0"
base-x@^3.0.2:
version "3.0.9"
resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.9.tgz#6349aaabb58526332de9f60995e548a53fe21320"
@ -11059,6 +11169,14 @@ color-string@^1.6.0:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
color-string@^1.9.0:
version "1.9.1"
resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4"
integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==
dependencies:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
color-support@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2"
@ -11072,6 +11190,14 @@ color@^3.1.3:
color-convert "^1.9.3"
color-string "^1.6.0"
color@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/color/-/color-4.2.3.tgz#d781ecb5e57224ee43ea9627560107c0e0c6463a"
integrity sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==
dependencies:
color-convert "^2.0.1"
color-string "^1.9.0"
colord@^2.9.1:
version "2.9.1"
resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.1.tgz#c961ea0efeb57c9f0f4834458f26cb9cc4a3f90e"
@ -12564,6 +12690,11 @@ detect-libc@^1.0.3:
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
integrity sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==
detect-libc@^2.0.0, detect-libc@^2.0.2:
version "2.0.3"
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.3.tgz#f0cd503b40f9939b894697d19ad50895e30cf700"
integrity sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==
detect-libc@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd"
@ -14155,6 +14286,11 @@ expand-range@^1.8.1:
dependencies:
fill-range "^2.1.0"
expand-template@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
expand-tilde@^2.0.0, expand-tilde@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
@ -14335,6 +14471,11 @@ fast-fifo@^1.0.0:
resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.0.0.tgz#9bc72e6860347bb045a876d1c5c0af11e9b984e7"
integrity sha512-4VEXmjxLj7sbs8J//cn2qhRap50dGzF5n8fjay8mau+Jn4hxSeR3xPFwxMaQq/pDaq7+KQk0PAbC2+nWDkJrmQ==
fast-fifo@^1.2.0, fast-fifo@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c"
integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==
fast-glob@3.2.7:
version "3.2.7"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1"
@ -14753,6 +14894,11 @@ flat@^5.0.2:
resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241"
integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==
flatbuffers@^1.12.0:
version "1.12.0"
resolved "https://registry.yarnpkg.com/flatbuffers/-/flatbuffers-1.12.0.tgz#72e87d1726cb1b216e839ef02658aa87dcef68aa"
integrity sha512-c7CZADjRcl6j0PlvFy0ZqXQ67qSEZfrVPynmnL+2zPc+NtMvrF8Y0QceMo7QqnSPc7+uWjUIAbvCQ5WIKlMVdQ==
flatted@^3.1.0:
version "3.2.7"
resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
@ -15535,6 +15681,11 @@ github-base@^0.5.4:
static-extend "^0.1.2"
use "^3.0.0"
github-from-package@0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce"
integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==
glob-base@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4"
@ -15915,6 +16066,11 @@ growly@^1.2.0:
resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081"
integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=
guid-typescript@^1.0.9:
version "1.0.9"
resolved "https://registry.yarnpkg.com/guid-typescript/-/guid-typescript-1.0.9.tgz#e35f77003535b0297ea08548f5ace6adb1480ddc"
integrity sha512-Y8T4vYhEfwJOTbouREvG+3XDsjr8E3kIr7uf+JZ0BYloFsttiHU0WfvANVsR7TxNUJa/WpCnw/Ino/p+DeBhBQ==
gulp-cli@^2.2.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/gulp-cli/-/gulp-cli-2.3.0.tgz#ec0d380e29e52aa45e47977f0d32e18fd161122f"
@ -19338,6 +19494,17 @@ lit@3.1.0:
lit-element "^4.0.0"
lit-html "^3.1.0"
llama-node@^0.1.6:
version "0.1.6"
resolved "https://registry.yarnpkg.com/llama-node/-/llama-node-0.1.6.tgz#6156c117e64a5d8a0cc4f14846620f111343aad7"
integrity sha512-LZIEG0RNyqX21C5Ms8kc40syy2ZMYR5plRFkw9flziEx9y659k16HvB+2Dt3dJfhsqJB2Ji/QFfRJ6DUb0Trcg==
dependencies:
"@llama-node/cli" "0.1.6"
optionalDependencies:
"@llama-node/core" "0.1.6"
"@llama-node/llama-cpp" "0.1.6"
"@llama-node/rwkv-cpp" "0.1.6"
load-json-file@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0"
@ -19767,6 +19934,16 @@ logplease@^1.2.15:
resolved "https://registry.yarnpkg.com/logplease/-/logplease-1.2.15.tgz#3da442e93751a5992cc19010a826b08d0293c48a"
integrity sha512-jLlHnlsPSJjpwUfcNyUxXCl33AYg2cHhIf9QhGL2T4iPT0XPB+xP1LRKFPgIg1M/sg9kAJvy94w9CzBNrfnstA==
long@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==
long@^5.0.0, long@^5.2.3:
version "5.2.3"
resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1"
integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==
longest-streak@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4"
@ -20889,7 +21066,7 @@ minimist@1.2.6, minimist@^1.2.6:
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
minimist@^1.1.0, minimist@^1.2.0, minimist@^1.2.7:
minimist@^1.1.0, minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.7:
version "1.2.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c"
integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==
@ -21030,7 +21207,7 @@ mixin-deep@^1.2.0:
for-in "^1.0.2"
is-extendable "^1.0.1"
mkdirp-classic@^0.5.2:
mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3:
version "0.5.3"
resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
@ -21499,6 +21676,11 @@ nanomatch@^1.2.9:
snapdragon "^0.8.1"
to-regex "^3.0.1"
napi-build-utils@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806"
integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==
napi-macros@^2.2.2:
version "2.2.2"
resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.2.2.tgz#817fef20c3e0e40a963fbf7b37d1600bd0201044"
@ -21608,6 +21790,13 @@ node-abi@^3.0.0:
dependencies:
semver "^7.3.5"
node-abi@^3.3.0:
version "3.65.0"
resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.65.0.tgz#ca92d559388e1e9cab1680a18c1a18757cdac9d3"
integrity sha512-ThjYBfoDNr08AWx6hGaRbfPwxKV9kVzAzOzlLKbk2CuqXE2xnCh+cbAGnwM3t8Lq4v9rUB7VfondlkBckcJrVA==
dependencies:
semver "^7.3.5"
node-abort-controller@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.0.1.tgz#f91fa50b1dee3f909afabb7e261b1e1d6b0cb74e"
@ -21628,6 +21817,11 @@ node-addon-api@^3.2.1:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==
node-addon-api@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
node-addon-api@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-7.1.0.tgz#71f609369379c08e251c558527a107107b5e0fdb"
@ -22662,6 +22856,54 @@ onetime@^6.0.0:
dependencies:
mimic-fn "^4.0.0"
onnx-proto@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/onnx-proto/-/onnx-proto-4.0.4.tgz#2431a25bee25148e915906dda0687aafe3b9e044"
integrity sha512-aldMOB3HRoo6q/phyB6QRQxSt895HNNw82BNyZ2CMh4bjeKv7g/c+VpAFtJuEMVfYLMbRx61hbuqnKceLeDcDA==
dependencies:
protobufjs "^6.8.8"
onnxruntime-common@1.18.0:
version "1.18.0"
resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.18.0.tgz#b904dc6ff134e7f21a3eab702fac17538f59e116"
integrity sha512-lufrSzX6QdKrktAELG5x5VkBpapbCeS3dQwrXbN0eD9rHvU0yAWl7Ztju9FvgAKWvwd/teEKJNj3OwM6eTZh3Q==
onnxruntime-common@~1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.14.0.tgz#2bb5dac5261269779aa5fb6536ca379657de8bf6"
integrity sha512-3LJpegM2iMNRX2wUmtYfeX/ytfOzNwAWKSq1HbRrKc9+uqG/FsEA0bbKZl1btQeZaXhC26l44NWpNUeXPII7Ew==
onnxruntime-node@1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/onnxruntime-node/-/onnxruntime-node-1.14.0.tgz#c4ae6c355cfae7d83abaf36dd39a905c4a010217"
integrity sha512-5ba7TWomIV/9b6NH/1x/8QEeowsb+jBEvFzU6z0T4mNsFwdPqXeFUM7uxC6QeSRkEbWu3qEB0VMjrvzN/0S9+w==
dependencies:
onnxruntime-common "~1.14.0"
onnxruntime-web@1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/onnxruntime-web/-/onnxruntime-web-1.14.0.tgz#c8cee538781b1d4c1c6b043934f4a3e6ddf1466e"
integrity sha512-Kcqf43UMfW8mCydVGcX9OMXI2VN17c0p6XvR7IPSZzBf/6lteBzXHvcEVWDPmCKuGombl997HgLqj91F11DzXw==
dependencies:
flatbuffers "^1.12.0"
guid-typescript "^1.0.9"
long "^4.0.0"
onnx-proto "^4.0.4"
onnxruntime-common "~1.14.0"
platform "^1.3.6"
onnxruntime-web@^1.18.0:
version "1.18.0"
resolved "https://registry.yarnpkg.com/onnxruntime-web/-/onnxruntime-web-1.18.0.tgz#cd46268d9472f89697da0a3282f13129f0acbfa0"
integrity sha512-o1UKj4ABIj1gmG7ae0RKJ3/GT+3yoF0RRpfDfeoe0huzRW4FDRLfbkDETmdFAvnJEXuYDE0YT+hhkia0352StQ==
dependencies:
flatbuffers "^1.12.0"
guid-typescript "^1.0.9"
long "^5.2.3"
onnxruntime-common "1.18.0"
platform "^1.3.6"
protobufjs "^7.2.4"
open@^8.0.9, open@^8.4.0:
version "8.4.0"
resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8"
@ -23523,6 +23765,11 @@ pkg-up@^3.1.0:
dependencies:
find-up "^3.0.0"
platform@^1.3.6:
version "1.3.6"
resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==
plist@^3.0.0, plist@^3.0.4, plist@^3.0.5:
version "3.0.6"
resolved "https://registry.yarnpkg.com/plist/-/plist-3.0.6.tgz#7cfb68a856a7834bca6dbfe3218eb9c7740145d3"
@ -23963,6 +24210,24 @@ preact@^10.16.0:
resolved "https://registry.yarnpkg.com/preact/-/preact-10.19.3.tgz#7a7107ed2598a60676c943709ea3efb8aaafa899"
integrity sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==
prebuild-install@^7.1.1:
version "7.1.2"
resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.2.tgz#a5fd9986f5a6251fbc47e1e5c65de71e68c0a056"
integrity sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==
dependencies:
detect-libc "^2.0.0"
expand-template "^2.0.3"
github-from-package "0.0.0"
minimist "^1.2.3"
mkdirp-classic "^0.5.3"
napi-build-utils "^1.0.1"
node-abi "^3.3.0"
pump "^3.0.0"
rc "^1.2.7"
simple-get "^4.0.0"
tar-fs "^2.0.0"
tunnel-agent "^0.6.0"
prelude-ls@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
@ -24153,6 +24418,43 @@ proto-list@~1.2.1:
resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849"
integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk=
protobufjs@^6.8.8:
version "6.11.4"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.4.tgz#29a412c38bf70d89e537b6d02d904a6f448173aa"
integrity sha512-5kQWPaJHi1WoCpjTGszzQ32PG2F4+wRY6BmAT4Vfw56Q2FZ4YZzK20xUYQH4YkfehY1e6QSICrJquM6xXZNcrw==
dependencies:
"@protobufjs/aspromise" "^1.1.2"
"@protobufjs/base64" "^1.1.2"
"@protobufjs/codegen" "^2.0.4"
"@protobufjs/eventemitter" "^1.1.0"
"@protobufjs/fetch" "^1.1.0"
"@protobufjs/float" "^1.0.2"
"@protobufjs/inquire" "^1.1.0"
"@protobufjs/path" "^1.1.2"
"@protobufjs/pool" "^1.1.0"
"@protobufjs/utf8" "^1.1.0"
"@types/long" "^4.0.1"
"@types/node" ">=13.7.0"
long "^4.0.0"
protobufjs@^7.2.4:
version "7.3.2"
resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.3.2.tgz#60f3b7624968868f6f739430cfbc8c9370e26df4"
integrity sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg==
dependencies:
"@protobufjs/aspromise" "^1.1.2"
"@protobufjs/base64" "^1.1.2"
"@protobufjs/codegen" "^2.0.4"
"@protobufjs/eventemitter" "^1.1.0"
"@protobufjs/fetch" "^1.1.0"
"@protobufjs/float" "^1.0.2"
"@protobufjs/inquire" "^1.1.0"
"@protobufjs/path" "^1.1.2"
"@protobufjs/pool" "^1.1.0"
"@protobufjs/utf8" "^1.1.0"
"@types/node" ">=13.7.0"
long "^5.0.0"
protocol-buffers-schema@^3.3.1:
version "3.6.0"
resolved "https://registry.yarnpkg.com/protocol-buffers-schema/-/protocol-buffers-schema-3.6.0.tgz#77bc75a48b2ff142c1ad5b5b90c94cd0fa2efd03"
@ -24373,6 +24675,11 @@ queue-microtask@^1.2.2, queue-microtask@^1.2.3:
resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
queue-tick@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/queue-tick/-/queue-tick-1.0.1.tgz#f6f07ac82c1fd60f82e098b417a80e52f1f4c142"
integrity sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==
quick-format-unescaped@^4.0.3:
version "4.0.4"
resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7"
@ -25378,44 +25685,6 @@ remark-rehype@^10.0.0:
mdast-util-to-hast "^12.1.0"
unified "^10.0.0"
remix-ai-core@^0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/remix-ai-core/-/remix-ai-core-0.0.2.tgz#4417ddcb105d7d82a6cbbe0fd2893296e7607f8e"
integrity sha512-pKo80rXYJl8dR/bDy0Lsfp4MjBlpoxNLev8jsN0gjr3UdtdqWEPuCwP5ROzvegSVvkd2XhTTMFr/Nu0yybfGZQ==
dependencies:
"@remixproject/plugin" "0.3.33"
"@remixproject/plugin-api" "0.3.33"
"@remixproject/plugin-utils" "0.3.33"
"@remixproject/plugin-ws" "0.3.33"
axios "1.6.0"
chokidar "^2.1.8"
commander "^9.4.1"
fs-extra "^3.0.1"
isbinaryfile "^3.0.2"
latest-version "^5.1.0"
semver "^6.3.0"
ws "^7.3.0"
remix-ai-core@^0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/remix-ai-core/-/remix-ai-core-0.0.6.tgz#92e56d86f61a055c1bb30c35f50372889c81ae8f"
integrity sha512-k+YVVl6YnOp+ZRoUCUpjLmt3s6fyELDtIQW7/I737/R3HPQzBjhAZe4Lp8uPjZdlGTucdYnVAbFoRlyNACNQWg==
dependencies:
"@remixproject/plugin" "0.3.33"
"@remixproject/plugin-api" "0.3.33"
"@remixproject/plugin-utils" "0.3.33"
"@remixproject/plugin-ws" "0.3.33"
axios "1.6.0"
chokidar "^2.1.8"
commander "^9.4.1"
fs-extra "^3.0.1"
isbinaryfile "^3.0.2"
latest-version "^5.1.0"
node-llama-cpp "^2.8.11"
remix-ai-core "^0.0.2"
semver "^6.3.0"
ws "^7.3.0"
remove-bom-buffer@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz#c2bf1e377520d324f623892e33c10cac2c252b53"
@ -26346,6 +26615,20 @@ shallow-clone@^3.0.0:
dependencies:
kind-of "^6.0.2"
sharp@^0.32.0:
version "0.32.6"
resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.32.6.tgz#6ad30c0b7cd910df65d5f355f774aa4fce45732a"
integrity sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w==
dependencies:
color "^4.2.3"
detect-libc "^2.0.2"
node-addon-api "^6.1.0"
prebuild-install "^7.1.1"
semver "^7.5.4"
simple-get "^4.0.1"
tar-fs "^3.0.4"
tunnel-agent "^0.6.0"
shasum-object@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shasum-object/-/shasum-object-1.0.0.tgz#0b7b74ff5b66ecf9035475522fa05090ac47e29e"
@ -26468,7 +26751,7 @@ simple-get@^2.5.1:
once "^1.3.1"
simple-concat "^1.0.0"
simple-get@^4.0.1:
simple-get@^4.0.0, simple-get@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543"
integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==
@ -27154,6 +27437,17 @@ stream-to-it@^0.2.0, stream-to-it@^0.2.1:
dependencies:
get-iterator "^1.0.2"
streamx@^2.15.0, streamx@^2.18.0:
version "2.18.0"
resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.18.0.tgz#5bc1a51eb412a667ebfdcd4e6cf6a6fc65721ac7"
integrity sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==
dependencies:
fast-fifo "^1.3.2"
queue-tick "^1.0.1"
text-decoder "^1.1.0"
optionalDependencies:
bare-events "^2.2.0"
strict-uri-encode@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546"
@ -27766,7 +28060,7 @@ tape@^4.13.3:
string.prototype.trim "~1.2.4"
through "~2.3.8"
tar-fs@2.1.1:
tar-fs@2.1.1, tar-fs@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784"
integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==
@ -27776,6 +28070,17 @@ tar-fs@2.1.1:
pump "^3.0.0"
tar-stream "^2.1.4"
tar-fs@^3.0.4:
version "3.0.6"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.6.tgz#eaccd3a67d5672f09ca8e8f9c3d2b89fa173f217"
integrity sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==
dependencies:
pump "^3.0.0"
tar-stream "^3.1.5"
optionalDependencies:
bare-fs "^2.1.1"
bare-path "^2.1.0"
tar-js@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/tar-js/-/tar-js-0.3.0.tgz#6949aabfb0ba18bb1562ae51a439fd0f30183a17"
@ -27792,6 +28097,15 @@ tar-stream@^2.1.0, tar-stream@^2.1.4, tar-stream@~2.2.0:
inherits "^2.0.3"
readable-stream "^3.1.1"
tar-stream@^3.1.5:
version "3.1.7"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b"
integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==
dependencies:
b4a "^1.6.4"
fast-fifo "^1.2.0"
streamx "^2.15.0"
tar@^2.0.0, tar@~2.2.1:
version "2.2.2"
resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.2.tgz#0ca8848562c7299b8b446ff6a4d60cdbb23edc40"
@ -27910,6 +28224,13 @@ test-exclude@^6.0.0:
glob "^7.1.4"
minimatch "^3.0.4"
text-decoder@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/text-decoder/-/text-decoder-1.1.1.tgz#5df9c224cebac4a7977720b9f083f9efa1aefde8"
integrity sha512-8zll7REEv4GDD3x4/0pW+ppIxSNs7H1J10IKFZsuOMscumCdM2a+toDGLPA3T+1+fLBql4zbt5z83GEQGGV5VA==
dependencies:
b4a "^1.6.4"
text-encoding@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/text-encoding/-/text-encoding-0.7.0.tgz#f895e836e45990624086601798ea98e8f36ee643"
@ -28749,6 +29070,11 @@ undertaker@^1.2.1:
object.reduce "^1.0.0"
undertaker-registry "^1.0.0"
undici-types@~5.26.4:
version "5.26.5"
resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617"
integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==
undici@^5.14.0:
version "5.28.4"
resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068"
@ -30530,7 +30856,7 @@ yargs@^15.3.1:
y18n "^4.0.0"
yargs-parser "^18.1.2"
yargs@^17.0.1, yargs@^17.7.2:
yargs@^17.0.1, yargs@^17.7.1, yargs@^17.7.2:
version "17.7.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269"
integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==
