fixed double init

pull/5098/head
Stéphane Tetsing
parent 10128bf873
commit 7cd9803260
  1. apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (8 changed lines)
  2. apps/remixdesktop/src/lib/InferenceServerManager.ts (3 changed lines)
  3. apps/remixdesktop/src/plugins/remixAIDektop.ts (1 changed line)
  4. libs/remix-ai-core/src/prompts/promptBuilder.ts (2 changed lines)
  5. libs/remix-ai-core/src/types/models.ts (2 changed lines)
  6. libs/remix-ui/remix-ai/src/lib/components/Default.tsx (2 changed lines)

apps/remix-ide/src/app/plugins/remixAIPlugin.tsx

@@ -28,6 +28,7 @@ export class RemixAIPlugin extends ViewPlugin {
   aiIsActivated:boolean = false
   readonly remixDesktopPluginName = 'remixAID'
   remoteInferencer:RemoteInferencer = null
+  isInferencing: boolean = false

   constructor(inDesktop:boolean) {
     console.log('remixAIPlugin loaded')
@@ -42,8 +43,8 @@ export class RemixAIPlugin extends ViewPlugin {
       console.log('Activating RemixAIPlugin on desktop')
     } else {
       console.log('Activating RemixAIPlugin on browser')
+      this.initialize()
     }
-    this.initialize()
   }

   async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel){
@@ -51,7 +52,6 @@ export class RemixAIPlugin extends ViewPlugin {
     if (this.isOnDesktop) {
       this.call(this.remixDesktopPluginName, 'initializeModelBackend', false, model1, model2)
       this.on(this.remixDesktopPluginName, 'onStreamResult', (value) => {
-        console.log('onStreamResult remixai plugin', value)
         this.call('terminal', 'log', { type: 'log', value: value })
       })
     } else {
@@ -91,7 +91,7 @@ export class RemixAIPlugin extends ViewPlugin {
       result = await this.remoteInferencer.solidity_answer(prompt)
     }
     this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
+    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
   }

   async code_explaining(prompt: string): Promise<any> {
@@ -105,6 +105,7 @@ export class RemixAIPlugin extends ViewPlugin {
       result = await this.remoteInferencer.code_explaining(prompt)
     }
     if (result) this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
+    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
   }

   async error_explaining(prompt: string): Promise<any> {
@@ -117,6 +118,7 @@ export class RemixAIPlugin extends ViewPlugin {
       result = await this.remoteInferencer.error_explaining(prompt)
     }
     this.call('terminal', 'log', { type: 'aitypewriterwarning', value: result })
+    this.call('terminal', 'log', { type: 'aitypewriterwarning', value: "RemixAI Done" })
   }

   async code_insertion(msg_pfx: string, msg_sfx: string): Promise<any> {
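To see the intent of the onActivation change in isolation, here is a minimal sketch of the flow after this commit. The class and helper names are illustrative, not the plugin's real API; only initialize(), isOnDesktop and isInferencing come from the diff above.

// Minimal sketch of the activation flow after this change.
class ActivationSketch {
  isOnDesktop = false
  isInferencing = false          // moved here from InferenceServerManager

  onActivation(): void {
    if (this.isOnDesktop) {
      // desktop: the remixAID plugin initializes the model backend,
      // so initialize() must not run again here
      this.initializeDesktopBackend()
    } else {
      // browser: initialize() now runs exactly once, inside this branch
      this.initialize()
    }
    // previously initialize() was also called here, after the if/else,
    // which double-initialized the desktop backend
  }

  private initializeDesktopBackend(): void { /* call('remixAID', 'initializeModelBackend', ...) */ }
  private initialize(): void { /* set up the RemoteInferencer for browser use */ }
}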

apps/remixdesktop/src/lib/InferenceServerManager.ts

@@ -43,7 +43,6 @@ export class InferenceManager implements ICompletions {
   selectedModels: IModel[] = []
   event: EventEmitter
   modelCacheDir: string = undefined
-  isInferencing: boolean = false
   private inferenceProcess: any=null
   port = 5501
   inferenceURL = 'http://127.0.0.1:' + this.port
@@ -330,7 +329,7 @@ export class InferenceManager implements ICompletions {
   }

   private async _makeRequest(endpoint, payload){
-    // make a simple request to the inference server
+    // makes a simple request to the inference server
     try {
       const options = { headers: { 'Content-Type': 'application/json', } }
       const response = await axios.post(`${this.inferenceURL}/${endpoint}`, payload, options)
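For reference, the request in _makeRequest is plain JSON-over-HTTP against the local inference server. A standalone sketch of the same call follows; the endpoint name and payload fields are assumptions for illustration, while the URL, headers and axios call mirror the hunk above.

import axios from 'axios'

// Sketch of a JSON POST to the local inference server (port as in the code above).
async function makeRequest(endpoint: string, payload: Record<string, unknown>): Promise<unknown> {
  const inferenceURL = 'http://127.0.0.1:5501'
  const options = { headers: { 'Content-Type': 'application/json' } }
  const response = await axios.post(`${inferenceURL}/${endpoint}`, payload, options)
  return response.data
}

// usage sketch (endpoint and payload are hypothetical)
// makeRequest('code_completion', { prompt: 'contract C {' }).then(console.log)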

apps/remixdesktop/src/plugins/remixAIDektop.ts

@@ -80,6 +80,7 @@ class RemixAIDesktopPluginClient extends ElectronBasePluginClient {
     }

     // set event listeners
+    console.log('Setting event listeners')
     this.desktopInferencer.event.on('onStreamResult', (data) => {
       this.emit('onStreamResult', data)
     })
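The listener wired up above is a straightforward relay: stream chunks from the inference manager's EventEmitter are re-emitted to the plugin client unchanged. A minimal standalone sketch of that relay pattern (the class name and payload type are illustrative):

import { EventEmitter } from 'events'

// Forward 'onStreamResult' from an inner emitter to an outer one, unchanged.
class StreamRelay extends EventEmitter {
  constructor(inferencerEvents: EventEmitter) {
    super()
    inferencerEvents.on('onStreamResult', (data: string) => {
      this.emit('onStreamResult', data)   // forward each chunk as-is
    })
  }
}

// usage sketch
const inner = new EventEmitter()
const relay = new StreamRelay(inner)
relay.on('onStreamResult', (chunk) => process.stdout.write(String(chunk)))
inner.emit('onStreamResult', 'partial result…')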

libs/remix-ai-core/src/prompts/promptBuilder.ts

@@ -18,7 +18,7 @@ export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel
   let newPrompt = ""
   for (const [question, answer] of ChatHistory.getHistory()) {
     if (question.startsWith('sol-gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
-    else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('sol-gpt')[1], answer, modelOP)
+    else if (question.startsWith('gpt')) newPrompt += PromptBuilder(question.split('gpt')[1], answer, modelOP)
     else newPrompt += PromptBuilder(question, answer, modelOP)
   }
   // finaly
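The one-character change above matters because String.prototype.split returns the whole string as a single element when the separator never occurs, so indexing [1] yields undefined. A quick standalone illustration:

// A 'gpt'-prefixed history entry never contains 'sol-gpt', so the old code lost the text.
const question = 'gpt what is a modifier?'

console.log(question.split('sol-gpt')[1])   // undefined               (old behaviour: prompt text lost)
console.log(question.split('gpt')[1])       // ' what is a modifier?'  (fixed: remainder kept)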

libs/remix-ai-core/src/types/models.ts

@@ -94,7 +94,7 @@ const GenerationParams:IParams = {
   topK: 40,
   topP: 0.92,
   max_new_tokens: 2000,
-  stream_result: false,
+  stream_result: true,
 }

 export { DefaultModels, CompletionParams, InsertionParams, GenerationParams }
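Flipping stream_result to true means generation output is expected to arrive as incremental chunks (the onStreamResult events wired up above) rather than as one final string. A minimal sketch of the two consumption styles, assuming a hypothetical generate() helper; only the parameter shape mirrors GenerationParams:

// Parameter shape mirroring GenerationParams above (interface name is illustrative).
interface GenParams {
  topK: number
  topP: number
  max_new_tokens: number
  stream_result: boolean
}

const params: GenParams = { topK: 40, topP: 0.92, max_new_tokens: 2000, stream_result: true }

// Hypothetical generator: with stream_result it reports chunks through a callback,
// otherwise it resolves with the full text in one piece.
async function generate(prompt: string, p: GenParams, onChunk?: (c: string) => void): Promise<string> {
  const chunks = ['pragma solidity ^0.8.0;\n', 'contract Demo {}\n']   // stand-in output
  if (p.stream_result && onChunk) {
    for (const c of chunks) onChunk(c)     // caller renders chunks as they arrive
    return ''
  }
  return chunks.join('')                   // non-streaming: single final string
}

// usage sketch
generate('write a contract', params, (c) => process.stdout.write(c))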

libs/remix-ui/remix-ai/src/lib/components/Default.tsx

@@ -6,7 +6,7 @@ export const Default = (props) => {
   const [searchText, setSearchText] = useState('');
   const [resultText, setResultText] = useState('');
+  const pluginName = 'remixAI'

   const appendText = (newText) => {
     setResultText(resultText => resultText + newText);
   }
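appendText uses the functional form of the state setter, which is what lets streamed chunks accumulate correctly: each update is applied to the latest state instead of a possibly stale closure value. A small standalone sketch of the pattern (the component and button are illustrative, not the real Default component):

import React, { useState } from 'react'

export const StreamedOutput = () => {
  const [resultText, setResultText] = useState('')

  // Functional updater: safe when many chunks arrive in quick succession,
  // because each append starts from the latest committed state.
  const appendText = (newText: string) => {
    setResultText(prev => prev + newText)
  }

  // For contrast, setResultText(resultText + newText) would read the value
  // captured at render time and could drop chunks appended in the same tick.

  return (
    <div>
      <button onClick={() => appendText('chunk ')}>append chunk</button>
      <pre>{resultText}</pre>
    </div>
  )
}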
