From 5ed9c814cae493f74958d76cd1bc913b84f5a450 Mon Sep 17 00:00:00 2001
From: STetsing <41009393+STetsing@users.noreply.github.com>
Date: Tue, 18 Feb 2025 14:30:10 +0100
Subject: [PATCH] sig in api

---
 .../src/lib/plugins/remixAIDesktop-api.ts     | 16 +++++++---------
 libs/remix-api/src/lib/plugins/remixai-api.ts | 13 +++++++------
 2 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/libs/remix-api/src/lib/plugins/remixAIDesktop-api.ts b/libs/remix-api/src/lib/plugins/remixAIDesktop-api.ts
index 9e4cdd4716..4ccd1c7c46 100644
--- a/libs/remix-api/src/lib/plugins/remixAIDesktop-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixAIDesktop-api.ts
@@ -10,14 +10,12 @@ export interface IRemixAID {
   } & StatusEvents,
   methods: {
-    code_completion(context: string): Promise
-    code_insertion(msg_pfx: string, msg_sfx: string): Promise,
-    code_generation(prompt: string): Promise,
-    code_explaining(code: string, context?: string): Promise,
-    error_explaining(prompt: string): Promise,
-    solidity_answer(prompt: string): Promise,
-    initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
-    chatPipe(pipeMessage: string): Promise,
-    ProcessChatRequestBuffer(params:IParams): Promise,
+    code_completion(prompt: string, context: string, params?): Promise
+    code_insertion(msg_pfx: string, msg_sfx: string, params?): Promise,
+    code_generation(prompt: string, params?): Promise,
+    code_explaining(code: string, context?: string, params?): Promise,
+    error_explaining(prompt: string, context?: string, params?): Promise,
+    solidity_answer(prompt: string, params?): Promise,
+    initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
   }
 }
 
diff --git a/libs/remix-api/src/lib/plugins/remixai-api.ts b/libs/remix-api/src/lib/plugins/remixai-api.ts
index 0ea1498151..6128acd96b 100644
--- a/libs/remix-api/src/lib/plugins/remixai-api.ts
+++ b/libs/remix-api/src/lib/plugins/remixai-api.ts
@@ -9,15 +9,16 @@ export interface IRemixAI {
     onInferenceDone():void,
   } & StatusEvents,
   methods: {
-    code_completion(context: string): Promise
-    code_insertion(msg_pfx: string, msg_sfx: string): Promise,
-    code_generation(prompt: string): Promise,
-    code_explaining(code: string, context?: string): Promise,
-    error_explaining(prompt: string): Promise,
-    solidity_answer(prompt: string): Promise,
+    code_completion(prompt: string, context: string, params?): Promise
+    code_insertion(msg_pfx: string, msg_sfx: string, params?): Promise,
+    code_generation(prompt: string, params?): Promise,
+    code_explaining(code: string, context?: string, params?): Promise,
+    error_explaining(prompt: string, context?: string, params?): Promise,
+    solidity_answer(prompt: string, params?): Promise,
     initializeModelBackend(local: boolean, generalModel?, completionModel?): Promise,
     chatPipe(pipeMessage: string): Promise,
     ProcessChatRequestBuffer(params:IParams): Promise,
     initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean): Promise,
+    vulnerability_check(prompt: string, params?): Promise,
   }
 }
\ No newline at end of file
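
Note (not part of the patch): below is a minimal sketch of how a consuming plugin might call the widened signatures, assuming the provider is registered under the name `remixAI` and that the trailing `params` argument is an optional bag of inference options. Both the plugin name and the fields shown inside `params` are assumptions for illustration; the patch itself does not define the shape of `params`.

```ts
// Sketch only. Assumes the @remixproject plugin engine and a provider plugin
// registered as 'remixAI'; the option fields inside `params` are hypothetical.
import { Plugin } from '@remixproject/engine'

class AIConsumerPlugin extends Plugin {
  constructor() {
    super({ name: 'aiConsumer', methods: [] })
  }

  async completeAtCursor(prefix: string, suffix: string): Promise<any> {
    // The new trailing `params` argument is optional and may be omitted entirely.
    const params = { temperature: 0.2 } // hypothetical option bag
    return this.call('remixAI', 'code_insertion', prefix, suffix, params)
  }

  async auditContract(source: string): Promise<any> {
    // `vulnerability_check` is newly exposed on IRemixAI by this patch.
    return this.call('remixAI', 'vulnerability_check', source)
  }
}
```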