pull/5241/head
STetsing 4 months ago
parent 39e89ab6ac
commit 52408f32e5
  1. apps/remix-ide/src/app/plugins/remixAIPlugin.tsx (3 changes)
  2. libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (6 changes)
  3. libs/remix-ai-core/src/types/models.ts (2 changes)
  4. libs/remix-ui/tabs/src/lib/remix-ui-tabs.tsx (3 changes)

@@ -52,8 +52,8 @@ export class RemixAIPlugin extends ViewPlugin {
console.log('Activating RemixAIPlugin on browser')
this.initialize()
}
this.call('sidePanel', 'pinView', profile)
}
async initialize(model1?:IModel, model2?:IModel, remoteModel?:IRemoteModel, useRemote?:boolean){
if (this.isOnDesktop) {
// on desktop use remote inferencer -> false
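A rough sketch of the branching this comment hints at, assuming the desktop build prefers local models unless remote inference is explicitly requested; the real initialize() body is not shown in this hunk and the helper name below is made up for illustration.
// Illustrative only, not the real initialize() logic.
function pickInferencer (isOnDesktop: boolean, useRemote?: boolean): 'local' | 'remote' {
  // on desktop, local models can be used unless the caller explicitly asks for remote
  if (isOnDesktop && !useRemote) return 'local'
  // in the browser (or with useRemote set) the hosted remote inferencer is used
  return 'remote'
}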
@@ -186,7 +186,6 @@ export class RemixAIPlugin extends ViewPlugin {
}
}
async ProcessChatRequestBuffer(params:IParams=GenerationParams){
if (this.chatRequestBuffer != null){
const result = this[this.chatRequestBuffer.fn_name](this.chatRequestBuffer.prompt, this.chatRequestBuffer.context, params)
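For orientation, a sketch of the shape the buffered request plausibly has, inferred from the dispatch above; the actual type in remix-ai-core may differ and the example fn_name value is an assumption.
// Sketch only, inferred from `this[this.chatRequestBuffer.fn_name](...)` above.
interface ChatRequestBufferSketch {
  fn_name: string   // plugin method to invoke by name, e.g. 'code_explaining' (assumed)
  prompt: string    // queued user prompt
  context: string   // extra context passed alongside the prompt
}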

@@ -17,7 +17,7 @@ export class RemoteInferencer implements ICompletions {
model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
event: EventEmitter
test_env=false
test_url="http://solcodertest.org/"
test_url="http://solcodertest.org"
constructor(apiUrl?:string, completionUrl?:string) {
this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
@@ -42,6 +42,7 @@ export class RemoteInferencer implements ICompletions {
}
case AIRequestType.GENERAL:
if (result.statusText === "OK") {
if (result.data?.error) return result.data?.error
const resultText = result.data.generatedText
ChatHistory.pushHistory(payload.prompt, resultText)
return resultText
@@ -75,11 +76,9 @@ export class RemoteInferencer implements ICompletions {
if (payload.return_stream_response) {
return response
}
const reader = response.body!.getReader();
const decoder = new TextDecoder();
const parser = new JsonStreamParser();
while (true) {
const { done, value } = await reader.read();
if (done) break;
@@ -87,7 +86,6 @@ export class RemoteInferencer implements ICompletions {
try {
console.log("value" + decoder.decode(value))
const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));
for (const parsedData of chunk) {
if (parsedData.isGenerating) {
this.event.emit('onStreamResult', parsedData.generatedText);
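The streaming loop above relies on JsonStreamParser.safeJsonParse to split decoded chunks into objects. A minimal stand-in, assuming newline-delimited JSON frames of the shape { generatedText, isGenerating }; the real parser in remix-ai-core may handle framing differently.
// Minimal stand-in for JsonStreamParser, not the actual implementation.
class NdjsonParserSketch {
  private tail = ''  // buffers a partial frame across chunk boundaries

  safeJsonParse<T>(text: string): T[] {
    const out: T[] = []
    this.tail += text
    const lines = this.tail.split('\n')
    this.tail = lines.pop() ?? ''   // keep the trailing, possibly incomplete frame
    for (const line of lines) {
      if (!line.trim()) continue
      try { out.push(JSON.parse(line) as T) } catch { /* ignore malformed fragments */ }
    }
    return out
  }
}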

@@ -61,6 +61,7 @@ const CompletionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 15,
stream_result: false,
}
const InsertionParams:IParams = {
@@ -68,6 +69,7 @@ const InsertionParams:IParams = {
topK: 40,
topP: 0.92,
max_new_tokens: 150,
stream_result: false,
}
const GenerationParams:IParams = {

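The two hunks above pin stream_result to false for completion and insertion requests. A hedged sketch of the resulting parameter objects, keeping only the fields visible in this diff; the full IParams type lives in remix-ai-core.
// Only the fields shown in the diff above.
const completionLike = { topK: 40, topP: 0.92, max_new_tokens: 15, stream_result: false }
const insertionLike  = { topK: 40, topP: 0.92, max_new_tokens: 150, stream_result: false }
// GenerationParams is not modified in this hunk; its stream_result setting is not shown here.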
@@ -252,7 +252,6 @@ export const TabsUI = (props: TabsUIProps) => {
setExplaining(true)
// if plugin is pinned,
if (await props.plugin.call('pinnedPanel', 'currentFocus') === 'remixAI'){
console.log("pinned has focus")
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}
else {
@@ -271,14 +270,12 @@ export const TabsUI = (props: TabsUIProps) => {
documentation: 'https://remix-ide.readthedocs.io/en/latest/remixai.html',
maintainedBy: 'Remix'
}
console.log("pinned does not have focus")
// await props.plugin.call('sidePanel', 'focus', 'remixAI')
await props.plugin.call('sidePanel', 'pinView', profile)
setTimeout(async () => {
await props.plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
}, 500)
}
// await props.plugin.call('remixAI', 'code_explaining', content)
setExplaining(false)
_paq.push(['trackEvent', 'ai', 'remixAI', 'explain_file'])
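Taken together, the tabs change pipes the explain request directly when the RemixAI view already has focus, and otherwise pins the view first and pipes after a short delay. A condensed sketch using the same plugin calls as the diff; the helper name and the loose plugin typing are illustrative only.
// Condensed sketch of the flow above; explainCurrentFile is a made-up helper name.
async function explainCurrentFile (plugin: any, profile: any, content: string) {
  if (await plugin.call('pinnedPanel', 'currentFocus') === 'remixAI') {
    // the RemixAI view is already pinned and focused: pipe the request directly
    await plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
  } else {
    // pin the view first, then give it ~500 ms to mount before piping the request
    await plugin.call('sidePanel', 'pinView', profile)
    setTimeout(async () => {
      await plugin.call('remixAI', 'chatPipe', 'code_explaining', content)
    }, 500)
  }
}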
