@@ -17,7 +17,7 @@ export class RemoteInferencer implements ICompletions {
   model_op = RemoteBackendOPModel.CODELLAMA // default model operation change this to llama if necessary
   event: EventEmitter
   test_env=false
-  test_url="http://solcodertest.org/"
+  test_url="http://solcodertest.org"

   constructor(apiUrl?:string, completionUrl?:string) {
     this.api_url = apiUrl!==undefined ? apiUrl: this.test_env? this.test_url : "https://solcoder.remixproject.org"
@@ -42,6 +42,7 @@ export class RemoteInferencer implements ICompletions {
         }
       case AIRequestType.GENERAL:
         if (result.statusText === "OK") {
           if (result.data?.error) return result.data?.error
           const resultText = result.data.generatedText
           ChatHistory.pushHistory(payload.prompt, resultText)
           return resultText
@@ -75,11 +76,9 @@ export class RemoteInferencer implements ICompletions {
       if (payload.return_stream_response) {
         return response
       }

       const reader = response.body!.getReader();
       const decoder = new TextDecoder();
       const parser = new JsonStreamParser();

       while (true) {
         const { done, value } = await reader.read();
         if (done) break;
@@ -87,7 +86,6 @@ export class RemoteInferencer implements ICompletions {
         try {
           console.log("value" + decoder.decode(value))
           const chunk = parser.safeJsonParse<{ generatedText: string; isGenerating: boolean }>(decoder.decode(value, { stream: true }));

           for (const parsedData of chunk) {
             if (parsedData.isGenerating) {
               this.event.emit('onStreamResult', parsedData.generatedText);
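Note (not part of the patch): the loop in the last two hunks drains the HTTP body chunk by chunk, parses each chunk into objects of the shape { generatedText, isGenerating }, and re-emits every partial generation on the inferencer's public `event` emitter as 'onStreamResult'. A minimal consumer sketch under those assumptions follows; the structural type and the renderToken callback are hypothetical names used only to keep the example self-contained.

// Illustrative consumer sketch; this is not part of the diff above.
// It assumes only what the hunks show: the inferencer exposes a public
// `event: EventEmitter` and emits each partial generatedText chunk on
// 'onStreamResult'. StreamingInferencer and renderToken are hypothetical.
import { EventEmitter } from 'events'

type StreamingInferencer = { event: EventEmitter }

function collectStream(inferencer: StreamingInferencer, renderToken: (text: string) => void): () => string {
  let full = ''
  inferencer.event.on('onStreamResult', (text: string) => {
    full += text        // accumulate partial generations as they arrive
    renderToken(text)   // e.g. append the new text to a chat view
  })
  return () => full     // call later to read the accumulated answer
}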