Merge branch 'master' into pastedCodeSafety

pull/5344/head
Aniket committed 2 weeks ago via GitHub
commit df7c5285e5
Changed files (4):
  1. apps/remixdesktop/src/lib/InferenceServerManager.ts (2 changed lines)
  2. libs/remix-ai-core/src/index.ts (4 changed lines)
  3. libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (4 changed lines)
  4. libs/remix-ai-core/src/prompts/promptBuilder.ts (2 changed lines)
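
Every hunk below is the same one-character fix: the misspelled export buildSolgptPromt is renamed to buildSolgptPrompt at its definition in promptBuilder.ts and at each import and call site; a short usage sketch follows the last hunk.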

apps/remixdesktop/src/lib/InferenceServerManager.ts
@@ -6,7 +6,7 @@ import { EventEmitter } from 'events';
 import { ICompletions, IModel, IParams, InsertionParams,
   CompletionParams, GenerationParams, ModelType, AIRequestType,
   IStreamResponse, ChatHistory, downloadLatestReleaseExecutable,
-  buildSolgptPromt } from "@remix/remix-ai-core"
+  buildSolgptPrompt } from "@remix/remix-ai-core"
 import { platform } from 'os';

 class ServerStatusTimer {

libs/remix-ai-core/src/index.ts
@@ -6,7 +6,7 @@ import { IModel, IModelResponse, IModelRequest, InferenceModel, ICompletions,
 import { ModelType } from './types/constants'
 import { DefaultModels, InsertionParams, CompletionParams, GenerationParams } from './types/models'
 import { getCompletionPrompt, getInsertionPrompt } from './prompts/completionPrompts'
-import { buildSolgptPromt, PromptBuilder } from './prompts/promptBuilder'
+import { buildSolgptPrompt, PromptBuilder } from './prompts/promptBuilder'
 import { RemoteInferencer } from './inferencers/remote/remoteInference'
 import { ChatHistory } from './prompts/chat'
 import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
@@ -14,7 +14,7 @@ import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
 export {
   IModel, IModelResponse, IModelRequest, InferenceModel,
   ModelType, DefaultModels, ICompletions, IParams, IRemoteModel,
-  getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPromt,
+  getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPrompt,
   RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
   ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
 }

libs/remix-ai-core/src/inferencers/remote/remoteInference.ts
@@ -1,6 +1,6 @@
 import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel, JsonStreamParser } from "../../types/types";
 import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
-import { buildSolgptPromt } from "../../prompts/promptBuilder";
+import { buildSolgptPrompt } from "../../prompts/promptBuilder";
 import EventEmitter from "events";
 import { ChatHistory } from "../../prompts/chat";
 import axios from 'axios';
@@ -127,7 +127,7 @@ export class RemoteInferencer implements ICompletions {
   }

   async solidity_answer(prompt, options:IParams=GenerationParams): Promise<any> {
-    const main_prompt = buildSolgptPromt(prompt, this.model_op)
+    const main_prompt = buildSolgptPrompt(prompt, this.model_op)
     const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
     if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
     else return this._makeRequest(payload, AIRequestType.GENERAL)

libs/remix-ai-core/src/prompts/promptBuilder.ts
@@ -7,7 +7,7 @@ export const PromptBuilder = (inst, answr, modelop) => {
   if (modelop === RemoteBackendOPModel.MISTRAL) return ""
 }

-export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel) => {
+export const buildSolgptPrompt = (userPrompt:string, modelOP:RemoteBackendOPModel) => {
   if (modelOP === undefined) {
     console.log('WARNING: modelOP is undefined. Provide a valid model OP for chat history')
     return userPrompt
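
For context, a minimal usage sketch of the renamed helper, assuming only the @remix/remix-ai-core exports visible in the index.ts hunk above; the question string and variable names are illustrative, not taken from the diff:

// Usage sketch (assumed call pattern, mirroring solidity_answer() above).
import { buildSolgptPrompt, RemoteBackendOPModel, GenerationParams } from "@remix/remix-ai-core"

// Fold any stored chat history into the raw user prompt, the same way
// RemoteInferencer.solidity_answer() does after this rename.
const mainPrompt = buildSolgptPrompt("What does the immutable keyword do?", RemoteBackendOPModel.MISTRAL)

// Mirror the payload shape built in remoteInference.ts; GenerationParams
// stands in for the options object that solidity_answer() spreads in.
const payload = { prompt: mainPrompt, endpoint: "solidity_answer", ...GenerationParams }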
