Merge branch 'master' into pastedCodeSafety

pull/5344/head
Aniket committed 3 weeks ago (committed via GitHub)
commit df7c5285e5
4 changed files:

  apps/remixdesktop/src/lib/InferenceServerManager.ts (2 changes)
  libs/remix-ai-core/src/index.ts (4 changes)
  libs/remix-ai-core/src/inferencers/remote/remoteInference.ts (4 changes)
  libs/remix-ai-core/src/prompts/promptBuilder.ts (2 changes)

apps/remixdesktop/src/lib/InferenceServerManager.ts

@@ -6,7 +6,7 @@ import { EventEmitter } from 'events';
 import { ICompletions, IModel, IParams, InsertionParams,
   CompletionParams, GenerationParams, ModelType, AIRequestType,
   IStreamResponse, ChatHistory, downloadLatestReleaseExecutable,
-  buildSolgptPromt } from "@remix/remix-ai-core"
+  buildSolgptPrompt } from "@remix/remix-ai-core"
 import { platform } from 'os';

 class ServerStatusTimer {

libs/remix-ai-core/src/index.ts

@@ -6,7 +6,7 @@ import { IModel, IModelResponse, IModelRequest, InferenceModel, ICompletions,
 import { ModelType } from './types/constants'
 import { DefaultModels, InsertionParams, CompletionParams, GenerationParams } from './types/models'
 import { getCompletionPrompt, getInsertionPrompt } from './prompts/completionPrompts'
-import { buildSolgptPromt, PromptBuilder } from './prompts/promptBuilder'
+import { buildSolgptPrompt, PromptBuilder } from './prompts/promptBuilder'
 import { RemoteInferencer } from './inferencers/remote/remoteInference'
 import { ChatHistory } from './prompts/chat'
 import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'

@@ -14,7 +14,7 @@ import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
 export {
   IModel, IModelResponse, IModelRequest, InferenceModel,
   ModelType, DefaultModels, ICompletions, IParams, IRemoteModel,
-  getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPromt,
+  getCompletionPrompt, getInsertionPrompt, IStreamResponse, buildSolgptPrompt,
   RemoteInferencer, InsertionParams, CompletionParams, GenerationParams,
   ChatEntry, AIRequestType, RemoteBackendOPModel, ChatHistory, downloadLatestReleaseExecutable
 }
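Note: the rename fixes a typo in a public export, so every consumer of
@remix/remix-ai-core must switch to the corrected name. A minimal consumer
sketch, assuming only what this diff shows (the prompt text and model
choice are invented for illustration):

import { buildSolgptPrompt, RemoteBackendOPModel } from "@remix/remix-ai-core"

// Builds a chat-history-aware prompt for the chosen remote backend;
// the old misspelled buildSolgptPromt no longer exists after this commit.
const prompt = buildSolgptPrompt("What does this contract do?", RemoteBackendOPModel.MISTRAL)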

libs/remix-ai-core/src/inferencers/remote/remoteInference.ts

@@ -1,6 +1,6 @@
 import { ICompletions, IParams, AIRequestType, RemoteBackendOPModel, JsonStreamParser } from "../../types/types";
 import { GenerationParams, CompletionParams, InsertionParams } from "../../types/models";
-import { buildSolgptPromt } from "../../prompts/promptBuilder";
+import { buildSolgptPrompt } from "../../prompts/promptBuilder";
 import EventEmitter from "events";
 import { ChatHistory } from "../../prompts/chat";
 import axios from 'axios';

@@ -127,7 +127,7 @@ export class RemoteInferencer implements ICompletions {
   }

   async solidity_answer(prompt, options:IParams=GenerationParams): Promise<any> {
-    const main_prompt = buildSolgptPromt(prompt, this.model_op)
+    const main_prompt = buildSolgptPrompt(prompt, this.model_op)
     const payload = { 'prompt': main_prompt, "endpoint":"solidity_answer", ...options }
     if (options.stream_result) return this._streamInferenceRequest(payload, AIRequestType.GENERAL)
     else return this._makeRequest(payload, AIRequestType.GENERAL)
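Call-site sketch for the renamed path through solidity_answer. The
RemoteInferencer constructor arguments are not shown in this diff, so the
no-argument construction and the question text below are assumptions:

import { RemoteInferencer, GenerationParams } from "@remix/remix-ai-core"

const inferencer = new RemoteInferencer()
// stream_result picks between _streamInferenceRequest and _makeRequest above
const params = { ...GenerationParams, stream_result: false }
const answer = await inferencer.solidity_answer("Explain reentrancy guards", params)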

libs/remix-ai-core/src/prompts/promptBuilder.ts

@@ -7,7 +7,7 @@ export const PromptBuilder = (inst, answr, modelop) => {
   if (modelop === RemoteBackendOPModel.MISTRAL) return ""
 }

-export const buildSolgptPromt = (userPrompt:string, modelOP:RemoteBackendOPModel) => {
+export const buildSolgptPrompt = (userPrompt:string, modelOP:RemoteBackendOPModel) => {
   if (modelOP === undefined) {
     console.log('WARNING: modelOP is undefined. Provide a valid model OP for chat history')
     return userPrompt
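The undefined guard above is unchanged by the rename: without a model OP
the user prompt passes through untouched. A quick sketch (prompt text
invented; the cast is only needed because the parameter is typed as
RemoteBackendOPModel while the body still guards against undefined):

// Logs the WARNING and returns the input string unchanged.
const passthrough = buildSolgptPrompt("What is a fallback function?", undefined as any)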
