reflect monolith2 changes

bignutty 2024-05-12 16:22:50 +02:00
parent d4fff1403b
commit 97204fd33b
3 changed files with 32 additions and 12 deletions


@@ -6,6 +6,10 @@ const ObeliskHosts = Object.freeze({
const ObeliskApi = Object.freeze({
  HOST: ObeliskHosts.prod,

  // monolith2
  LLM_PRIVATE_BARD: "/llm/v1/_private:bard",
  LLM_MODELS_GENERATE: "/llm/v1/generate",

  GOOGLE_BARD: "/parrot/v1/google:bard",
  GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
  GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
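
For orientation, a minimal sketch of how the new route constants compose into a request URL. This is hypothetical: it assumes the request() helper below falls back to ObeliskApi.HOST when no host override is passed, and that fallback line sits outside this diff.

// illustration only, not part of the commit
const path = ObeliskApi.LLM_MODELS_GENERATE    // "/llm/v1/generate"
const url = ObeliskHosts.prod + path           // full endpoint URL on the prod host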


@@ -8,8 +8,7 @@ async function request(path, type, headers, args, host) {
  if(host) url = host + path

  // apply default headers
  if(!headers["Authorization"]) headers["Authorization"] = process.env.OBELISK_API_KEY
  if(!headers["x-obelisk-client"]) headers["x-obelisk-client"] = process.env.OBELISK_CLIENT_ID || "labscore-production-001"
  if(!headers["Authorization"]) headers["Authorization"] = process.env.MONOLITH_API_KEY

  if (type === "GET") {
    if(!args){
@@ -40,6 +39,23 @@ async function request(path, type, headers, args, host) {
  throw new Error("unsupported, must either use GET or POST");
}

// monolith2
// thin wrapper around the private bard passthrough endpoint
module.exports.LlmPrivateBard = async function(context, prompt){
  return await request(ObeliskApi.LLM_PRIVATE_BARD, "POST", {}, {
    prompt
  })
}

// generic generate endpoint; harmLevel feeds the default safety threshold
module.exports.LlmModelsGenerate = async function(context, model, prompt, harmLevel = "BLOCK_NONE"){
  return await request(ObeliskApi.LLM_MODELS_GENERATE, "POST", {}, {
    user_prompt: prompt,
    model: model,
    safety_config: {
      default_safety_threshold: harmLevel
    }
  })
}
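
A hedged usage sketch for the new wrapper; the require path, the model id, and the surrounding async handler are assumptions, not part of this commit:

// hypothetical usage inside some async command handler
const { LlmModelsGenerate } = require("./obelisk")   // module path is an assumption

const res = await LlmModelsGenerate(context, "gemini-pro", "Summarize the last ten messages.")
// omitting harmLevel leaves safety_config.default_safety_threshold at "BLOCK_NONE"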
// GENERATIVEAI
module.exports.bard = async function(context, input){
  return await request(ObeliskApi.GOOGLE_BARD, "POST", {}, {