reflect monolith2 changes

bignutty 2024-05-12 16:22:50 +02:00
parent d4fff1403b
commit 97204fd33b
3 changed files with 32 additions and 12 deletions

View file

@@ -8,7 +8,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
 const { Permissions, InteractionCallbackTypes } = require("detritus-client/lib/constants");
 const { Components } = require('detritus-client/lib/utils');
-const { bard } = require('../../../labscore/api/obelisk');
+const { LlmPrivateBard } = require('../../../labscore/api/obelisk');
 const { hasFeature } = require('../../../labscore/utils/testing');
 
 module.exports = {
@@ -34,23 +34,23 @@ module.exports = {
     try{
       await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_bard))
 
-      let res = await bard(context, input)
+      let res = await LlmPrivateBard(context, input)
       res = res.response
 
       let description = []
       let files = [];
 
-      if(!res.body.drafts) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
+      if(!res.body.candidates) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
 
-      if(res.body.drafts[0].length <= 4000) description.push(res.body.drafts[0])
+      if(res.body.candidates[0].length <= 4000) description.push(res.body.candidates[0])
       else {
         files.push({
           filename: `chat.${Date.now().toString(36)}.txt`,
-          value: Buffer.from(res.body.drafts[0])
+          value: Buffer.from(res.body.candidates[0])
         })
       }
 
-      if(!res.body.drafts || res.body.drafts?.length <= 1) return editOrReply(context, {
+      if(!res.body.candidates || res.body.candidates?.length <= 1) return editOrReply(context, {
         embeds:[createEmbed("defaultNoFooter", context, {
           author: {
             name: stringwrap(args.text, 50, false),
@@ -78,7 +78,7 @@ module.exports = {
           components.components[0].components[0].options[i].default = (components.components[0].components[0].options[i].value == ctx.data.values[0])
         }
 
-        draft = res.body.drafts[parseInt(ctx.data.values[0].replace('draft-', ''))]
+        draft = res.body.candidates[parseInt(ctx.data.values[0].replace('draft-', ''))]
 
         description = []
         files = [];
@@ -111,9 +111,9 @@ module.exports = {
       })
 
       let draftOptions = [];
-      for (let i = 0; i < res.body.drafts.length; i++) {
+      for (let i = 0; i < res.body.candidates.length; i++) {
         draftOptions.push({
-          label: `Draft ${i + 1}: ${stringwrap(res.body.drafts[i], 50, false)}`,
+          label: `Draft ${i + 1}: ${stringwrap(res.body.candidates[i], 50, false)}`,
           value: "draft-" + (i),
           default: false
         })
@@ -153,7 +153,7 @@ module.exports = {
       if(e.response?.body?.message) return editOrReply(context, createEmbed("warning", context, e.response.body.message))
       console.log(e)
-      return editOrReply(context, createEmbed("error", context, `Unable to generate text.`))
+      return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
     }
   }
 };
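
For reference, a minimal sketch of the response shape the updated handler assumes, with the old `drafts` array renamed to `candidates`. The field names are inferred from the diff above; the real monolith2 payload may carry additional fields, and the helper below is purely illustrative, not part of this commit:

// expected shape (inferred): res.body = { candidates: ["draft one", "draft two", ...] }
const pickCandidate = (res, index = 0) => {
  // mirrors the guard in the command: no candidates -> treated as an upstream error
  const candidates = res?.body?.candidates;
  if (!candidates || !candidates.length) return null;
  return candidates[index] ?? candidates[0];
};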

View file

@@ -6,6 +6,10 @@ const ObeliskHosts = Object.freeze({
 const ObeliskApi = Object.freeze({
   HOST: ObeliskHosts.prod,
 
+  // monolith2
+  LLM_PRIVATE_BARD: "/llm/v1/_private:bard",
+  LLM_MODELS_GENERATE: "/llm/v1/generate",
+
   GOOGLE_BARD: "/parrot/v1/google:bard",
   GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
   GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
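
For context, the new constants are plain request paths that the shared request() helper (next file) prefixes with the configured host. A minimal sketch, using a placeholder host value since the real ObeliskHosts.prod is not shown in this diff:

// illustrative only: the host below is a placeholder, not the production value
const host = "https://obelisk.example.internal";
const url = host + "/llm/v1/_private:bard";   // ObeliskApi.LLM_PRIVATE_BARD
// -> "https://obelisk.example.internal/llm/v1/_private:bard"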

View file

@@ -8,8 +8,7 @@ async function request(path, type, headers, args, host) {
   if(host) url = host + path
 
   // apply default headers
-  if(!headers["Authorization"]) headers["Authorization"] = process.env.OBELISK_API_KEY
-  if(!headers["x-obelisk-client"]) headers["x-obelisk-client"] = process.env.OBELISK_CLIENT_ID || "labscore-production-001"
+  if(!headers["Authorization"]) headers["Authorization"] = process.env.MONOLITH_API_KEY
 
   if (type === "GET") {
     if(!args){
@@ -40,6 +39,23 @@ async function request(path, type, headers, args, host) {
   throw new Error("unsupported, must either use GET or POST");
 }
 
+// monolith2
+module.exports.LlmPrivateBard = async function(context, prompt){
+  return await request(ObeliskApi.LLM_PRIVATE_BARD, "POST", {}, {
+    prompt
+  })
+}
+
+module.exports.LlmModelsGenerate = async function(context, model, prompt, harmLevel = "BLOCK_NONE"){
+  return await request(ObeliskApi.LLM_MODELS_GENERATE, "POST", {}, {
+    user_prompt: prompt,
+    model: model,
+    safety_config: {
+      default_safety_threshold: harmLevel
+    }
+  })
+}
+
 // GENERATIVEAI
 module.exports.bard = async function(context, input){
   return await request(ObeliskApi.GOOGLE_BARD, "POST", {}, {
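
For reference, a hypothetical call sketch showing how the new monolith2 helpers could be consumed from a command handler. The wrapper function, model id, and prompts are placeholders, not confirmed by this commit:

// assumes a detritus-style command context; all values are illustrative
const { LlmPrivateBard, LlmModelsGenerate } = require('../../../labscore/api/obelisk');

async function demo(context) {
  // same access pattern as the bard command above: res.response.body.candidates
  let res = await LlmPrivateBard(context, "Write a haiku about robots.");
  console.log(res.response.body.candidates[0]);

  // harmLevel defaults to "BLOCK_NONE"; "example-model-001" is a placeholder model id
  res = await LlmModelsGenerate(context, "example-model-001", "Summarize this text.");
}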