diff --git a/commands/message/genai/bard.js b/commands/message/genai/bard.js
index 24bc2f7..5bf521c 100644
--- a/commands/message/genai/bard.js
+++ b/commands/message/genai/bard.js
@@ -8,7 +8,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
 const { Permissions, InteractionCallbackTypes } = require("detritus-client/lib/constants");
 const { Components } = require('detritus-client/lib/utils');
 
-const { bard } = require('../../../labscore/api/obelisk');
+const { LlmPrivateBard } = require('../../../labscore/api/obelisk');
 const { hasFeature } = require('../../../labscore/utils/testing');
 
 module.exports = {
@@ -34,23 +34,23 @@ module.exports = {
     try{
       await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_bard))
 
-      let res = await bard(context, input)
+      let res = await LlmPrivateBard(context, input)
       res = res.response
 
       let description = []
       let files = [];
 
-      if(!res.body.drafts) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
+      if(!res.body.candidates) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
 
-      if(res.body.drafts[0].length <= 4000) description.push(res.body.drafts[0])
+      if(res.body.candidates[0].length <= 4000) description.push(res.body.candidates[0])
       else {
         files.push({
           filename: `chat.${Date.now().toString(36)}.txt`,
-          value: Buffer.from(res.body.drafts[0])
+          value: Buffer.from(res.body.candidates[0])
         })
       }
 
-      if(!res.body.drafts || res.body.drafts?.length <= 1) return editOrReply(context, {
+      if(!res.body.candidates || res.body.candidates?.length <= 1) return editOrReply(context, {
         embeds:[createEmbed("defaultNoFooter", context, {
           author: {
             name: stringwrap(args.text, 50, false),
@@ -78,7 +78,7 @@ module.exports = {
           components.components[0].components[0].options[i].default = (components.components[0].components[0].options[i].value == ctx.data.values[0])
         }
 
-        draft = res.body.drafts[parseInt(ctx.data.values[0].replace('draft-', ''))]
+        draft = res.body.candidates[parseInt(ctx.data.values[0].replace('draft-', ''))]
 
         description = []
         files = [];
@@ -111,9 +111,9 @@ module.exports = {
       })
 
       let draftOptions = [];
-      for (let i = 0; i < res.body.drafts.length; i++) {
+      for (let i = 0; i < res.body.candidates.length; i++) {
         draftOptions.push({
-          label: `Draft ${i + 1}: ​ ${stringwrap(res.body.drafts[i], 50, false)}`,
+          label: `Draft ${i + 1}: ​ ${stringwrap(res.body.candidates[i], 50, false)}`,
           value: "draft-" + (i),
           default: false
         })
@@ -153,7 +153,7 @@ module.exports = {
       if(e.response?.body?.message) return editOrReply(context, createEmbed("warning", context, e.response.body.message))
 
       console.log(e)
-      return editOrReply(context, createEmbed("error", context, `Unable to generate text.`))
+      return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
     }
   }
};
\ No newline at end of file
diff --git a/labscore/api/obelisk/endpoints.js b/labscore/api/obelisk/endpoints.js
index 10534d2..63270b7 100644
--- a/labscore/api/obelisk/endpoints.js
+++ b/labscore/api/obelisk/endpoints.js
@@ -6,6 +6,10 @@ const ObeliskHosts = Object.freeze({
 
 const ObeliskApi = Object.freeze({
   HOST: ObeliskHosts.prod,
+  // monolith2
+  LLM_PRIVATE_BARD: "/llm/v1/_private:bard",
+  LLM_MODELS_GENERATE: "/llm/v1/generate",
+
   GOOGLE_BARD: "/parrot/v1/google:bard",
   GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
   GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
diff --git a/labscore/api/obelisk/index.js b/labscore/api/obelisk/index.js
index b2416fa..3d0b8ae 100644
--- a/labscore/api/obelisk/index.js
+++ b/labscore/api/obelisk/index.js
@@ -8,8 +8,7 @@ async function request(path, type, headers, args, host) {
   if(host) url = host + path
 
   // apply default headers
-  if(!headers["Authorization"]) headers["Authorization"] = process.env.OBELISK_API_KEY
-  if(!headers["x-obelisk-client"]) headers["x-obelisk-client"] = process.env.OBELISK_CLIENT_ID || "labscore-production-001"
+  if(!headers["Authorization"]) headers["Authorization"] = process.env.MONOLITH_API_KEY
 
   if (type === "GET") {
     if(!args){
@@ -40,6 +39,23 @@ async function request(path, type, headers, args, host) {
   throw new Error("unsupported, must either use GET or POST");
 }
 
+// monolith2
+module.exports.LlmPrivateBard = async function(context, prompt){
+  return await request(ObeliskApi.LLM_PRIVATE_BARD, "POST", {}, {
+    prompt
+  })
+}
+
+module.exports.LlmModelsGenerate = async function(context, model, prompt, harmLevel = "BLOCK_NONE"){
+  return await request(ObeliskApi.LLM_MODELS_GENERATE, "POST", {}, {
+    user_prompt: prompt,
+    model: model,
+    safety_config: {
+      default_safety_threshold: harmLevel
+    }
+  })
+}
+
 // GENERATIVEAI
 module.exports.bard = async function(context, input){
   return await request(ObeliskApi.GOOGLE_BARD, "POST", {}, {