diff --git a/commands/message/limited/chat.js b/commands/message/limited/chat.js index 16204a0..c614152 100644 --- a/commands/message/limited/chat.js +++ b/commands/message/limited/chat.js @@ -8,6 +8,7 @@ const superagent = require('superagent') const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown') const { Permissions } = require("detritus-client/lib/constants"); +const { chatgpt, gpt4 } = require('../../../labscore/api/obelisk'); const MODELS = { "chatgpt": { @@ -78,27 +79,23 @@ module.exports = { try{ await editOrReply(context, createEmbed("ai", context, "Generating response...")) - let res = await superagent.post(`${process.env.AI_SERVER}/openai`) - .set({ - Authorization: process.env.AI_SERVER_KEY - }) - .send({ - prompt, - input: [input], - temperature, - model: MODELS[model.toLowerCase()].id - }) + let res; + if(model.toLowerCase() == "chatgpt"){ + res = await chatgpt(context, prompt, input) + } else if (model.toLowerCase() == "gpt4"){ + res = await gpt4(context, prompt, input) + } let description = [] let files = []; - if(!res.body.output) throw "Unable to generate response" + if(!res.response.body.output) throw "Unable to generate response" - if(res.body.output.length <= 4000) description.push(res.body.output) + if(res.response.body.output.length <= 4000) description.push(res.response.body.output) else { files.push({ filename: `chat.${Date.now().toString(36)}.txt`, - value: Buffer.from(res.body.output) + value: Buffer.from(res.response.body.output) }) } diff --git a/commands/message/limited/clyde.js b/commands/message/limited/clyde.js index 5bc2b0d..9f0aff0 100644 --- a/commands/message/limited/clyde.js +++ b/commands/message/limited/clyde.js @@ -9,6 +9,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown') const { Permissions } = require("detritus-client/lib/constants"); const { getUser } = require('../../../labscore/utils/users'); +const { chatgpt } = require('../../../labscore/api/obelisk'); 
const LOADING_QUIPS = [ "Crunching the data, one byte at a time...", @@ -86,16 +87,8 @@ Current time: ${new Date().toLocaleDateString('en-us', { weekday:"long", year:"n await editOrReply(context, e) - let res = await superagent.post(`${process.env.AI_SERVER}/openai`) - .set({ - Authorization: process.env.AI_SERVER_KEY - }) - .send({ - prompt, - input: [input], - temperature: "0.75", - model: "CHATGPT" - }) + let res = await chatgpt(context, prompt, input); + res = res.response; let description = [] let files = []; diff --git a/commands/message/limited/disstrack.js b/commands/message/limited/disstrack.js index a585044..6a6209e 100644 --- a/commands/message/limited/disstrack.js +++ b/commands/message/limited/disstrack.js @@ -6,7 +6,8 @@ const { codeblock, iconPill, smallIconPill } = require('../../../labscore/utils/ const { Permissions } = require("detritus-client/lib/constants"); const { canUseLimitedTestCommands } = require('../../../labscore/utils/testing') -const { STATICS } = require('../../../labscore/utils/statics') +const { STATICS } = require('../../../labscore/utils/statics'); +const { chatgpt } = require('../../../labscore/api/obelisk'); module.exports = { name: 'disstrack', @@ -24,18 +25,11 @@ module.exports = { context.triggerTyping(); if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`)) try{ - await editOrReply(context, createEmbed("ai", context, "Generating response...")) + await editOrReply(context, createEmbed("ai", context, "Spitting bars...")) - let res = await superagent.post(`${process.env.AI_SERVER}/openai`) - .set({ - Authorization: process.env.AI_SERVER_KEY - }) - .send({ - prompt: "Write a disstrack about the subject the user supplies. The disstrack should have at least two verses and a chorus.", - input: [args.text], - temperature: 0.6, - model: "CHATGPT" - }) + let res = await chatgpt(context, "Write a disstrack about the subject the user supplies. 
The disstrack should have at least one verse and a chorus.", args.text); + res = res.response; + return editOrReply(context, createEmbed("default", context, { description: smallIconPill("generative_ai", args.text) + '\n' + codeblock("ansi", [res.body.output.substr(0, 2020 - args.text.length)]), footer: { diff --git a/commands/message/limited/palm.js b/commands/message/limited/palm.js index 7275078..df7d90c 100644 --- a/commands/message/limited/palm.js +++ b/commands/message/limited/palm.js @@ -8,6 +8,7 @@ const superagent = require('superagent') const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown') const { Permissions } = require("detritus-client/lib/constants"); +const { palm2 } = require('../../../labscore/api/obelisk'); module.exports = { name: 'palm', @@ -56,17 +57,9 @@ module.exports = { try{ await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_palm_idle)) - let res = await superagent.post(`${process.env.AI_SERVER}/google/palm2/chat`) - .set({ - Authorization: process.env.AI_SERVER_KEY - }) - .send({ - prompt, - input: [input], - temperature, - model - }) - + let res = await palm2(context, prompt, input) + res = res.response; + let description = [] let files = []; diff --git a/labscore/api/obelisk/endpoints.js b/labscore/api/obelisk/endpoints.js index 1694111..3b19de1 100644 --- a/labscore/api/obelisk/endpoints.js +++ b/labscore/api/obelisk/endpoints.js @@ -7,8 +7,12 @@ const ObeliskApi = Object.freeze({ HOST: ObeliskHosts.prod, GOOGLE_BARD: "/parrot/v1/google:bard", - GEMINI_PRO: "/parrot/v1/google:gemini", - GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision", + GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini", + GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision", + GOOGLE_PALM2: "/parrot/v1/google:palm2", + + OPENAI_CHATGPT: "/parrot/v1/openai:chatgpt", + OPENAI_GPT4: "/parrot/v1/openai:gpt4", SUMMARIZE_WEBPAGES: "/flamingo/v1/web:summarize" }) diff --git a/labscore/api/obelisk/index.js 
b/labscore/api/obelisk/index.js index a4161df..d670e5d 100644 --- a/labscore/api/obelisk/index.js +++ b/labscore/api/obelisk/index.js @@ -48,18 +48,39 @@ module.exports.bard = async function(context, input){ } module.exports.gemini = async function(context, prompt){ - return await request(ObeliskApi.GEMINI_PRO, "POST", {}, { + return await request(ObeliskApi.GOOGLE_GEMINI_PRO, "POST", {}, { prompt }) } module.exports.geminiVision = async function(context, input, url){ - return await request(ObeliskApi.GEMINI_PRO_VISION, "POST", {}, { + return await request(ObeliskApi.GOOGLE_GEMINI_PRO_VISION, "POST", {}, { input, url }) } +module.exports.palm2 = async function(context, prompt, input){ + return await request(ObeliskApi.GOOGLE_PALM2, "POST", {}, { + prompt, + input + }) +} + +module.exports.chatgpt = async function(context, prompt, input){ + return await request(ObeliskApi.OPENAI_CHATGPT, "POST", {}, { + prompt, + input + }) +} + +module.exports.gpt4 = async function(context, prompt, input){ + return await request(ObeliskApi.OPENAI_GPT4, "POST", {}, { + prompt, + input + }) +} + // FLAMINGO module.exports.summarizeWebpage = async function(context, url){ return await request(ObeliskApi.SUMMARIZE_WEBPAGES, "POST", {}, {