Migrate AI commands to Obelisk

This commit is contained in:
derpystuff 2023-12-14 21:46:33 +01:00
parent 3b478bd69e
commit 641ab48053
6 changed files with 52 additions and 50 deletions

View file

@ -8,6 +8,7 @@ const superagent = require('superagent')
const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { chatgpt, gpt4 } = require('../../../labscore/api/obelisk');
const MODELS = {
"chatgpt": {
@ -78,27 +79,23 @@ module.exports = {
try{
await editOrReply(context, createEmbed("ai", context, "Generating response..."))
// Route the request to the matching Obelisk endpoint for the selected model.
// (The old direct superagent call to AI_SERVER was removed by this migration;
// its leftover pre-image lines are dropped here.)
let res;
if(model.toLowerCase() == "chatgpt"){
  res = await chatgpt(context, prompt, input)
} else if (model.toLowerCase() == "gpt4"){
  // Bug fix: this branch previously called chatgpt(), silently serving
  // ChatGPT output for GPT-4 requests. Use the dedicated gpt4 endpoint.
  res = await gpt4(context, prompt, input)
}
// NOTE(review): any model other than chatgpt/gpt4 leaves `res` undefined and
// will fail at the res.response access below — confirm MODELS only has these two.
let description = []
let files = [];
// Diff residue removed: the pre-migration `res.body.output` checks were left
// in alongside the new `res.response.body.output` ones; keep only the latter.
if(!res.response.body.output) throw "Unable to generate response"

// Short responses go in the embed description; longer ones are attached as a
// text file (embed descriptions are capped, hence the 4000-character cutoff).
if(res.response.body.output.length <= 4000) description.push(res.response.body.output)
else {
  files.push({
    filename: `chat.${Date.now().toString(36)}.txt`,
    value: Buffer.from(res.response.body.output)
  })
}

View file

@ -9,6 +9,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { getUser } = require('../../../labscore/utils/users');
const { chatgpt } = require('../../../labscore/api/obelisk');
const LOADING_QUIPS = [
"Crunching the data, one byte at a time...",
@ -86,16 +87,8 @@ Current time: ${new Date().toLocaleDateString('en-us', { weekday:"long", year:"n
await editOrReply(context, e)
// Migrated: call the Obelisk ChatGPT endpoint instead of posting to the raw
// AI server with superagent (leftover pre-image lines removed).
let res = await chatgpt(context, prompt, input);
// Downstream code reads res.body.*, so unwrap the Obelisk response envelope.
res = res.response;
let description = []
let files = [];

View file

@ -6,7 +6,8 @@ const { codeblock, iconPill, smallIconPill } = require('../../../labscore/utils/
const { Permissions } = require("detritus-client/lib/constants");
const { canUseLimitedTestCommands } = require('../../../labscore/utils/testing')
const { STATICS } = require('../../../labscore/utils/statics')
const { STATICS } = require('../../../labscore/utils/statics');
const { chatgpt } = require('../../../labscore/api/obelisk');
module.exports = {
name: 'disstrack',
@ -24,18 +25,11 @@ module.exports = {
context.triggerTyping();
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
try{
// Diff residue removed: the old "Generating response..." status line and the
// raw superagent call were left in alongside their replacements.
await editOrReply(context, createEmbed("ai", context, "Spitting bars..."))

let res = await chatgpt(context, "Write a disstrack about the subject the user supplies. The disstrack should have at least one verse and a chorus.", args.text);
// Downstream code reads res.body.*, so unwrap the Obelisk response envelope.
res = res.response;
return editOrReply(context, createEmbed("default", context, {
description: smallIconPill("generative_ai", args.text) + '\n' + codeblock("ansi", [res.body.output.substr(0, 2020 - args.text.length)]),
footer: {

View file

@ -8,6 +8,7 @@ const superagent = require('superagent')
const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { palm2 } = require('../../../labscore/api/obelisk');
module.exports = {
name: 'palm',
@ -56,16 +57,8 @@ module.exports = {
try{
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_palm_idle))
// Migrated: call the Obelisk PaLM 2 endpoint instead of posting to the raw
// AI server with superagent (leftover pre-image lines removed).
let res = await palm2(context, prompt, input)
// Downstream code reads res.body.*, so unwrap the Obelisk response envelope.
res = res.response;
let description = []
let files = [];

View file

@ -7,8 +7,12 @@ const ObeliskApi = Object.freeze({
HOST: ObeliskHosts.prod,
GOOGLE_BARD: "/parrot/v1/google:bard",
GEMINI_PRO: "/parrot/v1/google:gemini",
GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
GOOGLE_PALM2: "/parrot/v1/google:palm2",
OPENAI_CHATGPT: "/parrot/v1/openai:chatgpt",
OPENAI_GPT4: "/parrot/v1/openai:gpt4",
SUMMARIZE_WEBPAGES: "/flamingo/v1/web:summarize"
})

View file

@ -48,18 +48,39 @@ module.exports.bard = async function(context, input){
}
/**
 * Sends a prompt to the Obelisk Gemini Pro endpoint.
 * Duplicate pre-image `return` line (old GEMINI_PRO key) removed; the
 * endpoint constant is now ObeliskApi.GOOGLE_GEMINI_PRO.
 * @param {*} context Command context (not used in the request body).
 * @param {*} prompt Prompt forwarded in the request body.
 * @returns {Promise<*>} Result of request().
 */
module.exports.gemini = async function(context, prompt){
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO, "POST", {}, {
    prompt
  })
}
/**
 * Sends an input plus an image URL to the Obelisk Gemini Pro Vision endpoint.
 * Duplicate pre-image `return` line (old GEMINI_PRO_VISION key) removed; the
 * endpoint constant is now ObeliskApi.GOOGLE_GEMINI_PRO_VISION.
 * @param {*} context Command context (not used in the request body).
 * @param {*} input User input forwarded in the request body.
 * @param {*} url Image URL forwarded in the request body.
 * @returns {Promise<*>} Result of request().
 */
module.exports.geminiVision = async function(context, input, url){
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO_VISION, "POST", {}, {
    input,
    url
  })
}
/**
 * Runs a PaLM 2 completion through the Obelisk proxy.
 * @param {*} context Command context (not used in the request body).
 * @param {*} prompt System prompt for the model.
 * @param {*} input User input forwarded to the model.
 * @returns {Promise<*>} Pending request() result.
 */
module.exports.palm2 = async (context, prompt, input) =>
  request(ObeliskApi.GOOGLE_PALM2, "POST", {}, { prompt, input });
/**
 * Runs a ChatGPT completion through the Obelisk proxy.
 * @param {*} context Command context (not used in the request body).
 * @param {*} prompt System prompt for the model.
 * @param {*} input User input forwarded to the model.
 * @returns {Promise<*>} Pending request() result.
 */
module.exports.chatgpt = async (context, prompt, input) =>
  request(ObeliskApi.OPENAI_CHATGPT, "POST", {}, { prompt, input });
/**
 * Runs a GPT-4 completion through the Obelisk proxy.
 * @param {*} context Command context (not used in the request body).
 * @param {*} prompt System prompt for the model.
 * @param {*} input User input forwarded to the model.
 * @returns {Promise<*>} Pending request() result.
 */
module.exports.gpt4 = async (context, prompt, input) =>
  request(ObeliskApi.OPENAI_GPT4, "POST", {}, { prompt, input });
// FLAMINGO
module.exports.summarizeWebpage = async function(context, url){
return await request(ObeliskApi.SUMMARIZE_WEBPAGES, "POST", {}, {