Merge branch 'main' of https://gitlab.com/bignutty/labscore into main

This commit is contained in:
derpystuff 2023-12-19 19:25:29 +01:00
commit 4798e60585
14 changed files with 255 additions and 72 deletions

View file

@ -4,6 +4,7 @@ const { editOrReply } = require('../../../labscore/utils/message')
const superagent = require('superagent');
const { Permissions } = require("detritus-client/lib/constants");
const { otter } = require('../../../labscore/api');
module.exports = {
name: 'otter',
@ -17,16 +18,15 @@ module.exports = {
run: async (context) => {
await context.triggerTyping();
try{
let res = await superagent.get(`https://otter.bruhmomentlol.repl.co/random`)
.set("User-Agent","labscore/2.0")
const ott = (await otter()).response.body
return await editOrReply(context, {
embeds: [ createEmbed("image", context, {
url: `otter.${res.headers["x-file-ext"]}`
})],
files: [{ filename: `otter.${res.headers["x-file-ext"]}`, value: res.body }]
})
return editOrReply(context, createEmbed("default", context, {
image: {
url: ott.url
}
}))
}catch(e){
console.log(e)
return editOrReply(context, createEmbed("error", context, `Unable to fetch otter.`))
}
}

View file

@ -0,0 +1,82 @@
const { geminiVision } = require("../../../labscore/api/obelisk");
const { getRecentImage } = require("../../../labscore/utils/attachment");
const { createEmbed } = require("../../../labscore/utils/embed");
const { editOrReply } = require("../../../labscore/utils/message");
const { getUser } = require("../../../labscore/utils/users");
const { Permissions } = require("detritus-client/lib/constants");
const superagent = require('superagent');
const { STATIC_ICONS } = require("../../../labscore/utils/statics");
const { stringwrap, iconPill, smallIconPill } = require("../../../labscore/utils/markdown");
const { canUseLimitedTestCommands } = require("../../../labscore/utils/testing");
module.exports = {
name: 'gemini-vision',
label: 'text',
aliases: ['gv'],
metadata: {
description: `${iconPill("generative_ai", "LIMITED TESTING")}\n${smallIconPill("reply", "Supports Replies")}\n\nRun Gemini Vision on an Image with a custom prompt.`,
description_short: 'Run Gemini Vision ',
examples: ['gv Which show is this image from?'],
category: 'limited',
usage: 'gemini-vision <attachment> <prompt>'
},
permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.ATTACH_FILES, Permissions.READ_MESSAGE_HISTORY],
run: async (context, args) => {
context.triggerTyping();
if(!canUseLimitedTestCommands(context)) return;
context.triggerTyping();
// for the sake of privacy, make the context window one message
let image = await getRecentImage(context, 1)
if (!image) return editOrReply(context, createEmbed("warning", context, "No images found. Reply if you want a specific image."))
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
let input = args.text;
try{
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_gemini))
let res = await geminiVision(context, input, image)
let description = []
let files = [];
if(res.response.body.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
let output = res.response.body.gemini?.candidates[0]?.content?.parts[0]?.text
if(!output) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
if(output.length <= 4000) description.push(output)
else {
files.push({
filename: `gemini.${Date.now().toString(36)}.txt`,
value: Buffer.from(output)
})
}
return editOrReply(context, {
embeds:[createEmbed("defaultNoFooter", context, {
author: {
name: stringwrap(input, 50, false),
iconUrl: STATIC_ICONS.ai_gemini
},
thumbnail: {
url: image
},
description: description.join('\n'),
footer: {
text: `Generative AI is experimental • Data submitted to Gemini may be used by Google for training.`
}
})],
files
})
} catch(e){
console.log(e)
if(e.response?.body?.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
}
}
};

View file

@ -0,0 +1,71 @@
const { gemini } = require("../../../labscore/api/obelisk");
const { createEmbed } = require("../../../labscore/utils/embed");
const { editOrReply } = require("../../../labscore/utils/message");
const { Permissions } = require("detritus-client/lib/constants");
const { STATIC_ICONS } = require("../../../labscore/utils/statics");
const { stringwrap, iconPill, smallIconPill } = require("../../../labscore/utils/markdown");
const { canUseLimitedTestCommands } = require("../../../labscore/utils/testing");
module.exports = {
name: 'gemini',
label: 'text',
aliases: ['gem'],
metadata: {
description: `${iconPill("generative_ai", "LIMITED TESTING")}\n${smallIconPill("reply", "Supports Replies")}\n\nRun Gemini Pro with a custom prompt.`,
description_short: 'Gemini',
examples: ['gem why do they call it oven when you of in the cold food of out hot eat the food'],
category: 'limited',
usage: 'gemini <prompt>'
},
permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.ATTACH_FILES, Permissions.READ_MESSAGE_HISTORY],
run: async (context, args) => {
context.triggerTyping();
if(!canUseLimitedTestCommands(context)) return;
context.triggerTyping();
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
let input = args.text;
try{
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_gemini))
let res = await gemini(context, input)
let description = []
let files = [];
if(res.response.body.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
let output = res.response.body.gemini?.candidates[0]?.content?.parts[0]?.text
if(!output) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
if(output.length <= 4000) description.push(output)
else {
files.push({
filename: `gemini.${Date.now().toString(36)}.txt`,
value: Buffer.from(output)
})
}
return editOrReply(context, {
embeds:[createEmbed("defaultNoFooter", context, {
author: {
name: stringwrap(input, 50, false),
iconUrl: STATIC_ICONS.ai_gemini
},
description: description.join('\n'),
footer: {
text: `Generative AI is experimental • Data submitted to Gemini may be used by Google for training.`
}
})],
files
})
} catch(e){
console.log(e)
if(e.response?.body?.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
}
}
};

View file

@ -9,6 +9,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
const { Permissions, InteractionCallbackTypes } = require("detritus-client/lib/constants");
const { Components } = require('detritus-client/lib/utils');
const { bard } = require('../../../labscore/api/obelisk');
module.exports = {
name: 'bard',
@ -33,34 +34,26 @@ module.exports = {
try{
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_bard))
let res = await superagent.post(`${process.env.AI_SERVER}/google/bard`)
.set({
Authorization: process.env.AI_SERVER_KEY
})
.query({
with_drafts: true
})
.send({
input
})
let res = await bard(context, input)
res = res.response
let description = []
let files = [];
if(!res.body.output) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
if(!res.body.drafts) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
if(res.body.output.length <= 4000) description.push(res.body.output)
if(res.body.drafts[0].length <= 4000) description.push(res.body.drafts[0])
else {
files.push({
filename: `chat.${Date.now().toString(36)}.txt`,
value: Buffer.from(res.body.output)
value: Buffer.from(res.body.drafts[0])
})
}
if(!res.body.drafts || res.body.drafts?.length <= 1) return editOrReply(context, {
embeds:[createEmbed("defaultNoFooter", context, {
author: {
name: stringwrap(args.text, 50),
name: stringwrap(args.text, 50, false),
iconUrl: STATIC_ICONS.ai_bard_idle
},
description: description.join('\n'),

View file

@ -8,6 +8,7 @@ const superagent = require('superagent')
const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { chatgpt } = require('../../../labscore/api/obelisk');
const MODELS = {
"chatgpt": {
@ -78,27 +79,23 @@ module.exports = {
try{
await editOrReply(context, createEmbed("ai", context, "Generating response..."))
let res = await superagent.post(`${process.env.AI_SERVER}/openai`)
.set({
Authorization: process.env.AI_SERVER_KEY
})
.send({
prompt,
input: [input],
temperature,
model: MODELS[model.toLowerCase()].id
})
let res;
if(model.toLowerCase() == "chatgpt"){
res = await chatgpt(context, prompt, input)
} else if (model.toLowerCase() == "gpt4"){
res = await chatgpt(context, prompt, input)
}
let description = []
let files = [];
if(!res.body.output) throw "Unable to generate response"
if(!res.response.body.output) throw "Unable to generate response"
if(res.body.output.length <= 4000) description.push(res.body.output)
if(res.response.body.output.length <= 4000) description.push(res.response.body.output)
else {
files.push({
filename: `chat.${Date.now().toString(36)}.txt`,
value: Buffer.from(res.body.output)
value: Buffer.from(res.response.body.output)
})
}

View file

@ -9,6 +9,7 @@ const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { getUser } = require('../../../labscore/utils/users');
const { chatgpt } = require('../../../labscore/api/obelisk');
const LOADING_QUIPS = [
"Crunching the data, one byte at a time...",
@ -86,16 +87,8 @@ Current time: ${new Date().toLocaleDateString('en-us', { weekday:"long", year:"n
await editOrReply(context, e)
let res = await superagent.post(`${process.env.AI_SERVER}/openai`)
.set({
Authorization: process.env.AI_SERVER_KEY
})
.send({
prompt,
input: [input],
temperature: "0.75",
model: "CHATGPT"
})
let res = await chatgpt(context, prompt, input);
res = res.response;
let description = []
let files = [];

View file

@ -6,7 +6,8 @@ const { codeblock, iconPill, smallIconPill } = require('../../../labscore/utils/
const { Permissions } = require("detritus-client/lib/constants");
const { canUseLimitedTestCommands } = require('../../../labscore/utils/testing')
const { STATICS } = require('../../../labscore/utils/statics')
const { STATICS } = require('../../../labscore/utils/statics');
const { chatgpt } = require('../../../labscore/api/obelisk');
module.exports = {
name: 'disstrack',
@ -24,18 +25,11 @@ module.exports = {
context.triggerTyping();
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
try{
await editOrReply(context, createEmbed("ai", context, "Generating response..."))
await editOrReply(context, createEmbed("ai", context, "Spitting bars..."))
let res = await chatgpt(context, "Write a disstrack about the subject the user supplies. The disstrack should have at least one verse and a chorus.", args.text);
res = res.response;
let res = await superagent.post(`${process.env.AI_SERVER}/openai`)
.set({
Authorization: process.env.AI_SERVER_KEY
})
.send({
prompt: "Write a disstrack about the subject the user supplies. The disstrack should have at least two verses and a chorus.",
input: [args.text],
temperature: 0.6,
model: "CHATGPT"
})
return editOrReply(context, createEmbed("default", context, {
description: smallIconPill("generative_ai", args.text) + '\n' + codeblock("ansi", [res.body.output.substr(0, 2020 - args.text.length)]),
footer: {

View file

@ -8,6 +8,7 @@ const superagent = require('superagent')
const { iconPill, stringwrap, smallIconPill } = require('../../../labscore/utils/markdown')
const { Permissions } = require("detritus-client/lib/constants");
const { palm2 } = require('../../../labscore/api/obelisk');
module.exports = {
name: 'palm',
@ -56,16 +57,8 @@ module.exports = {
try{
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_palm_idle))
let res = await superagent.post(`${process.env.AI_SERVER}/google/palm2/chat`)
.set({
Authorization: process.env.AI_SERVER_KEY
})
.send({
prompt,
input: [input],
temperature,
model
})
let res = await palm2(context, prompt, input)
res = res.response;
let description = []
let files = [];

View file

@ -54,6 +54,7 @@ const Api = Object.freeze({
UTILS_GARFIELD: '/utils/garfield',
UTILS_INFERKIT: '/utils/inferkit',
UTILS_MAPKIT: '/utils/mapkit',
UTILS_OTTER: '/utils/otter',
UTILS_PERSPECTIVE: '/utils/perspective',
UTILS_SCREENSHOT: '/utils/screenshot',
UTILS_TEXTGENERATOR: '/utils/text-generator',

View file

@ -304,7 +304,7 @@ module.exports.emojipedia = async function(context, emoji){
})
}
module.exports.garfield = async function(context, emoji){
module.exports.garfield = async function(context,){
return await request(Api.UTILS_GARFIELD, "GET", {}, {})
}
@ -314,6 +314,10 @@ module.exports.inferkit = async function(context, input){
})
}
// Fetches a random otter image from the labscore utils API.
module.exports.otter = async function(context){
  const result = await request(Api.UTILS_OTTER, "GET", {}, {});
  return result;
}
module.exports.perspective = async function(context, content = []){
return await request(Api.UTILS_PERSPECTIVE, "GET", {}, {
input: content.join('\n\n')

View file

@ -6,6 +6,14 @@ const ObeliskHosts = Object.freeze({
const ObeliskApi = Object.freeze({
HOST: ObeliskHosts.prod,
GOOGLE_BARD: "/parrot/v1/google:bard",
GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
GOOGLE_PALM2: "/parrot/v1/google:palm2",
OPENAI_CHATGPT: "/parrot/v1/openai:chatgpt",
OPENAI_GPT4: "/parrot/v1/openai:gpt4",
SUMMARIZE_WEBPAGES: "/flamingo/v1/web:summarize"
})

View file

@ -40,6 +40,48 @@ async function request(path, type, headers, args, host) {
throw new Error("unsupported, must either use GET or POST");
}
// GENERATIVEAI
// Submits `input` to the Google Bard endpoint and resolves with the raw response.
module.exports.bard = async function(context, input){
  const payload = { input };
  return await request(ObeliskApi.GOOGLE_BARD, "POST", {}, payload);
}
// Runs a Gemini Pro text generation request with the given prompt.
module.exports.gemini = async function(context, prompt){
  const payload = { prompt };
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO, "POST", {}, payload);
}
// Runs Gemini Pro Vision against the image at `url` with the text `input`.
module.exports.geminiVision = async function(context, input, url){
  const payload = { input, url };
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO_VISION, "POST", {}, payload);
}
// Submits a PaLM 2 chat request with a system prompt and user input.
module.exports.palm2 = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.GOOGLE_PALM2, "POST", {}, payload);
}
// Submits a ChatGPT request with a system prompt and user input.
module.exports.chatgpt = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.OPENAI_CHATGPT, "POST", {}, payload);
}
// Submits a GPT-4 request with a system prompt and user input.
module.exports.gpt4 = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.OPENAI_GPT4, "POST", {}, payload);
}
// FLAMINGO
module.exports.summarizeWebpage = async function(context, url){
return await request(ObeliskApi.SUMMARIZE_WEBPAGES, "POST", {}, {
url

View file

@ -152,13 +152,17 @@ const Statics = Object.freeze({
file: "brands/_clyde/clyde.png",
revision: 0
},
ai_gemini: {
file: "icons/aiv2/gemini_spark.png",
revision: 0
},
ai_palm_idle: {
file: "icons/core/ico_notice_palm_idle.png",
revision: 0
},
ai_summary: {
file: "icons/core/ico_notice_summary.png",
revision: 0
file: "icons/flamingo/web_summary.png",
revision: 1
},
warning: {
file: "icons/core/ico_notice_warning.png",
@ -209,6 +213,7 @@ module.exports.STATIC_ICONS = Object.freeze({
ai_bard_idle: staticAsset(Statics.icons.ai_bard_idle),
ai_clyde: staticAsset(Statics.icons.ai_clyde),
ai_clyde_idle: staticAsset(Statics.icons.ai_clyde_idle),
ai_gemini: staticAsset(Statics.icons.ai_gemini),
ai_palm_idle: staticAsset(Statics.icons.ai_palm_idle),
ai_summary: staticAsset(Statics.icons.ai_summary),
warning: staticAsset(Statics.icons.warning)