// mirror of https://gitlab.com/bignutty/labscore.git
// synced 2025-06-07 13:43:06 -04:00
const { PERMISSION_GROUPS } = require("#constants");
const { LlmModelsGenerate } = require("#obelisk");

const { createEmbed } = require("#utils/embed");
const { acknowledge } = require("#utils/interactions");
const { stringwrap, iconPill, smallIconPill } = require("#utils/markdown");
const { editOrReply } = require("#utils/message");
const { STATIC_ICONS } = require("#utils/statics");
const { hasFeature } = require("#utils/testing");

module.exports = {
|
|
name: 'gemini-pro',
|
|
label: 'text',
|
|
aliases: ['gpro'],
|
|
metadata: {
|
|
description: `${iconPill("generative_ai", "LIMITED TESTING")}\n${smallIconPill("reply", "Supports Replies")}\n\nRun Gemini 1.0 Pro with a custom prompt.`,
|
|
description_short: 'Gemini-1.0-Pro',
|
|
examples: ['gem why do they call it oven when you of in the cold food of out hot eat the food'],
|
|
category: 'limited',
|
|
usage: 'gemini-pro <prompt>'
|
|
},
|
|
permissionsClient: [...PERMISSION_GROUPS.baseline, ...PERMISSION_GROUPS.attachments],
|
|
run: async (context, args) => {
|
|
if(!await hasFeature(context, "ai/gemini/text")) return;
|
|
await acknowledge(context);
|
|
|
|
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
|
|
|
|
let input = args.text;
|
|
|
|
try{
|
|
await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_gemini))
|
|
|
|
let res = await LlmModelsGenerate(context, "gemini-1.5-pro", input, "BLOCK_NONE")
|
|
|
|
let description = []
|
|
let files = [];
|
|
|
|
if(res.response.body.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
|
|
|
|
let output = res.response.body.candidates[0]?.output
|
|
if(!output) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
|
|
|
|
if(output.length <= 4000) description.push(output)
|
|
else {
|
|
files.push({
|
|
filename: `gemini.${Date.now().toString(36)}.txt`,
|
|
value: Buffer.from(output)
|
|
})
|
|
}
|
|
|
|
return editOrReply(context, {
|
|
embeds:[createEmbed("defaultNoFooter", context, {
|
|
author: {
|
|
name: stringwrap(input, 50, false),
|
|
iconUrl: STATIC_ICONS.ai_gemini
|
|
},
|
|
description: description.join('\n'),
|
|
footer: {
|
|
text: `Generative AI is experimental • Data submitted to Gemini may be used by Google for training.`
|
|
}
|
|
})],
|
|
files
|
|
})
|
|
} catch(e){
|
|
console.log(e)
|
|
if(e.response?.body?.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
|
|
return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
|
|
}
|
|
}
|
|
}; |