From 528baecb1df9ac6b6e05aac339a658018a449b11 Mon Sep 17 00:00:00 2001
From: bignutty <3515180-bignutty@users.noreply.gitlab.com>
Date: Wed, 15 May 2024 16:58:42 +0200
Subject: [PATCH] rebrand
---
commands/message/genai/bard.js | 159 ---------------------------
commands/message/genai/gemini-pro.js | 70 ++++++++++++
commands/message/genai/gemini.js | 150 ++++++++++++++++++++-----
labscore/utils/statics.js | 8 +-
4 files changed, 194 insertions(+), 193 deletions(-)
delete mode 100644 commands/message/genai/bard.js
create mode 100644 commands/message/genai/gemini-pro.js
diff --git a/commands/message/genai/bard.js b/commands/message/genai/bard.js
deleted file mode 100644
index 5bf521c..0000000
--- a/commands/message/genai/bard.js
+++ /dev/null
@@ -1,159 +0,0 @@
-const { createEmbed } = require('../../../labscore/utils/embed')
-const { editOrReply } = require('../../../labscore/utils/message')
-
-const { STATIC_ICONS } = require('../../../labscore/utils/statics');
-
-const superagent = require('superagent')
-const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
-
-const { Permissions, InteractionCallbackTypes } = require("detritus-client/lib/constants");
-const { Components } = require('detritus-client/lib/utils');
-const { LlmPrivateBard } = require('../../../labscore/api/obelisk');
-const { hasFeature } = require('../../../labscore/utils/testing');
-
-module.exports = {
- name: 'bard',
- label: 'text',
- metadata: {
- description: `${iconPill("generative_ai", "LIMITED TESTING")}\n\nChat with <:bard:1163200801871765504> Bard.`,
- description_short: 'Chat with Bard.',
- examples: ['bard How many otter species are there?'],
- category: 'limited',
- usage: 'bard '
- },
- args: [],
- permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.ATTACH_FILES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.READ_MESSAGE_HISTORY],
- run: async (context, args) => {
- if(!await hasFeature(context, "ai/bard")) return;
-
- context.triggerTyping();
- if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
-
- let input = args.text;
-
- try{
- await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_bard))
-
- let res = await LlmPrivateBard(context, input)
- res = res.response
-
- let description = []
- let files = [];
-
- if(!res.body.candidates) return editOrReply(context, createEmbed("error", context, `Bard returned an error. Try again later.`))
-
- if(res.body.candidates[0].length <= 4000) description.push(res.body.candidates[0])
- else {
- files.push({
- filename: `chat.${Date.now().toString(36)}.txt`,
- value: Buffer.from(res.body.candidates[0])
- })
- }
-
- if(!res.body.candidates || res.body.candidates?.length <= 1) return editOrReply(context, {
- embeds:[createEmbed("defaultNoFooter", context, {
- author: {
- name: stringwrap(args.text, 50, false),
- iconUrl: STATIC_ICONS.ai_bard_idle
- },
- description: description.join('\n'),
- footer: {
- text: `Bard • Generative AI is experimental. Response may be factually wrong or completely made up.`
- }
- })],
- files
- })
- // Draft support
- else {
-
- let currentView;
-
- const components = new Components({
- timeout: 100000,
- run: async (ctx) => {
- if (ctx.userId !== context.userId) return await ctx.respond(InteractionCallbackTypes.DEFERRED_UPDATE_MESSAGE);
-
- // this sucks but works, ensures the newly selected option stays selected
- for (let i = 0; i < components.components[0].components[0].options.length; i++) {
- components.components[0].components[0].options[i].default = (components.components[0].components[0].options[i].value == ctx.data.values[0])
- }
-
- draft = res.body.candidates[parseInt(ctx.data.values[0].replace('draft-', ''))]
-
- description = []
- files = [];
-
- if(draft.length <= 4000) description.push(draft)
- else {
- files.push({
- filename: `chat.${Date.now().toString(36)}.txt`,
- value: Buffer.from(draft)
- })
- }
-
- currentView = createEmbed("defaultNoFooter", context, {
- author: {
- name: stringwrap(args.text, 50, false),
- iconUrl: STATIC_ICONS.ai_bard_idle
- },
- description: description.join('\n'),
- footer: {
- text: `Bard • Generative AI is experimental. Response may be factually wrong or completely made up.`
- }
- })
-
- await ctx.editOrRespond({
- embeds:[currentView],
- files,
- components
- })
- }
- })
-
- let draftOptions = [];
- for (let i = 0; i < res.body.candidates.length; i++) {
- draftOptions.push({
- label: `Draft ${i + 1}: ${stringwrap(res.body.candidates[i], 50, false)}`,
- value: "draft-" + (i),
- default: false
- })
- }
-
- components.addSelectMenu({
- placeholder: "View other drafts",
- customId: "bard-drafts",
- options: draftOptions
- })
-
- setTimeout(()=>{
- editOrReply(context, {
- embeds:[currentView],
- components:[]
- })
- }, 100000)
-
- currentView = createEmbed("defaultNoFooter", context, {
- author: {
- name: stringwrap(args.text, 50, false),
- iconUrl: STATIC_ICONS.ai_bard_idle
- },
- description: description.join('\n'),
- footer: {
- text: `Bard • Generative AI is experimental. Response may be factually wrong or completely made up.`
- }
- })
-
- return editOrReply(context, {
- embeds:[currentView],
- files,
- components
- })
- }
- }catch(e){
- if(e.response?.body?.message) return editOrReply(context, createEmbed("warning", context, e.response.body.message))
-
- console.log(e)
- return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
- }
- }
-};
\ No newline at end of file
diff --git a/commands/message/genai/gemini-pro.js b/commands/message/genai/gemini-pro.js
new file mode 100644
index 0000000..074849d
--- /dev/null
+++ b/commands/message/genai/gemini-pro.js
@@ -0,0 +1,70 @@
+const { gemini, LlmModelsGenerate } = require("../../../labscore/api/obelisk");
+const { createEmbed } = require("../../../labscore/utils/embed");
+const { editOrReply } = require("../../../labscore/utils/message");
+
+const { Permissions } = require("detritus-client/lib/constants");
+
+const { STATIC_ICONS } = require("../../../labscore/utils/statics");
+const { stringwrap, iconPill, smallIconPill } = require("../../../labscore/utils/markdown");
+const { hasFeature } = require("../../../labscore/utils/testing");
+module.exports = {
+ name: 'gemini-pro',
+ label: 'text',
+ aliases: ['gpro'],
+ metadata: {
+ description: `${iconPill("generative_ai", "LIMITED TESTING")}\n${smallIconPill("reply", "Supports Replies")}\n\nRun Gemini 1.5 Pro with a custom prompt.`,
+ description_short: 'Gemini-1.5-Pro',
+ examples: ['gpro why do they call it oven when you of in the cold food of out hot eat the food'],
+ category: 'limited',
+ usage: 'gemini-pro '
+ },
+ permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.ATTACH_FILES, Permissions.READ_MESSAGE_HISTORY],
+ run: async (context, args) => {
+ if(!await hasFeature(context, "ai/gemini/text")) return;
+ context.triggerTyping();
+
+ if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
+
+ let input = args.text;
+
+ try{
+ await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_gemini))
+
+ let res = await LlmModelsGenerate(context, "gemini-1.5-pro", input, "BLOCK_NONE")
+
+ let description = []
+ let files = [];
+
+ if(res.response.body.message) return editOrReply(context, createEmbed("error", context, res.response.body.message))
+
+ let output = res.response.body.candidates[0]?.output
+ if(!output) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
+
+ if(output.length <= 4000) description.push(output)
+ else {
+ files.push({
+ filename: `gemini.${Date.now().toString(36)}.txt`,
+ value: Buffer.from(output)
+ })
+ }
+
+ return editOrReply(context, {
+ embeds:[createEmbed("defaultNoFooter", context, {
+ author: {
+ name: stringwrap(input, 50, false),
+ iconUrl: STATIC_ICONS.ai_gemini
+ },
+ description: description.join('\n'),
+ footer: {
+ text: `Generative AI is experimental • Data submitted to Gemini may be used by Google for training.`
+ }
+ })],
+ files
+ })
+ } catch(e){
+ console.log(e)
+ if(e.response?.body?.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
+ return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
+ }
+ }
+};
\ No newline at end of file
diff --git a/commands/message/genai/gemini.js b/commands/message/genai/gemini.js
index 4a4e010..b8ebd36 100644
--- a/commands/message/genai/gemini.js
+++ b/commands/message/genai/gemini.js
@@ -1,69 +1,159 @@
-const { gemini, LlmModelsGenerate } = require("../../../labscore/api/obelisk");
-const { createEmbed } = require("../../../labscore/utils/embed");
-const { editOrReply } = require("../../../labscore/utils/message");
+const { createEmbed } = require('../../../labscore/utils/embed')
+const { editOrReply } = require('../../../labscore/utils/message')
-const { Permissions } = require("detritus-client/lib/constants");
+const { STATIC_ICONS } = require('../../../labscore/utils/statics');
+
+const superagent = require('superagent')
+const { iconPill, stringwrap } = require('../../../labscore/utils/markdown')
+
+const { Permissions, InteractionCallbackTypes } = require("detritus-client/lib/constants");
+const { Components } = require('detritus-client/lib/utils');
+const { LlmPrivateBard } = require('../../../labscore/api/obelisk');
+const { hasFeature } = require('../../../labscore/utils/testing');
-const { STATIC_ICONS } = require("../../../labscore/utils/statics");
-const { stringwrap, iconPill, smallIconPill } = require("../../../labscore/utils/markdown");
-const { hasFeature } = require("../../../labscore/utils/testing");
module.exports = {
name: 'gemini',
label: 'text',
- aliases: ['gem'],
+ aliases: ["bard","gem"],
metadata: {
- description: `${iconPill("generative_ai", "LIMITED TESTING")}\n${smallIconPill("reply", "Supports Replies")}\n\nRun Gemini Pro with a custom prompt.`,
- description_short: 'Gemini',
- examples: ['gem why do they call it oven when you of in the cold food of out hot eat the food'],
+ description: `${iconPill("generative_ai", "LIMITED TESTING")}\n\nChat with <:icoext_gemini:1240316089515249715> Gemini.`,
+ description_short: 'Chat with Gemini.',
+ examples: ['gemini How many otter species are there?'],
category: 'limited',
- usage: 'gemini '
+ usage: 'gemini '
},
- permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.ATTACH_FILES, Permissions.READ_MESSAGE_HISTORY],
+ args: [],
+ permissionsClient: [Permissions.EMBED_LINKS, Permissions.SEND_MESSAGES, Permissions.ATTACH_FILES, Permissions.USE_EXTERNAL_EMOJIS, Permissions.READ_MESSAGE_HISTORY],
run: async (context, args) => {
- if(!await hasFeature(context, "ai/gemini/text")) return;
- context.triggerTyping();
+ if(!await hasFeature(context, "ai/bard")) return;
+ context.triggerTyping();
if(!args.text) return editOrReply(context, createEmbed("warning", context, `Missing Parameter (text).`))
let input = args.text;
try{
- await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_gemini))
+ await editOrReply(context, createEmbed("ai_custom", context, STATIC_ICONS.ai_bard))
- let res = await LlmModelsGenerate(context, "gemini-1.5-pro", input, "BLOCK_NONE")
+ let res = await LlmPrivateBard(context, input)
+ res = res.response
let description = []
let files = [];
- if(res.response.body.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
+ if(!res.body.candidates) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
- let output = res.response.body.candidates[0]?.output
- if(!output) return editOrReply(context, createEmbed("error", context, `Gemini returned an error. Try again later.`))
-
- if(output.length <= 4000) description.push(output)
+ if(res.body.candidates[0].length <= 4000) description.push(res.body.candidates[0])
else {
files.push({
- filename: `gemini.${Date.now().toString(36)}.txt`,
- value: Buffer.from(output)
+ filename: `chat.${Date.now().toString(36)}.txt`,
+ value: Buffer.from(res.body.candidates[0])
})
}
- return editOrReply(context, {
+ if(!res.body.candidates || res.body.candidates?.length <= 1) return editOrReply(context, {
embeds:[createEmbed("defaultNoFooter", context, {
author: {
- name: stringwrap(input, 50, false),
- iconUrl: STATIC_ICONS.ai_gemini
+ name: stringwrap(args.text, 50, false),
+ iconUrl: STATIC_ICONS.ai_bard_idle
},
description: description.join('\n'),
footer: {
- text: `Generative AI is experimental • Data submitted to Gemini may be used by Google for training.`
+ text: `Gemini • Gemini may display inaccurate info, so double-check its responses.`
}
})],
files
})
- } catch(e){
+ // Draft support
+ else {
+
+ let currentView;
+
+ const components = new Components({
+ timeout: 100000,
+ run: async (ctx) => {
+ if (ctx.userId !== context.userId) return await ctx.respond(InteractionCallbackTypes.DEFERRED_UPDATE_MESSAGE);
+
+ // this sucks but works, ensures the newly selected option stays selected
+ for (let i = 0; i < components.components[0].components[0].options.length; i++) {
+ components.components[0].components[0].options[i].default = (components.components[0].components[0].options[i].value == ctx.data.values[0])
+ }
+
+ draft = res.body.candidates[parseInt(ctx.data.values[0].replace('draft-', ''))]
+
+ description = []
+ files = [];
+
+ if(draft.length <= 4000) description.push(draft)
+ else {
+ files.push({
+ filename: `chat.${Date.now().toString(36)}.txt`,
+ value: Buffer.from(draft)
+ })
+ }
+
+ currentView = createEmbed("defaultNoFooter", context, {
+ author: {
+ name: stringwrap(args.text, 50, false),
+ iconUrl: STATIC_ICONS.ai_bard_idle
+ },
+ description: description.join('\n'),
+ footer: {
+ text: `Gemini • Gemini may display inaccurate info, so double-check its responses.`
+ }
+ })
+
+ await ctx.editOrRespond({
+ embeds:[currentView],
+ files,
+ components
+ })
+ }
+ })
+
+ let draftOptions = [];
+ for (let i = 0; i < res.body.candidates.length; i++) {
+ draftOptions.push({
+ label: `Draft ${i + 1}: ${stringwrap(res.body.candidates[i], 50, false)}`,
+ value: "draft-" + (i),
+ default: false
+ })
+ }
+
+ components.addSelectMenu({
+ placeholder: "View other drafts",
+ customId: "bard-drafts",
+ options: draftOptions
+ })
+
+ setTimeout(()=>{
+ editOrReply(context, {
+ embeds:[currentView],
+ components:[]
+ })
+ }, 100000)
+
+ currentView = createEmbed("defaultNoFooter", context, {
+ author: {
+ name: stringwrap(args.text, 50, false),
+ iconUrl: STATIC_ICONS.ai_bard_idle
+ },
+ description: description.join('\n'),
+ footer: {
+ text: `Gemini • Gemini may display inaccurate info, so double-check its responses.`
+ }
+ })
+
+ return editOrReply(context, {
+ embeds:[currentView],
+ files,
+ components
+ })
+ }
+ }catch(e){
+ if(e.response?.body?.message) return editOrReply(context, createEmbed("warning", context, e.response.body.message))
+
console.log(e)
- if(e.response?.body?.message) return editOrReply(context, createEmbed("error", context, e.response.body.message))
return editOrReply(context, createEmbed("error", context, `Unable to generate response.`))
}
}
diff --git a/labscore/utils/statics.js b/labscore/utils/statics.js
index 2bf1e73..729e211 100644
--- a/labscore/utils/statics.js
+++ b/labscore/utils/statics.js
@@ -141,12 +141,12 @@ const Statics = Object.freeze({
revision: 0
},
ai_bard: {
- file: "icons/core/ico_bard_loading.gif",
- revision: 0
+ file: "_gemini/gspark_processing.zk25zt28.gif",
+ revision: 1
},
ai_bard_idle: {
- file: "icons/core/ico_bard_idle.gif",
- revision: 0
+ file: "_gemini/gspark_idle.10dm7yti7.gif",
+ revision: 1
},
ai_clyde: {
file: "brands/_clyde/clyde_generating.gif",