From 4733de1c5d46ef7f07ce9a4ff9004cf3a1e1382e Mon Sep 17 00:00:00 2001
From: derpystuff <3515180-derpystuff@users.noreply.gitlab.com>
Date: Fri, 21 Apr 2023 21:14:38 +0200
Subject: [PATCH] add stablelm

---
 commands/interaction/slash/gpt.js             |  1 +
 .../interaction/subcommands/gpt/alpaca.js     |  2 +-
 .../interaction/subcommands/gpt/stablelm.js   | 52 +++++++++++++++++++
 labscore/constants.js                         |  6 ++-
 4 files changed, 58 insertions(+), 3 deletions(-)
 create mode 100644 commands/interaction/subcommands/gpt/stablelm.js

diff --git a/commands/interaction/slash/gpt.js b/commands/interaction/slash/gpt.js
index bf98fdd..c6e3fd0 100644
--- a/commands/interaction/slash/gpt.js
+++ b/commands/interaction/slash/gpt.js
@@ -3,6 +3,7 @@ module.exports = {
   name: 'gpt',
   options: [
     require('../subcommands/gpt/chatgpt'),
+    require('../subcommands/gpt/stablelm'),
     require('../subcommands/gpt/davinci3'),
     require('../subcommands/gpt/claude'),
     require('../subcommands/gpt/claude-instant'),
diff --git a/commands/interaction/subcommands/gpt/alpaca.js b/commands/interaction/subcommands/gpt/alpaca.js
index 3abd7d4..b8a0483 100644
--- a/commands/interaction/subcommands/gpt/alpaca.js
+++ b/commands/interaction/subcommands/gpt/alpaca.js
@@ -22,7 +22,7 @@ module.exports = {
     }
   ],
   run: async (context, args) => {
-    const MODEL = "replicate:alpaca-7b"
+    const MODEL = "replicate:replicate/alpaca-7b"
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
diff --git a/commands/interaction/subcommands/gpt/stablelm.js b/commands/interaction/subcommands/gpt/stablelm.js
new file mode 100644
index 0000000..96212c1
--- /dev/null
+++ b/commands/interaction/subcommands/gpt/stablelm.js
@@ -0,0 +1,52 @@
+const { Constants } = require('detritus-client');
+const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;
+
+const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");
+const superagent = require('superagent')
+
+const { createEmbed } = require('../../../../labscore/utils/embed');
+const { codeblock } = require('../../../../labscore/utils/markdown');
+const { format } = require('../../../../labscore/utils/ansi');
+
+module.exports = {
+  description: 'StableLM (Stability AI stablelm-tuned-alpha-7b)',
+  name: 'stablelm',
+  type: ApplicationCommandOptionTypes.SUB_COMMAND,
+  options: [
+    {
+      name: 'prompt',
+      description: 'Prompt',
+      type: ApplicationCommandOptionTypes.STRING,
+      required: true,
+      maxLength: 256
+    }
+  ],
+  run: async (context, args) => {
+    const MODEL = "replicate:stability-ai/stablelm-tuned-alpha-7b"
+    try{
+      let s = Date.now()
+      await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
+
+      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
+        .query({
+          model: MODEL,
+          prompt: args.prompt
+        })
+
+      await context.editOrRespond({
+        embeds: [createEmbed("default", context, {
+          footer: {
+            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
+            text: `${AI_GPT_MODEL_CONFIG[MODEL].name} • ${context.application.name}`,
+          },
+          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g,'\n')])
+        })]
+      })
+    }catch(e){
+      console.log(e)
+      await context.editOrRespond({
+        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
+      })
+    }
+  },
+};
\ No newline at end of file
diff --git a/labscore/constants.js b/labscore/constants.js
index 364ff41..684c163 100644
--- a/labscore/constants.js
+++ b/labscore/constants.js
@@ -301,7 +301,8 @@ module.exports.MICROSOFT_VOICE_CONFIG = {
 module.exports.AI_GPT_MODEL_CONFIG = {
   "anthropic:claude-instant-v1": { name: "Anthropic claude-instant-v1", icon: "anthropic" },
   "anthropic:claude-v1": { name: "Anthropic claude-v1", icon: "anthropic" },
-  "replicate:alpaca-7b": { name: "Replicate alpaca-7b", icon: "replicate" },
+  "replicate:replicate/alpaca-7b": { name: "Replicate alpaca-7b", icon: "replicate" },
+  "replicate:stability-ai/stablelm-tuned-alpha-7b": { name: "Stability AI stablelm-tuned-alpha-7b", icon: "stability" },
   "huggingface:bigscience/bloomz": { name: "HuggingFace bigscience/bloomz", icon: "huggingface" },
   "huggingface:google/flan-t5-xxl": { name: "HuggingFace google/flan-t5-xxl", icon: "huggingface" },
   "huggingface:google/flan-ul2": { name: "HuggingFace google/flan-ul2", icon: "huggingface" },
@@ -319,7 +320,8 @@ module.exports.AI_GPT_MODELS = [
   { name: "OpenAI gpt-3.5-turbo (ChatGPT)", value: "openai:gpt-3.5-turbo" },
   { name: "Anthropic claude-instant-v1", value: "anthropic:claude-instant-v1" },
   { name: "Anthropic claude-v1", value: "anthropic:claude-v1" },
-  { name: "Replicate alpaca-7b", value: "replicate:alpaca-7b" },
+  { name: "Replicate alpaca-7b", value: "replicate:replicate/alpaca-7b" },
+  { name: "Replicate stablelm-tuned-alpha-7b", value: "replicate:stability-ai/stablelm-tuned-alpha-7b" },
   { name: "HuggingFace bigscience/bloomz", value: "huggingface:bigscience/bloomz" },
   { name: "HuggingFace google/flan-t5-xxl", value: "huggingface:google/flan-t5-xxl" },
   { name: "HuggingFace google/flan-ul2", value: "huggingface:google/flan-ul2" },
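
For reference, below is a minimal standalone sketch of the request the new stablelm subcommand issues. It assumes the same AI_SERVER environment variable the bot reads and a GET /gpt endpoint that returns JSON shaped like { response: "..." }, as the subcommand code implies; the example prompt and the main() wrapper are illustrative only and are not part of the patch.

// Hypothetical standalone sketch (not part of the patch): reproduces the request
// that run() in commands/interaction/subcommands/gpt/stablelm.js sends.
// Assumption: AI_SERVER points at the bot's AI backend and GET /gpt returns
// JSON shaped like { response: "..." }.
const superagent = require('superagent');

const MODEL = "replicate:stability-ai/stablelm-tuned-alpha-7b";

async function main() {
  const res = await superagent
    .get(`${process.env.AI_SERVER}/gpt`)
    .query({
      model: MODEL,
      // Illustrative prompt; the slash command caps this option at 256 characters.
      prompt: "Write a haiku about language models."
    });

  // Mirror the subcommand's post-processing: truncate to 1024 characters and
  // unescape literal "\n" sequences before displaying.
  const text = res.body.response.substr(0, 1024).replace(/\\n/g, '\n');
  console.log(text);
}

main().catch(console.error);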