vercel wins

derpystuff 2023-04-23 21:09:27 +02:00
parent 27bc88ea15
commit e0aadaa2cf
7 changed files with 0 additions and 320 deletions

View file

@@ -1,12 +0,0 @@
module.exports = {
  description: 'Generate text via Large Language Models',
  name: 'gpt',
  options: [
    require('../subcommands/gpt/chatgpt'),
    require('../subcommands/gpt/stablelm'),
    require('../subcommands/gpt/davinci3'),
    //require('../subcommands/gpt/claude'),
    //require('../subcommands/gpt/claude-instant'),
    require('../subcommands/gpt/alpaca')
  ]
};

View file

@@ -1,52 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');
const { format } = require('../../../../labscore/utils/ansi');

module.exports = {
  description: 'Alpaca-7b (Replicate alpaca-7b)',
  name: 'alpaca',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "replicate:replicate/alpaca-7b"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [format(args.prompt, "cyan") + res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};
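Note: every subcommand in this commit resolves its footer name and icon through AI_GPT_MODEL_CONFIG, keyed by the provider-prefixed model string. That constant lives in labscore/constants and is not part of this diff; the sketch below only illustrates the shape the deleted files assume, and the display names and icon slugs are placeholders rather than the real entries.

// Hypothetical shape of AI_GPT_MODEL_CONFIG (labscore/constants) as consumed above.
// Keys must match the MODEL strings; names and icon slugs here are illustrative placeholders.
module.exports.AI_GPT_MODEL_CONFIG = {
  "openai:gpt-3.5-turbo": { name: "ChatGPT", icon: "chatgpt" },
  "openai:text-davinci-003": { name: "GPT-3", icon: "gpt3" },
  "anthropic:claude-v1": { name: "Claude", icon: "claude" },
  "anthropic:claude-instant-v1": { name: "Claude Instant", icon: "claude" },
  "replicate:replicate/alpaca-7b": { name: "Alpaca 7b", icon: "alpaca" },
  "replicate:stability-ai/stablelm-tuned-alpha-7b": { name: "StableLM", icon: "stablelm" }
};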

View file

@@ -1,51 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');

module.exports = {
  description: 'ChatGPT (OpenAI gpt-3.5-turbo)',
  name: 'chatgpt',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "openai:gpt-3.5-turbo"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};

View file

@@ -1,51 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');

module.exports = {
  description: 'Claude Instant (Anthropic claude-instant-v1)',
  name: 'claude-instant',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "anthropic:claude-instant-v1"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};

View file

@@ -1,51 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');

module.exports = {
  description: 'Claude (Anthropic claude-v1)',
  name: 'claude',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "anthropic:claude-v1"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};

View file

@@ -1,51 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');

module.exports = {
  description: 'GPT-3 (OpenAI text-davinci-003)',
  name: 'gpt3',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "openai:text-davinci-003"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};

View file

@@ -1,52 +0,0 @@
const { Constants } = require('detritus-client');
const { InteractionCallbackTypes, ApplicationCommandOptionTypes } = Constants;

const { AI_GPT_MODEL_CONFIG } = require("../../../../labscore/constants");

const superagent = require('superagent')

const { createEmbed } = require('../../../../labscore/utils/embed');
const { codeblock } = require('../../../../labscore/utils/markdown');
const { format } = require('../../../../labscore/utils/ansi');

module.exports = {
  description: 'StableLM (Stability AI stablelm-tuned-alpha-7b)',
  name: 'stablelm',
  type: ApplicationCommandOptionTypes.SUB_COMMAND,
  options: [
    {
      name: 'prompt',
      description: 'Prompt',
      type: ApplicationCommandOptionTypes.STRING,
      required: true,
      maxLength: 256
    }
  ],
  run: async (context, args) => {
    const MODEL = "replicate:stability-ai/stablelm-tuned-alpha-7b"
    try {
      let s = Date.now()
      await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

      let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
        .query({
          model: MODEL,
          prompt: args.prompt
        })

      await context.editOrRespond({
        embeds: [createEmbed("default", context, {
          footer: {
            iconUrl: `https://derpystuff.gitlab.io/webstorage4/v2/assets/icons/ai/ico_ai_${AI_GPT_MODEL_CONFIG[MODEL].icon}.png`,
            text: `${AI_GPT_MODEL_CONFIG[MODEL].name}${context.application.name}`,
          },
          description: codeblock("ansi", [res.body.response.substr(0, 1024).replace(/\\n/g, '\n')])
        })]
      })
    } catch (e) {
      console.log(e)
      await context.editOrRespond({
        embeds: [createEmbed("error", context, "Unable to generate response. Try again in a bit.")]
      })
    }
  },
};
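Note: all six deleted subcommands share the same backend contract: a GET request to ${process.env.AI_SERVER}/gpt with model and prompt query parameters, returning JSON with a response string. That server is external to this repository; the Express sketch below is only an assumed illustration of that contract. The route path and response field come from the code above, while everything else, including generateText, is hypothetical.

// Minimal sketch of the external AI_SERVER contract the deleted commands relied on.
// GET /gpt?model=<provider:model>&prompt=<text>  ->  { "response": "<generated text>" }
const express = require('express');
const app = express();

// Hypothetical stand-in for the real provider dispatch (OpenAI / Anthropic / Replicate).
async function generateText(model, prompt) {
  return `[${model}] placeholder completion for: ${prompt}`;
}

app.get('/gpt', async (req, res) => {
  const { model, prompt } = req.query;
  if (!model || !prompt) return res.status(400).json({ error: 'model and prompt are required' });
  res.json({ response: await generateText(model, prompt) });
});

app.listen(8080);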