Mirror of https://gitlab.com/bignutty/labscore.git (synced 2025-06-08 14:13:02 -04:00)
remove ugly ai loading embed
parent 3aeb9bfdc7
commit f11cb26560
7 changed files with 5 additions and 29 deletions
@@ -26,9 +26,7 @@ module.exports = {
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
-      await context.editOrRespond({
-        embeds: [createEmbed("loading_ai", context)]
-      })
 
       let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
         .query({
           model: MODEL,
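After this change, each of the affected commands relies on Discord's built-in deferred-response indicator instead of an interim "Generating..." embed: the handler defers the interaction, queries the AI backend, and only then edits in the final result. A minimal sketch of that flow, assuming a detritus-client interaction context and hedging the import path, MODEL value, query parameter, and response field as hypothetical:

// Sketch only - not the exact labscore command file.
const superagent = require('superagent')
const { Constants } = require('detritus-client')
const { InteractionCallbackTypes } = Constants

const MODEL = 'example-model' // hypothetical model id; the real constant lives in the command file

async function run(context, args) {
  try {
    // Start timestamp, as the command files above record before the request.
    let s = Date.now()

    // Defer the interaction; Discord shows its own "thinking" state,
    // so no custom loading embed is sent anymore.
    await context.respond({ data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE })

    // Query the AI backend configured via the AI_SERVER environment variable.
    let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
      .query({
        model: MODEL,
        prompt: args.text // hypothetical query parameter name
      })

    // Replace the deferred response with the final output.
    await context.editOrRespond({
      content: res.body.response // hypothetical response field name
    })
  } catch (e) {
    // Error handling shape is a guess; the real commands use their own error embeds.
    await context.editOrRespond({ content: "The AI backend could not be reached." })
  }
}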
@@ -25,9 +25,7 @@ module.exports = {
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
-      await context.editOrRespond({
-        embeds: [createEmbed("loading_ai", context)]
-      })
 
       let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
         .query({
           model: MODEL,
@@ -25,9 +25,7 @@ module.exports = {
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
-      await context.editOrRespond({
-        embeds: [createEmbed("loading_ai", context)]
-      })
 
       let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
         .query({
           model: MODEL,
@@ -25,9 +25,7 @@ module.exports = {
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
-      await context.editOrRespond({
-        embeds: [createEmbed("loading_ai", context)]
-      })
 
       let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
         .query({
           model: MODEL,
@@ -25,9 +25,7 @@ module.exports = {
     try{
       let s = Date.now()
       await context.respond({data: {}, type: InteractionCallbackTypes.DEFERRED_CHANNEL_MESSAGE_WITH_SOURCE})
-      await context.editOrRespond({
-        embeds: [createEmbed("loading_ai", context)]
-      })
 
       let res = await superagent.get(`${process.env.AI_SERVER}/gpt`)
         .query({
           model: MODEL,
@@ -69,15 +69,6 @@ const embedTypes = Object.freeze({
       },
       color: COLORS.embed
     }
   },
-  "loading_ai": (context) => {
-    return {
-      author: {
-        iconUrl: STATIC_ICONS.loading_ai,
-        name: `Generating...`
-      },
-      color: COLORS.brand
-    }
-  }
 })
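The embedTypes table above is a frozen map of builder functions keyed by embed name; removing the "loading_ai" entry is what makes every createEmbed("loading_ai", context) call site in the command hunks above obsolete. A minimal sketch of the lookup pattern this implies, with hypothetical color values, entry name, and createEmbed internals:

// Sketch only - the real createEmbed in labscore may differ.
const COLORS = Object.freeze({ embed: 0x222222, brand: 0x5865f2 }) // hypothetical values

const embedTypes = Object.freeze({
  // Each entry builds a partial embed object for the given command context.
  "default": (context) => {
    return {
      author: {
        iconUrl: context.user.avatarUrl, // hypothetical author icon source
        name: context.user.username
      },
      color: COLORS.embed
    }
  }
})

function createEmbed(type, context) {
  // A key that no longer exists (such as the removed "loading_ai") would fail here,
  // which is why the call sites were removed in the same commit.
  if (!embedTypes[type]) throw new Error(`Unknown embed type: ${type}`)
  return embedTypes[type](context)
}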
@@ -82,10 +82,6 @@ const Statics = Object.freeze({
       file: "icons/core/ico_notice_loading.gif",
       revision: 0
     },
-    loading_ai: {
-      file: "icons/ai/ico_ai_generating.gif",
-      revision: 0
-    },
     warning: {
       file: "icons/core/ico_notice_warning.png",
       revision: 0
@@ -124,6 +120,5 @@ module.exports.STATIC_ICONS = Object.freeze({
   adult: staticAsset(Statics.icons.adult),
   error: staticAsset(Statics.icons.error),
   loading: staticAsset(Statics.icons.loading),
-  loading_ai: staticAsset(Statics.icons.loading_ai),
   warning: staticAsset(Statics.icons.warning)
 })
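For reference, the Statics table pairs each icon with a file path and a revision, and staticAsset presumably turns that pair into an asset URL for the STATIC_ICONS export; this commit drops both the loading_ai entry and its exported URL. A minimal sketch under that assumption, with a hypothetical asset host and URL scheme:

// Sketch only - the real staticAsset helper may build its URLs differently.
const STATIC_ASSET_BASE = "https://cdn.example.invalid/static" // hypothetical asset host

const Statics = Object.freeze({
  icons: {
    loading: {
      file: "icons/core/ico_notice_loading.gif",
      revision: 0
    },
    warning: {
      file: "icons/core/ico_notice_warning.png",
      revision: 0
    }
  }
})

// Assumed scheme: file path plus a cache-busting revision parameter.
function staticAsset(asset) {
  return `${STATIC_ASSET_BASE}/${asset.file}?rev=${asset.revision}`
}

module.exports.STATIC_ICONS = Object.freeze({
  loading: staticAsset(Statics.icons.loading),
  warning: staticAsset(Statics.icons.warning)
})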