Merge branch 'main' of https://gitlab.com/bignutty/labscore into main

This commit is contained in:
derpystuff 2023-12-19 19:25:29 +01:00
commit 4798e60585
14 changed files with 255 additions and 72 deletions

View file

@@ -54,6 +54,7 @@ const Api = Object.freeze({
UTILS_GARFIELD: '/utils/garfield',
UTILS_INFERKIT: '/utils/inferkit',
UTILS_MAPKIT: '/utils/mapkit',
UTILS_OTTER: '/utils/otter',
UTILS_PERSPECTIVE: '/utils/perspective',
UTILS_SCREENSHOT: '/utils/screenshot',
UTILS_TEXTGENERATOR: '/utils/text-generator',

View file

@@ -304,7 +304,7 @@ module.exports.emojipedia = async function(context, emoji){
})
}
module.exports.garfield = async function(context, emoji){
module.exports.garfield = async function(context,){
return await request(Api.UTILS_GARFIELD, "GET", {}, {})
}
@@ -314,6 +314,10 @@ module.exports.inferkit = async function(context, input){
})
}
// Requests a random otter picture from the backend utility endpoint.
module.exports.otter = async function(context){
  const response = await request(Api.UTILS_OTTER, "GET", {}, {});
  return response;
}
module.exports.perspective = async function(context, content = []){
return await request(Api.UTILS_PERSPECTIVE, "GET", {}, {
input: content.join('\n\n')

View file

@@ -6,6 +6,14 @@ const ObeliskHosts = Object.freeze({
// Endpoint path table for the Obelisk backend, frozen to prevent runtime
// mutation. Paths appear to be resolved against HOST by a request() helper
// elsewhere in this file — TODO confirm.
const ObeliskApi = Object.freeze({
HOST: ObeliskHosts.prod,
// "parrot" service: generative model endpoints.
GOOGLE_BARD: "/parrot/v1/google:bard",
GOOGLE_GEMINI_PRO: "/parrot/v1/google:gemini",
GOOGLE_GEMINI_PRO_VISION: "/parrot/v1/google:geminiVision",
GOOGLE_PALM2: "/parrot/v1/google:palm2",
OPENAI_CHATGPT: "/parrot/v1/openai:chatgpt",
OPENAI_GPT4: "/parrot/v1/openai:gpt4",
// "flamingo" service: web utilities.
SUMMARIZE_WEBPAGES: "/flamingo/v1/web:summarize"
})

View file

@@ -40,6 +40,48 @@ async function request(path, type, headers, args, host) {
throw new Error("unsupported, must either use GET or POST");
}
// GENERATIVEAI
// Forwards `input` to the Google Bard endpoint and returns the raw response.
module.exports.bard = async function(context, input){
  const payload = { input };
  return await request(ObeliskApi.GOOGLE_BARD, "POST", {}, payload);
}
// Sends `prompt` to the Gemini Pro endpoint and returns the raw response.
module.exports.gemini = async function(context, prompt){
  const payload = { prompt };
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO, "POST", {}, payload);
}
// Sends text `input` together with an image `url` to the Gemini Pro Vision
// endpoint and returns the raw response.
module.exports.geminiVision = async function(context, input, url){
  const payload = { input, url };
  return await request(ObeliskApi.GOOGLE_GEMINI_PRO_VISION, "POST", {}, payload);
}
// Sends a system `prompt` and user `input` to the PaLM 2 endpoint.
module.exports.palm2 = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.GOOGLE_PALM2, "POST", {}, payload);
}
// Sends a system `prompt` and user `input` to the ChatGPT endpoint.
module.exports.chatgpt = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.OPENAI_CHATGPT, "POST", {}, payload);
}
// Sends a system `prompt` and user `input` to the GPT-4 endpoint.
module.exports.gpt4 = async function(context, prompt, input){
  const payload = { prompt, input };
  return await request(ObeliskApi.OPENAI_GPT4, "POST", {}, payload);
}
// FLAMINGO
module.exports.summarizeWebpage = async function(context, url){
return await request(ObeliskApi.SUMMARIZE_WEBPAGES, "POST", {}, {
url

View file

@@ -30,7 +30,7 @@ module.exports.timestamp = function(time, flag = "t"){
}
/**
 * Truncates `content` to at most `length` characters, appending "..." when cut.
 * When `newlines` is false, newline characters are replaced with spaces first.
 * @param {string} [content=""] - text to wrap
 * @param {number} length - maximum number of characters to keep
 * @param {boolean} [newlines=true] - keep newlines when true
 * @returns {string} the (possibly truncated) text
 */
module.exports.stringwrap = function(content = "", length, newlines = true){
  // Stale pre-change line removed (it first deleted the FIRST newline, then the
  // next statement spaced out another one). Use the g flag so ALL newlines are
  // replaced — /\n/ without it only touches the first occurrence.
  if(!newlines) content = content.replace(/\n/g, ' ')
  if(content.length > length) return content.substr(0, length) + '...';
  return content;
}

View file

@@ -152,13 +152,17 @@ const Statics = Object.freeze({
file: "brands/_clyde/clyde.png",
revision: 0
},
ai_gemini: {
file: "icons/aiv2/gemini_spark.png",
revision: 0
},
ai_palm_idle: {
file: "icons/core/ico_notice_palm_idle.png",
revision: 0
},
ai_summary: {
file: "icons/core/ico_notice_summary.png",
revision: 0
file: "icons/flamingo/web_summary.png",
revision: 1
},
warning: {
file: "icons/core/ico_notice_warning.png",
@@ -209,6 +213,7 @@ module.exports.STATIC_ICONS = Object.freeze({
ai_bard_idle: staticAsset(Statics.icons.ai_bard_idle),
ai_clyde: staticAsset(Statics.icons.ai_clyde),
ai_clyde_idle: staticAsset(Statics.icons.ai_clyde_idle),
ai_gemini: staticAsset(Statics.icons.ai_gemini),
ai_palm_idle: staticAsset(Statics.icons.ai_palm_idle),
ai_summary: staticAsset(Statics.icons.ai_summary),
warning: staticAsset(Statics.icons.warning)