
initial source for january

Ashley 2023-12-10 10:27:07 +00:00
parent ff13d57563
commit 7f132c1517
4 changed files with 158 additions and 0 deletions

View file

@@ -0,0 +1,98 @@
use std::time::Duration;
use actix_web::{
web::{self, Query},
Responder,
};
use lazy_static::lazy_static;
use regex::Regex;
use serde::Deserialize;
use crate::structs::metadata::Metadata;
use crate::structs::{embed::Embed, media::Video};
use crate::util::request::fetch;
use crate::{
structs::media::{Image, ImageSize},
util::{request::consume_size, result::Error},
};
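
// Cache embed results per URL for 60 seconds, up to 1,000 entries,
// so repeated requests for the same link do not re-fetch the page.
// Failed lookups are cached too, since the value type is a Result.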
lazy_static! {
static ref CACHE: moka::future::Cache<String, Result<Embed, Error>> =
moka::future::Cache::builder()
.max_capacity(1_000)
.time_to_live(Duration::from_secs(60))
.build();
}

#[derive(Deserialize)]
pub struct Parameters {
url: String,
}

async fn embed(mut url: String) -> Result<Embed, Error> {
// Twitter is a piece of shit and does not
// provide metadata in an easily consumable format.
//
// So... we just redirect everything to Nitter.
//
// Fun bonus: Twitter denied our developer application
// which would've been the only way to pull properly
// formatted Tweet data out and what's worse is that this
// also prevents us adding those "connections" that other
// platforms have.
//
// In any case, because Twitter, they
// do not provide OpenGraph data.
lazy_static! {
static ref RE_TWITTER: Regex =
Regex::new("^(?:https?://)?(?:www\\.)?twitter\\.com").unwrap();
}
if RE_TWITTER.is_match(&url) {
url = RE_TWITTER.replace(&url, "https://nitter.net").into();
}
// Fetch URL
let (resp, mime) = fetch(&url).await?;
// Match appropriate MIME type to process
match (mime.type_(), mime.subtype()) {
(_, mime::HTML) => {
let mut metadata = Metadata::from(resp, url).await?;
metadata.resolve_external().await;
if metadata.is_none() {
return Ok(Embed::None);
}
Ok(Embed::Website(metadata))
}
(mime::IMAGE, _) => {
if let Ok((width, height)) = consume_size(resp, mime).await {
Ok(Embed::Image(Image {
url,
width,
height,
size: ImageSize::Large,
}))
} else {
Ok(Embed::None)
}
}
(mime::VIDEO, _) => {
if let Ok((width, height)) = consume_size(resp, mime).await {
Ok(Embed::Video(Video { url, width, height }))
} else {
Ok(Embed::None)
}
}
_ => Ok(Embed::None),
}
}

pub async fn get(Query(info): Query<Parameters>) -> Result<impl Responder, Error> {
let url = info.url;
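    // moka's get_with coalesces concurrent lookups for the same URL:
    // only one embed future runs and its Result is shared with all callers.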
let result = CACHE
.get_with(url.clone(), async { embed(url).await })
.await;
result.map(web::Json)
}

View file

@@ -0,0 +1,14 @@
use actix_web::web;
use actix_web::Responder;
use serde::Serialize;

#[derive(Debug, Serialize)]
pub struct Info {
january: &'static str,
}

pub async fn get() -> impl Responder {
web::Json(Info {
january: env!("CARGO_PKG_VERSION"),
})
}
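
This handler reports the crate version baked in at compile time via env!("CARGO_PKG_VERSION"); assuming a hypothetical version of 0.1.0, the response body would be {"january":"0.1.0"}.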

View file

@@ -0,0 +1,3 @@
pub mod embed;
pub mod info;
pub mod proxy;
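
For context, a minimal sketch of how these three route modules might be mounted in an actix-web server. The paths, port, and main.rs wiring are assumptions, not part of this commit, and it relies on util::result::Error implementing actix-web's ResponseError so the handlers' Result returns can be rendered as responses:

use actix_web::{web, App, HttpServer};

mod routes; // the module above: embed, info, proxy

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // hypothetical paths; the real wiring is not part of this diff
            .route("/", web::get().to(routes::info::get))
            .route("/embed", web::get().to(routes::embed::get))
            .route("/proxy", web::get().to(routes::proxy::get))
    })
    .bind(("0.0.0.0", 7000))? // port is an assumption
    .run()
    .await
}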

View file

@@ -0,0 +1,43 @@
use std::time::Duration;
use actix_web::web::Bytes;
use actix_web::{web::Query, HttpResponse, Responder};
use lazy_static::lazy_static;
use serde::Deserialize;
use crate::util::request::{fetch, get_bytes};
use crate::util::result::Error;
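
// Cache proxied payloads for 60 seconds. Entries are weighed by their
// byte length, so the 1024 * 1024 * 1024 capacity is a ~1 GiB byte
// budget rather than an entry count; failed fetches weigh 1.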
lazy_static! {
static ref CACHE: moka::future::Cache<String, Result<Bytes, Error>> =
moka::future::Cache::builder()
.weigher(|_key, value: &Result<Bytes, Error>| {
value.as_ref().map(|bytes| bytes.len() as u32).unwrap_or(1)
})
.max_capacity(1024 * 1024 * 1024)
.time_to_live(Duration::from_secs(60))
.build();
}

#[derive(Deserialize)]
pub struct Parameters {
url: String,
}

async fn proxy(url: String) -> Result<Bytes, Error> {
let (mut resp, mime) = fetch(&url).await?;
if matches!(mime.type_(), mime::IMAGE | mime::VIDEO) {
let bytes = get_bytes(&mut resp).await?;
Ok(bytes)
} else {
Err(Error::NotAllowedToProxy)
}
}

pub async fn get(Query(info): Query<Parameters>) -> Result<impl Responder, Error> {
let url = info.url;
let result = CACHE
.get_with(url.clone(), async { proxy(url).await })
.await;
result.map(|b| HttpResponse::Ok().body(b))
}