Mirror of https://github.com/girlbossceo/conduwuit.git (synced 2025-03-14 18:55:37 +00:00)
improvements on blurhashing feature

Signed-off-by: Jason Volk <jason@zemos.net>

parent 62180897c0
commit 442bb9889c
7 changed files with 87 additions and 81 deletions
@@ -194,8 +194,10 @@ features = [
 version = "0.2.3"
+default-features = false
 features = [
-	"fast-linear-to-srgb","image"
+	"fast-linear-to-srgb",
+	"image",
 ]

 # logging
 [workspace.dependencies.log]
 version = "0.4.22"
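For orientation, the two crate features kept here are the ones the service code later in this diff relies on: `image` provides the `encode_image` helper that the encoder calls below. A minimal sketch of that call shape (illustrative only, not part of the commit; it assumes the `blurhash` and `image` crates as declared above):

```rust
use image::DynamicImage;

fn main() -> Result<(), Box<dyn std::error::Error>> {
	// Tiny blank test image; `to_rgba8()` mirrors the call shape used later in
	// this diff: `encode_image(components_x, components_y, &image.to_rgba8())`.
	let image = DynamicImage::new_rgb8(4, 4);
	let hash = blurhash::encode_image(4, 3, &image.to_rgba8())?;
	println!("blurhash: {hash}");
	Ok(())
}
```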
@@ -17,7 +17,6 @@ crate-type = [
 ]

 [features]
-blurhashing=[]
 element_hacks = []
 release_max_log_level = [
 	"tracing/max_level_trace",
@@ -57,40 +57,28 @@ pub(crate) async fn create_content_route(
 	let filename = body.filename.as_deref();
 	let content_type = body.content_type.as_deref();
 	let content_disposition = make_content_disposition(None, content_type, filename);
-	let mxc = Mxc {
+	let ref mxc = Mxc {
 		server_name: services.globals.server_name(),
 		media_id: &utils::random_string(MXC_LENGTH),
 	};

-	#[cfg(feature = "blurhashing")]
-	{
-		if body.generate_blurhash {
-			let (blurhash, create_media_result) = tokio::join!(
-				services
-					.media
-					.create_blurhash(&body.file, content_type, filename),
-				services.media.create(
-					&mxc,
-					Some(user),
-					Some(&content_disposition),
-					content_type,
-					&body.file
-				)
-			);
-			return create_media_result.map(|()| create_content::v3::Response {
-				content_uri: mxc.to_string().into(),
-				blurhash,
-			});
-		}
-	}
 	services
 		.media
-		.create(&mxc, Some(user), Some(&content_disposition), content_type, &body.file)
-		.await
-		.map(|()| create_content::v3::Response {
-			content_uri: mxc.to_string().into(),
-			blurhash: None,
-		})
+		.create(mxc, Some(user), Some(&content_disposition), content_type, &body.file)
+		.await?;
+
+	let blurhash = body.generate_blurhash.then(|| {
+		services
+			.media
+			.create_blurhash(&body.file, content_type, filename)
+			.ok()
+			.flatten()
+	});
+
+	Ok(create_content::v3::Response {
+		content_uri: mxc.to_string().into(),
+		blurhash: blurhash.flatten(),
+	})
 }

 /// # `GET /_matrix/client/v1/media/thumbnail/{serverName}/{mediaId}`
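The route now stores the upload first and only then computes the blurhash on a best-effort basis: `generate_blurhash.then(..)` yields an `Option<Option<String>>` (outer layer: was a blurhash requested, inner layer: did hashing succeed), and `flatten()` collapses the two. A small self-contained sketch of that shape with a stand-in hasher (names are illustrative, not from the repo):

```rust
// Stand-in for `create_blurhash(..).ok().flatten()`: a hash that may fail.
fn try_hash(data: &[u8]) -> Result<Option<String>, ()> {
	if data.is_empty() { Err(()) } else { Ok(Some(format!("hash:{}", data.len()))) }
}

fn hash_if_requested(requested: bool, data: &[u8]) -> Option<String> {
	// `then` gives Option<Option<String>>; `flatten` collapses it so that
	// "not requested" and "requested but failed" both end up as None.
	requested.then(|| try_hash(data).ok().flatten()).flatten()
}

fn main() {
	assert_eq!(hash_if_requested(false, b"abc"), None);
	assert_eq!(hash_if_requested(true, b""), None);
	assert_eq!(hash_if_requested(true, b"abc"), Some("hash:3".to_owned()));
}
```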
@@ -54,7 +54,6 @@ sentry_telemetry = []
 conduwuit_mods = [
 	"dep:libloading"
 ]
-blurhashing = []

 [dependencies]
 argon2.workspace = true
@@ -49,6 +49,9 @@ default = [
 	"zstd_compression",
 ]

+blurhashing = [
+	"conduwuit-service/blurhashing",
+]
 brotli_compression = [
 	"conduwuit-api/brotli_compression",
 	"conduwuit-core/brotli_compression",
@@ -101,7 +104,6 @@ perf_measurements = [
 	"conduwuit-core/perf_measurements",
 	"conduwuit-core/sentry_telemetry",
 ]
-blurhashing =["conduwuit-service/blurhashing","conduwuit-core/blurhashing","conduwuit-api/blurhashing"]
 # increases performance, reduces build times, and reduces binary size by not compiling or
 # genreating code for log level filters that users will generally not use (debug and trace)
 release_max_log_level = [
@@ -1,56 +1,58 @@
-use std::{fmt::Display, io::Cursor, path::Path};
+use std::{error::Error, ffi::OsStr, fmt::Display, io::Cursor, path::Path};

-use blurhash::encode_image;
-use conduwuit::{config::BlurhashConfig as CoreBlurhashConfig, debug_error, implement, trace};
+use conduwuit::{config::BlurhashConfig as CoreBlurhashConfig, err, implement, Result};
 use image::{DynamicImage, ImageDecoder, ImageError, ImageFormat, ImageReader};

 use super::Service;
 #[implement(Service)]
-pub async fn create_blurhash(
+pub fn create_blurhash(
 	&self,
 	file: &[u8],
 	content_type: Option<&str>,
 	file_name: Option<&str>,
-) -> Option<String> {
+) -> Result<Option<String>> {
+	if !cfg!(feature = "blurhashing") {
+		return Ok(None);
+	}
+
 	let config = BlurhashConfig::from(self.services.server.config.blurhashing);

+	// since 0 means disabled blurhashing, skipped blurhashing
 	if config.size_limit == 0 {
-		trace!("since 0 means disabled blurhashing, skipped blurhashing logic");
-		return None;
+		return Ok(None);
 	}
-	let file_data = file.to_owned();
-	let content_type = content_type.map(String::from);
-	let file_name = file_name.map(String::from);
-
-	let blurhashing_result = tokio::task::spawn_blocking(move || {
-		get_blurhash_from_request(&file_data, content_type, file_name, config)
-	})
-	.await
-	.expect("no join error");
-
-	match blurhashing_result {
-		| Ok(result) => Some(result),
-		| Err(e) => {
-			debug_error!("Error when blurhashing: {e}");
-			None
-		},
-	}
+	get_blurhash_from_request(file, content_type, file_name, config)
+		.map_err(|e| err!(debug_error!("blurhashing error: {e}")))
+		.map(Some)
 }

 /// Returns the blurhash or a blurhash error which implements Display.
+#[tracing::instrument(
+	name = "blurhash",
+	level = "debug",
+	skip(data),
+	fields(
+		bytes = data.len(),
+	),
+)]
 fn get_blurhash_from_request(
 	data: &[u8],
-	mime: Option<String>,
-	filename: Option<String>,
+	mime: Option<&str>,
+	filename: Option<&str>,
 	config: BlurhashConfig,
 ) -> Result<String, BlurhashingError> {
 	// Get format image is supposed to be in
 	let format = get_format_from_data_mime_and_filename(data, mime, filename)?;

 	// Get the image reader for said image format
 	let decoder = get_image_decoder_with_format_and_data(format, data)?;

 	// Check image size makes sense before unpacking whole image
 	if is_image_above_size_limit(&decoder, config) {
 		return Err(BlurhashingError::ImageTooLarge);
 	}

 	// decode the image finally
 	let image = DynamicImage::from_decoder(decoder)?;
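`create_blurhash` is no longer `async` and no longer needs feature-gating at the call site: it always exists, returns `Result<Option<String>>`, and bails out early via a `cfg!` check when the crate is built without the feature. A minimal sketch of that pattern (hypothetical names; `cfg!` expands to a boolean constant, so both branches must still type-check, but the disabled one is trivially optimized away):

```rust
fn maybe_expensive_hash(data: &[u8]) -> Option<String> {
	if !cfg!(feature = "blurhashing") {
		// Feature disabled at build time: behave as a no-op.
		return None;
	}
	// Feature enabled: do the real work (stubbed here for the sketch).
	Some(format!("{} bytes hashed", data.len()))
}

fn main() {
	println!("{:?}", maybe_expensive_hash(b"example"));
}
```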
@@ -64,24 +66,17 @@ fn get_blurhash_from_request(
 /// different file format than file.
 fn get_format_from_data_mime_and_filename(
 	data: &[u8],
-	mime: Option<String>,
-	filename: Option<String>,
+	mime: Option<&str>,
+	filename: Option<&str>,
 ) -> Result<ImageFormat, BlurhashingError> {
-	let mut image_format = None;
-	if let Some(mime) = mime {
-		image_format = ImageFormat::from_mime_type(mime);
-	}
-	if let (Some(filename), None) = (filename, image_format) {
-		if let Some(extension) = Path::new(&filename).extension() {
-			image_format = ImageFormat::from_mime_type(extension.to_string_lossy());
-		}
-	}
+	let extension = filename
+		.map(Path::new)
+		.and_then(Path::extension)
+		.map(OsStr::to_string_lossy);
+
-	if let Some(format) = image_format {
-		Ok(format)
-	} else {
-		image::guess_format(data).map_err(Into::into)
-	}
+	mime.or(extension.as_deref())
+		.and_then(ImageFormat::from_mime_type)
+		.map_or_else(|| image::guess_format(data).map_err(Into::into), Ok)
 }

 fn get_image_decoder_with_format_and_data(
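The format lookup is now a single combinator chain: prefer the MIME type, fall back to the file extension, try to map either to an `ImageFormat`, and only sniff the raw bytes if neither resolves. A toy, self-contained version of the same `or` / `and_then` / `map_or_else` shape using plain strings instead of image formats (illustrative only, not from the repo):

```rust
// Prefer an explicit hint, fall back to a file extension, and only sniff the
// data if neither resolves to a known name.
fn detect(hint: Option<&str>, extension: Option<&str>, data: &[u8]) -> Result<String, String> {
	fn known(name: &str) -> Option<String> {
		matches!(name, "png" | "jpeg").then(|| name.to_owned())
	}

	fn sniff(data: &[u8]) -> Result<String, String> {
		data.starts_with(b"\x89PNG")
			.then(|| "png".to_owned())
			.ok_or_else(|| "unrecognized data".to_owned())
	}

	hint.or(extension)
		.and_then(known)
		.map_or_else(|| sniff(data), Ok)
}

fn main() {
	assert_eq!(detect(None, Some("jpeg"), b""), Ok("jpeg".to_owned()));
	assert_eq!(detect(None, None, b"\x89PNG\r\n"), Ok("png".to_owned()));
}
```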
@@ -99,23 +94,37 @@ fn is_image_above_size_limit<T: ImageDecoder>(
 ) -> bool {
 	decoder.total_bytes() >= blurhash_config.size_limit
 }

+#[cfg(feature = "blurhashing")]
+#[tracing::instrument(name = "encode", level = "debug", skip_all)]
+#[inline]
 fn blurhash_an_image(
 	image: &DynamicImage,
 	blurhash_config: BlurhashConfig,
 ) -> Result<String, BlurhashingError> {
-	Ok(encode_image(
+	Ok(blurhash::encode_image(
 		blurhash_config.components_x,
 		blurhash_config.components_y,
 		&image.to_rgba8(),
 	)?)
 }
-#[derive(Clone, Copy)]

+#[cfg(not(feature = "blurhashing"))]
+#[inline]
+fn blurhash_an_image(
+	_image: &DynamicImage,
+	_blurhash_config: BlurhashConfig,
+) -> Result<String, BlurhashingError> {
+	Err(BlurhashingError::Unavailable)
+}
+
+#[derive(Clone, Copy, Debug)]
 pub struct BlurhashConfig {
-	components_x: u32,
-	components_y: u32,
+	pub components_x: u32,
+	pub components_y: u32,

 	/// size limit in bytes
-	size_limit: u64,
+	pub size_limit: u64,
 }

 impl From<CoreBlurhashConfig> for BlurhashConfig {
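The encoder now has two compile-time variants: the real one wrapping `blurhash::encode_image`, and a stub returning `BlurhashingError::Unavailable` when the feature is off, so the rest of the module compiles either way. A standalone sketch of this "real implementation plus cfg-gated stub" pattern (the feature name and functions here are hypothetical):

```rust
// Exactly one of the two definitions is compiled; callers are written once
// against either and handle the error in the disabled case.
#[cfg(feature = "fancy")]
fn render(input: &str) -> Result<String, &'static str> {
	Ok(format!("fancy({input})"))
}

#[cfg(not(feature = "fancy"))]
fn render(_input: &str) -> Result<String, &'static str> {
	Err("fancy rendering not compiled in")
}

fn main() {
	match render("picture") {
		Ok(out) => println!("{out}"),
		Err(e) => eprintln!("{e}"),
	}
}
```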
@@ -129,15 +138,20 @@ impl From<CoreBlurhashConfig> for BlurhashConfig {
 }

 #[derive(Debug)]
-pub(crate) enum BlurhashingError {
+pub enum BlurhashingError {
+	HashingLibError(Box<dyn Error + Send>),
 	ImageError(Box<ImageError>),
-	HashingLibError(Box<blurhash::Error>),
 	ImageTooLarge,
+
+	#[cfg(not(feature = "blurhashing"))]
+	Unavailable,
 }

 impl From<ImageError> for BlurhashingError {
 	fn from(value: ImageError) -> Self { Self::ImageError(Box::new(value)) }
 }

+#[cfg(feature = "blurhashing")]
 impl From<blurhash::Error> for BlurhashingError {
 	fn from(value: blurhash::Error) -> Self { Self::HashingLibError(Box::new(value)) }
 }
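Storing the library error as `Box<dyn Error + Send>` lets the enum be defined without naming `blurhash::Error` at all, so only the feature-gated `From` impl has to mention the optional crate. A toy version of that decoupling, using `std::io::Error` as the stand-in concrete error (illustrative, not from the repo):

```rust
use std::error::Error;
use std::fmt;

// The enum stores a boxed trait object, so its definition does not depend on
// the optional library's concrete error type.
#[derive(Debug)]
enum WrapperError {
	LibError(Box<dyn Error + Send>),
	TooLarge,
}

impl fmt::Display for WrapperError {
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		match self {
			Self::LibError(e) => write!(f, "library error: {e}"),
			Self::TooLarge => write!(f, "input too large"),
		}
	}
}

// Any concrete error that is Error + Send can be wrapped; the feature-gated
// `impl From<blurhash::Error>` in the diff follows the same shape.
impl From<std::io::Error> for WrapperError {
	fn from(value: std::io::Error) -> Self { Self::LibError(Box::new(value)) }
}

fn main() {
	let e: WrapperError = std::io::Error::other("backend failed").into();
	println!("{e}");
}
```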
@@ -152,6 +166,9 @@ impl Display for BlurhashingError {

 		| Self::ImageError(e) =>
 			write!(f, "There was an error with the image loading library => {e}")?,
+
+		#[cfg(not(feature = "blurhashing"))]
+		| Self::Unavailable => write!(f, "Blurhashing is not supported")?,
 	};

 	Ok(())
@@ -1,4 +1,3 @@
-#[cfg(feature = "blurhashing")]
 pub mod blurhash;
 mod data;
 pub(super) mod migrations;