Brush blend modes and erase/restore (#1261)

* Made blit node numerically stable.

* Added blend mode parameter to brush strokes.

* Fixed difference blend mode.

* Added erase/restore blend modes.

* Added blend mode and draw mode widgets.

* Added comment explaining the ImageFrame.transform.

* Initial blit/blend version.

* Working version of erase/restore.

* Improved inlining for blend functions (see the expansion sketch after the commit metadata below).

* Disable the blend mode selector in erase/draw mode.

* Fixed incorrect bounds calculation.

* Use factor instead of percentage for opacity

* Rearrange options bar widgets

* Tidy up blend modes

* Code review

---------

Co-authored-by: Keavon Chambers <keavon@keavon.com>
Authored by Orson Peters on 2023-06-02 21:59:55 +02:00; committed by Keavon Chambers
parent 4e1bfddcd8
commit 5558deba5e
13 changed files with 397 additions and 153 deletions
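As context for the "Improved inlining for blend functions" item, here is a rough sketch (not the literal macro output) of what one arm of the inline_blend_funcs! macro in the first file below expands to for BlendMode::Normal. The blend mode becomes a compile-time constant inside the closure, so blend_colors can be inlined and specialized per mode:

// Sketch of a single expanded match arm (illustration only).
BlendMode::Normal => {
	blend_image_closure(foreground, background, |a, b| blend_colors(a, b, BlendMode::Normal, opacity))
}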


@@ -1,12 +1,18 @@
use std::marker::PhantomData;
use crate::raster::{blend_image_closure, BlendImageTupleNode, EmptyImageNode};
use glam::{DAffine2, DVec2};
use graphene_core::raster::{Alpha, Color, ImageFrame, Pixel, Sample};
use graphene_core::raster::adjustments::blend_colors;
use graphene_core::raster::{Alpha, Color, Image, ImageFrame, Pixel, Sample};
use graphene_core::raster::{BlendMode, BlendNode};
use graphene_core::transform::{Transform, TransformMut};
use graphene_core::value::{CopiedNode, ValueNode};
use graphene_core::vector::brush_stroke::BrushStyle;
use graphene_core::vector::VectorData;
use graphene_core::Node;
use node_macro::node_fn;
use glam::{DAffine2, DVec2};
use std::marker::PhantomData;
#[derive(Clone, Debug, PartialEq)]
pub struct ReduceNode<Initial, Lambda> {
pub initial: Initial,
@@ -159,15 +165,20 @@ pub struct BlitNode<P, Texture, Positions, BlendFn> {
}
#[node_fn(BlitNode<_P>)]
fn blit_node<_P: Alpha + Pixel + std::fmt::Debug, BlendFn>(mut target: ImageFrame<_P>, texture: ImageFrame<_P>, positions: Vec<DVec2>, blend_mode: BlendFn) -> ImageFrame<_P>
fn blit_node<_P: Alpha + Pixel + std::fmt::Debug, BlendFn>(mut target: ImageFrame<_P>, texture: Image<_P>, positions: Vec<DVec2>, blend_mode: BlendFn) -> ImageFrame<_P>
where
BlendFn: for<'any_input> Node<'any_input, (_P, _P), Output = _P>,
{
if positions.len() == 0 {
return target;
}
let target_size = DVec2::new(target.image.width as f64, target.image.height as f64);
let texture_size = DVec2::new(texture.width as f64, texture.height as f64);
let document_to_target = DAffine2::from_translation(-texture_size / 2.) * DAffine2::from_scale(target_size) * target.transform.inverse();
for position in positions {
let target_size = DVec2::new(target.image.width as f64, target.image.height as f64);
let texture_size = DVec2::new(texture.image.width as f64, texture.image.height as f64);
let document_to_target = target.transform.inverse();
let start = document_to_target.transform_point2(position) * target_size - texture_size / 2.;
let start = document_to_target.transform_point2(position).round();
let stop = start + texture_size;
// Half-open integer ranges [start, stop).
@@ -178,17 +189,17 @@ where
let blit_area_dimensions = (clamp_stop - clamp_start).min(texture_size.as_uvec2() - blit_area_offset);
// Tight blitting loop. Eagerly assert bounds to hopefully eliminate bounds check inside loop.
let texture_index = |x: u32, y: u32| -> usize { (y as usize * texture.image.width as usize) + (x as usize) };
let texture_index = |x: u32, y: u32| -> usize { (y as usize * texture.width as usize) + (x as usize) };
let target_index = |x: u32, y: u32| -> usize { (y as usize * target.image.width as usize) + (x as usize) };
let max_y = (blit_area_offset.y + blit_area_dimensions.y).saturating_sub(1);
let max_x = (blit_area_offset.x + blit_area_dimensions.x).saturating_sub(1);
assert!(texture_index(max_x, max_y) < texture.image.data.len());
assert!(texture_index(max_x, max_y) < texture.data.len());
assert!(target_index(max_x, max_y) < target.image.data.len());
for y in blit_area_offset.y..blit_area_offset.y + blit_area_dimensions.y {
for x in blit_area_offset.x..blit_area_offset.x + blit_area_dimensions.x {
let src_pixel = texture.image.data[texture_index(x, y)];
let src_pixel = texture.data[texture_index(x, y)];
let dst_pixel = &mut target.image.data[target_index(x + clamp_start.x, y + clamp_start.y)];
*dst_pixel = blend_mode.eval((src_pixel, *dst_pixel));
}
@@ -198,6 +209,76 @@ where
target
}
pub fn create_brush_texture(brush_style: BrushStyle) -> Image<Color> {
let stamp = BrushStampGeneratorNode::new(CopiedNode::new(brush_style.color), CopiedNode::new(brush_style.hardness), CopiedNode::new(brush_style.flow));
let stamp = stamp.eval(brush_style.diameter);
let transform = DAffine2::from_scale_angle_translation(DVec2::splat(brush_style.diameter), 0., -DVec2::splat(brush_style.diameter / 2.));
let blank_texture = EmptyImageNode::new(CopiedNode::new(Color::TRANSPARENT)).eval(transform);
let normal_blend = BlendNode::new(CopiedNode::new(BlendMode::Normal), CopiedNode::new(100.));
let blend_executor = BlendImageTupleNode::new(ValueNode::new(normal_blend));
blend_executor.eval((blank_texture, stamp)).image
}
macro_rules! inline_blend_funcs {
($bg:ident, $fg:ident, $blend_mode:ident, $opacity:ident, [$($mode:path,)*]) => {
match std::hint::black_box($blend_mode) {
$(
$mode => {
blend_image_closure($fg, $bg, |a, b| blend_colors(a, b, $mode, $opacity))
}
)*
}
};
}
pub fn blend_with_mode(background: ImageFrame<Color>, foreground: ImageFrame<Color>, blend_mode: BlendMode, opacity: f32) -> ImageFrame<Color> {
let opacity = opacity / 100.;
inline_blend_funcs!(
background,
foreground,
blend_mode,
opacity,
[
// Normal group
BlendMode::Normal,
// Darken group
BlendMode::Darken,
BlendMode::Multiply,
BlendMode::ColorBurn,
BlendMode::LinearBurn,
BlendMode::DarkerColor,
// Lighten group
BlendMode::Lighten,
BlendMode::Screen,
BlendMode::ColorDodge,
BlendMode::LinearDodge,
BlendMode::LighterColor,
// Contrast group
BlendMode::Overlay,
BlendMode::SoftLight,
BlendMode::HardLight,
BlendMode::VividLight,
BlendMode::LinearLight,
BlendMode::PinLight,
BlendMode::HardMix,
// Inversion group
BlendMode::Difference,
BlendMode::Exclusion,
BlendMode::Subtract,
BlendMode::Divide,
// Component group
BlendMode::Hue,
BlendMode::Saturation,
BlendMode::Color,
BlendMode::Luminosity,
// Other utility blend modes (hidden from the normal list)
BlendMode::Erase,
BlendMode::Restore,
BlendMode::MultiplyAlpha,
]
)
}
#[cfg(test)]
mod test {
use super::*;

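For reference, a hypothetical call to the blend_with_mode helper added in the file above; layer_frame and brush_frame are assumed to be ImageFrame<Color> values already in scope, not names from the diff. Note that blend_with_mode takes opacity as a percentage and divides it by 100 before passing a factor to blend_colors:

// Composite brush_frame over layer_frame with the Multiply blend mode at 50% opacity.
let result: ImageFrame<Color> = blend_with_mode(layer_frame, brush_frame, BlendMode::Multiply, 50.);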

@@ -279,6 +279,17 @@ fn blend_image<_P: Alpha + Pixel + Debug, MapFn, Frame: Sample<Pixel = _P> + Tra
) -> Background
where
MapFn: for<'any_input> Node<'any_input, (_P, _P), Output = _P>,
{
blend_image_closure(foreground, background, |a, b| map_fn.eval((a, b)))
}
pub fn blend_image_closure<_P: Alpha + Pixel + Debug, MapFn, Frame: Sample<Pixel = _P> + Transform, Background: RasterMut<Pixel = _P> + Transform + Sample<Pixel = _P>>(
foreground: Frame,
mut background: Background,
map_fn: MapFn,
) -> Background
where
MapFn: Fn(_P, _P) -> _P,
{
let background_size = DVec2::new(background.width() as f64, background.height() as f64);
// Transforms a point from the background image to the foreground image
@@ -299,7 +310,7 @@ where
if let Some(src_pixel) = foreground.sample(fg_point, area) {
if let Some(dst_pixel) = background.get_pixel_mut(x, y) {
*dst_pixel = map_fn.eval((src_pixel, *dst_pixel));
*dst_pixel = map_fn(src_pixel, *dst_pixel);
}
}
}
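The file above splits the node-based blend_image into a thin wrapper around the new blend_image_closure, so callers like the inline_blend_funcs! macro can supply a plain Rust closure instead of a node. A hypothetical direct call, where the closure and variable names are assumptions rather than code from the diff:

// Replace every background pixel covered by the foreground with the sampled foreground pixel.
let composited = blend_image_closure(foreground_frame, background_frame, |src, _dst| src);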