Mirror of https://github.com/GraphiteEditor/Graphite.git (synced 2025-07-08 00:05:00 +00:00)
Fix 'Boolean Operation' node merge-by-distance post-processing
Fixes #2750

parent 11ba2cc0fe
commit 4a65ad290c

3 changed files with 123 additions and 125 deletions
@@ -1,16 +1,18 @@
-use crate::vector::{PointId, VectorData, VectorDataIndex};
-use glam::DVec2;
+use crate::vector::{PointDomain, PointId, SegmentDomain, VectorData, VectorDataIndex};
+use glam::{DAffine2, DVec2};
 use petgraph::prelude::UnGraphMap;
 use rustc_hash::FxHashSet;

 impl VectorData {
 	/// Collapse all points with edges shorter than the specified distance
-	pub fn merge_by_distance(&mut self, distance: f64) {
+	pub fn merge_by_distance_topological(&mut self, distance: f64) {
 		// Treat self as an undirected graph
 		let indices = VectorDataIndex::build_from(self);

+		// TODO: We lose information on the winding order by using an undirected graph. Switch to a directed graph and fix the algorithm to handle that.
 		// Graph containing only short edges, referencing the data graph
 		let mut short_edges = UnGraphMap::new();

 		for segment_id in self.segment_ids().iter().copied() {
 			let length = indices.segment_chord_length(segment_id);
 			if length < distance {
@@ -92,4 +94,116 @@ impl VectorData {
 		self.segment_domain.retain(|id| !segments_to_delete.contains(id), usize::MAX);
 		self.point_domain.retain(&mut self.segment_domain, |id| !points_to_delete.contains(id));
 	}
+
+	pub fn merge_by_distance_spatial(&mut self, transform: DAffine2, distance: f64) {
+		let point_count = self.point_domain.positions().len();
+
+		// Find min x and y for grid cell normalization
+		let mut min_x = f64::MAX;
+		let mut min_y = f64::MAX;
+
+		// Calculate mins without collecting all positions
+		for &pos in self.point_domain.positions() {
+			let transformed_pos = transform.transform_point2(pos);
+			min_x = min_x.min(transformed_pos.x);
+			min_y = min_y.min(transformed_pos.y);
+		}
+
+		// Create a spatial grid with cell size of 'distance'
+		use std::collections::HashMap;
+		let mut grid: HashMap<(i32, i32), Vec<usize>> = HashMap::new();
+
+		// Add points to grid cells without collecting all positions first
+		for i in 0..point_count {
+			let pos = transform.transform_point2(self.point_domain.positions()[i]);
+			let grid_x = ((pos.x - min_x) / distance).floor() as i32;
+			let grid_y = ((pos.y - min_y) / distance).floor() as i32;
+
+			grid.entry((grid_x, grid_y)).or_default().push(i);
+		}
+
+		// Create point index mapping for merged points
+		let mut point_index_map = vec![None; point_count];
+		let mut merged_positions = Vec::new();
+		let mut merged_indices = Vec::new();
+
+		// Process each point
+		for i in 0..point_count {
+			// Skip points that have already been processed
+			if point_index_map[i].is_some() {
+				continue;
+			}
+
+			let pos_i = transform.transform_point2(self.point_domain.positions()[i]);
+			let grid_x = ((pos_i.x - min_x) / distance).floor() as i32;
+			let grid_y = ((pos_i.y - min_y) / distance).floor() as i32;
+
+			let mut group = vec![i];
+
+			// Check only neighboring cells (3x3 grid around current cell)
+			for dx in -1..=1 {
+				for dy in -1..=1 {
+					let neighbor_cell = (grid_x + dx, grid_y + dy);
+
+					if let Some(indices) = grid.get(&neighbor_cell) {
+						for &j in indices {
+							if j > i && point_index_map[j].is_none() {
+								let pos_j = transform.transform_point2(self.point_domain.positions()[j]);
+								if pos_i.distance(pos_j) <= distance {
+									group.push(j);
+								}
+							}
+						}
+					}
+				}
+			}
+
+			// Create merged point - calculate positions as needed
+			let merged_position = group
+				.iter()
+				.map(|&idx| transform.transform_point2(self.point_domain.positions()[idx]))
+				.fold(DVec2::ZERO, |sum, pos| sum + pos)
+				/ group.len() as f64;
+
+			let merged_position = transform.inverse().transform_point2(merged_position);
+			let merged_index = merged_positions.len();
+
+			merged_positions.push(merged_position);
+			merged_indices.push(self.point_domain.ids()[group[0]]);
+
+			// Update mapping for all points in the group
+			for &idx in &group {
+				point_index_map[idx] = Some(merged_index);
+			}
+		}
+
+		// Create new point domain with merged points
+		let mut new_point_domain = PointDomain::new();
+		for (idx, pos) in merged_indices.into_iter().zip(merged_positions) {
+			new_point_domain.push(idx, pos);
+		}
+
+		// Update segment domain
+		let mut new_segment_domain = SegmentDomain::new();
+		for segment_idx in 0..self.segment_domain.ids().len() {
+			let id = self.segment_domain.ids()[segment_idx];
+			let start = self.segment_domain.start_point()[segment_idx];
+			let end = self.segment_domain.end_point()[segment_idx];
+			let handles = self.segment_domain.handles()[segment_idx];
+			let stroke = self.segment_domain.stroke()[segment_idx];
+
+			// Get new indices for start and end points
+			let new_start = point_index_map[start].unwrap();
+			let new_end = point_index_map[end].unwrap();
+
+			// Skip segments where start and end points were merged
+			if new_start != new_end {
+				new_segment_domain.push(id, new_start, new_end, handles, stroke);
+			}
+		}
+
+		// Create new vector data
+		self.point_domain = new_point_domain;
+		self.segment_domain = new_segment_domain;
+	}
 }
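Note on the new method above: `merge_by_distance_spatial` measures distances after applying the caller's `DAffine2` transform, maps each merged position back through the inverse transform, and buckets points into a uniform grid whose cell size equals the merge distance. Because the cell size equals the threshold, two points closer than the threshold can differ by at most one cell index per axis, so scanning only the 3x3 neighborhood of a point's cell finds every merge candidate. A minimal standalone sketch of that bucketing argument (illustrative only; the `grid_cell` helper is not part of the Graphite codebase):

use glam::DVec2;

// Map a point to its grid cell, using the merge distance as the cell size.
fn grid_cell(pos: DVec2, min: DVec2, cell_size: f64) -> (i32, i32) {
	(((pos.x - min.x) / cell_size).floor() as i32, ((pos.y - min.y) / cell_size).floor() as i32)
}

fn main() {
	let min = DVec2::ZERO;
	let a = DVec2::new(0.95, 0.0);
	let b = DVec2::new(1.05, 0.0); // 0.1 apart, i.e. within a 0.2 merge distance
	assert_eq!(grid_cell(a, min, 0.2), (4, 0));
	assert_eq!(grid_cell(b, min, 0.2), (5, 0)); // adjacent cell, so the 3x3 scan still pairs it with `a`
}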

@@ -555,7 +555,7 @@ pub fn merge_by_distance(
 	vector_data: VectorDataTable,
 	#[default(0.1)]
 	#[hard_min(0.0001)]
-	distance: Length,
+	distance: PixelLength,
 	algorithm: MergeByDistanceAlgorithm,
 ) -> VectorDataTable {
 	let mut result_table = VectorDataTable::default();
@@ -563,130 +563,14 @@ pub fn merge_by_distance(
 	match algorithm {
 		MergeByDistanceAlgorithm::Spatial => {
 			for mut vector_data_instance in vector_data.instance_iter() {
-				let vector_data_transform = vector_data_instance.transform;
-				let vector_data = vector_data_instance.instance;
-
-				let point_count = vector_data.point_domain.positions().len();
-
-				// Find min x and y for grid cell normalization
-				let mut min_x = f64::MAX;
-				let mut min_y = f64::MAX;
-
-				// Calculate mins without collecting all positions
-				for &pos in vector_data.point_domain.positions() {
-					let transformed_pos = vector_data_transform.transform_point2(pos);
-					min_x = min_x.min(transformed_pos.x);
-					min_y = min_y.min(transformed_pos.y);
-				}
-
-				// Create a spatial grid with cell size of 'distance'
-				use std::collections::HashMap;
-				let mut grid: HashMap<(i32, i32), Vec<usize>> = HashMap::new();
-
-				// Add points to grid cells without collecting all positions first
-				for i in 0..point_count {
-					let pos = vector_data_transform.transform_point2(vector_data.point_domain.positions()[i]);
-					let grid_x = ((pos.x - min_x) / distance).floor() as i32;
-					let grid_y = ((pos.y - min_y) / distance).floor() as i32;
-
-					grid.entry((grid_x, grid_y)).or_default().push(i);
-				}
-
-				// Create point index mapping for merged points
-				let mut point_index_map = vec![None; point_count];
-				let mut merged_positions = Vec::new();
-				let mut merged_indices = Vec::new();
-
-				// Process each point
-				for i in 0..point_count {
-					// Skip points that have already been processed
-					if point_index_map[i].is_some() {
-						continue;
-					}
-
-					let pos_i = vector_data_transform.transform_point2(vector_data.point_domain.positions()[i]);
-					let grid_x = ((pos_i.x - min_x) / distance).floor() as i32;
-					let grid_y = ((pos_i.y - min_y) / distance).floor() as i32;
-
-					let mut group = vec![i];
-
-					// Check only neighboring cells (3x3 grid around current cell)
-					for dx in -1..=1 {
-						for dy in -1..=1 {
-							let neighbor_cell = (grid_x + dx, grid_y + dy);
-
-							if let Some(indices) = grid.get(&neighbor_cell) {
-								for &j in indices {
-									if j > i && point_index_map[j].is_none() {
-										let pos_j = vector_data_transform.transform_point2(vector_data.point_domain.positions()[j]);
-										if pos_i.distance(pos_j) <= distance {
-											group.push(j);
-										}
-									}
-								}
-							}
-						}
-					}
-
-					// Create merged point - calculate positions as needed
-					let merged_position = group
-						.iter()
-						.map(|&idx| vector_data_transform.transform_point2(vector_data.point_domain.positions()[idx]))
-						.fold(DVec2::ZERO, |sum, pos| sum + pos)
-						/ group.len() as f64;
-
-					let merged_position = vector_data_transform.inverse().transform_point2(merged_position);
-					let merged_index = merged_positions.len();
-
-					merged_positions.push(merged_position);
-					merged_indices.push(vector_data.point_domain.ids()[group[0]]);
-
-					// Update mapping for all points in the group
-					for &idx in &group {
-						point_index_map[idx] = Some(merged_index);
-					}
-				}
-
-				// Create new point domain with merged points
-				let mut new_point_domain = PointDomain::new();
-				for (idx, pos) in merged_indices.into_iter().zip(merged_positions) {
-					new_point_domain.push(idx, pos);
-				}
-
-				// Update segment domain
-				let mut new_segment_domain = SegmentDomain::new();
-				for segment_idx in 0..vector_data.segment_domain.ids().len() {
-					let id = vector_data.segment_domain.ids()[segment_idx];
-					let start = vector_data.segment_domain.start_point()[segment_idx];
-					let end = vector_data.segment_domain.end_point()[segment_idx];
-					let handles = vector_data.segment_domain.handles()[segment_idx];
-					let stroke = vector_data.segment_domain.stroke()[segment_idx];
-
-					// Get new indices for start and end points
-					let new_start = point_index_map[start].unwrap();
-					let new_end = point_index_map[end].unwrap();
-
-					// Skip segments where start and end points were merged
-					if new_start != new_end {
-						new_segment_domain.push(id, new_start, new_end, handles, stroke);
-					}
-				}
-
-				// Create new vector data
-				let mut result = vector_data.clone();
-				result.point_domain = new_point_domain;
-				result.segment_domain = new_segment_domain;
-
-				// Create and return the result
-				vector_data_instance.instance = result;
-				vector_data_instance.source_node_id = None;
+				vector_data_instance.instance.merge_by_distance_spatial(vector_data_instance.transform, distance);
 				result_table.push(vector_data_instance);
 			}
 		}
 		MergeByDistanceAlgorithm::Topological => {
-			for mut source_instance in vector_data.instance_iter() {
-				source_instance.instance.merge_by_distance(distance);
-				result_table.push(source_instance);
+			for mut vector_data_instance in vector_data.instance_iter() {
+				vector_data_instance.instance.merge_by_distance_topological(distance);
+				result_table.push(vector_data_instance);
 			}
 		}
 	}

@@ -43,7 +43,7 @@ async fn boolean_operation<I: Into<GraphicGroupTable> + 'n + Send + Clone>(
 		result_vector_data.instance.upstream_graphic_group = Some(group_of_paths.clone());

 		// Clean up the boolean operation result by merging duplicated points
-		result_vector_data.instance.merge_by_distance(0.001);
+		result_vector_data.instance.merge_by_distance_spatial(*result_vector_data.transform, 0.0001);
 	}

 	result_vector_data_table
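The last hunk is the actual fix for #2750: the boolean-operation cleanup previously ran the old `merge_by_distance` (now `merge_by_distance_topological`), which only collapses the two endpoints of a segment whose chord is shorter than the threshold. Presumably the duplicated points left behind by the path boolean are coincident but not joined by any short segment, so that criterion never merged them; the new call uses the spatial variant with the instance transform and a tighter 0.0001 threshold, which merges any points that end up close together. A self-contained contrast of the two criteria on toy data (plain tuples, not Graphite's vector types):

fn main() {
	// Points 0 and 2 are coincident, but the only segment connects 0 and 1.
	let positions = [(0.0_f64, 0.0), (10.0, 0.0), (0.0, 0.0)];
	let segments = [(0_usize, 1_usize)];
	let distance = 0.0001;
	let dist = |(ax, ay): (f64, f64), (bx, by): (f64, f64)| ((ax - bx).powi(2) + (ay - by).powi(2)).sqrt();

	// Topological criterion: only endpoints of a sufficiently short segment are merged.
	let topological = segments.iter().filter(|&&(a, b)| dist(positions[a], positions[b]) < distance).count();
	assert_eq!(topological, 0); // the coincident pair (0, 2) is never even considered

	// Spatial criterion: any pair of points within the threshold is merged, connected or not.
	let spatial = (0..positions.len())
		.flat_map(|i| (i + 1..positions.len()).map(move |j| (i, j)))
		.filter(|&(i, j)| dist(positions[i], positions[j]) <= distance)
		.count();
	assert_eq!(spatial, 1); // points 0 and 2 collapse into one
}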