Add person/wire removal features and accessibility support

- Person removal: select all people option, mask dilation, brush refinement
- Wire removal: line brush fallback mode with Catmull-Rom smoothing
- Image import: Files app document picker, large image warnings
- Accessibility: VoiceOver labels, announcements, Dynamic Type support,
  high contrast mask option, Reduce Motion, 44pt touch targets, steppers

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-24 12:36:45 -05:00
parent 00138d944d
commit 99faa068f3
5 changed files with 625 additions and 51 deletions

View File

@@ -34,7 +34,7 @@ struct CanvasView: View {
.scaleEffect(scale)
.offset(offset)
// Mask overlay
// Mask overlay - optionally cyan for high contrast (colorblind-friendly)
if let mask = viewModel.maskPreview {
Image(decorative: mask, scale: 1.0)
.resizable()
@@ -42,7 +42,8 @@ struct CanvasView: View {
.scaleEffect(scale)
.offset(offset)
.blendMode(.multiply)
.colorMultiply(.red.opacity(0.5))
.colorMultiply(viewModel.useHighContrastMask ? .cyan.opacity(0.7) : .red.opacity(0.5))
.accessibilityLabel("Selected area preview")
}
// Brush canvas overlay
@@ -53,9 +54,20 @@ struct CanvasView: View {
displayedImageFrame: displayedImageFrame(in: geometry.size)
)
}
// Line brush path overlay
if viewModel.selectedTool == .wire && viewModel.isLineBrushMode && !viewModel.lineBrushPath.isEmpty {
LineBrushPathView(
path: viewModel.lineBrushPath,
lineWidth: viewModel.wireWidth,
imageSize: viewModel.imageSize,
displayedFrame: displayedImageFrame(in: geometry.size)
)
}
}
}
.contentShape(Rectangle())
.gesture(lineBrushGesture(in: geometry))
.gesture(tapGesture(in: geometry))
.gesture(magnificationGesture(in: geometry))
.simultaneousGesture(dragGesture(in: geometry))
@@ -119,6 +131,11 @@ struct CanvasView: View {
!viewModel.showingMaskConfirmation,
viewModel.selectedTool != .brush else { return }
// Skip tap if in line brush mode
if viewModel.selectedTool == .wire && viewModel.isLineBrushMode {
return
}
let imagePoint = convertViewPointToImagePoint(value.location, in: geometry.size)
Task {
await viewModel.handleTap(at: imagePoint)
@@ -126,6 +143,19 @@ struct CanvasView: View {
}
}
/// Drag gesture that records freehand stroke points while the wire tool's
/// line-brush mode is active. `minimumDistance: 0` makes the stroke begin
/// on touch-down rather than after the finger has moved.
private func lineBrushGesture(in geometry: GeometryProxy) -> some Gesture {
    DragGesture(minimumDistance: 0)
        .onChanged { value in
            // Only record while the wire tool is in line-brush mode and no
            // processing spinner or mask-confirmation UI is showing.
            guard viewModel.selectedTool == .wire,
                  viewModel.isLineBrushMode,
                  !viewModel.isProcessing,
                  !viewModel.showingMaskConfirmation else { return }
            // Convert from view coordinates to image pixel coordinates
            // before handing the point to the view model.
            let imagePoint = convertViewPointToImagePoint(value.location, in: geometry.size)
            viewModel.addLineBrushPoint(imagePoint)
        }
}
private func magnificationGesture(in geometry: GeometryProxy) -> some Gesture {
MagnificationGesture()
.onChanged { value in
@@ -207,12 +237,6 @@ struct CanvasView: View {
)
}
// Account for centering
let displayedOrigin = CGPoint(
x: (viewSize.width - displayedSize.width) / 2,
y: (viewSize.height - displayedSize.height) / 2
)
// Account for scale and offset
let scaledSize = CGSize(
width: displayedSize.width * scale,
@@ -276,6 +300,44 @@ struct CanvasView: View {
}
}
// MARK: - Line Brush Path View

/// Read-only overlay that renders the in-progress line-brush stroke.
/// Image-space points are projected into the displayed image frame and
/// stroked as a translucent red polyline using a SwiftUI `Canvas`.
struct LineBrushPathView: View {
    let path: [CGPoint]
    let lineWidth: CGFloat
    let imageSize: CGSize
    let displayedFrame: CGRect

    var body: some View {
        Canvas { context, _ in
            // Need at least two points to draw a segment.
            guard path.count >= 2 else { return }

            // Maps an image-space point into view space by normalizing
            // against the image size and projecting into displayedFrame.
            func project(_ p: CGPoint) -> CGPoint {
                CGPoint(
                    x: displayedFrame.origin.x + (p.x / imageSize.width) * displayedFrame.width,
                    y: displayedFrame.origin.y + (p.y / imageSize.height) * displayedFrame.height
                )
            }

            var stroke = Path()
            stroke.move(to: project(path[0]))
            path.dropFirst().forEach { stroke.addLine(to: project($0)) }

            // Scale the brush width by the same image-to-view ratio so the
            // on-screen stroke matches the mask that will be generated.
            context.stroke(
                stroke,
                with: .color(.red.opacity(0.7)),
                lineWidth: lineWidth * (displayedFrame.width / imageSize.width)
            )
        }
        .allowsHitTesting(false)    // purely decorative; gestures pass through
        .accessibilityHidden(true)  // transient preview; hidden from VoiceOver
    }
}
#Preview {
let viewModel = EditorViewModel()
return CanvasView(viewModel: viewModel)

View File

@@ -10,6 +10,12 @@ import UIKit
import CoreGraphics
import Observation
// MARK: - Accessibility Helpers

/// Posts a VoiceOver announcement for `message`; effectively a no-op when
/// VoiceOver is not running.
private func announceForVoiceOver(_ message: String) {
    UIAccessibility.post(notification: .announcement, argument: message)
}
@Observable
@MainActor
final class EditorViewModel {
@@ -28,6 +34,13 @@ final class EditorViewModel {
var processingMessage = ""
var errorMessage: String?
var showingMaskConfirmation = false
var detectedPeopleCount = 0
var showSelectAllPeople = false
var isLineBrushMode = false
var lineBrushPath: [CGPoint] = []
var useHighContrastMask = false
var useEdgeRefinement = true
var pendingRefineMask: CGImage?
private(set) var project: Project?
@@ -107,15 +120,80 @@ final class EditorViewModel {
}
private func handlePersonTap(at point: CGPoint, in image: CGImage) async throws {
let mask = try await maskingService.generatePersonMask(at: point, in: image)
let (mask, peopleCount) = try await maskingService.generatePersonMaskWithCount(at: point, in: image)
detectedPeopleCount = peopleCount
guard let mask = mask else {
errorMessage = "No person found at tap location"
return
}
maskPreview = mask
// Dilate mask by 3px to capture edge pixels
let dilatedMask = dilateMask(mask, by: 3)
maskPreview = dilatedMask ?? mask
showingMaskConfirmation = true
showSelectAllPeople = peopleCount > 1
}
/// Generates one combined mask covering every detected person and presents
/// it for confirmation. Invoked from the "Select All N" button shown when
/// more than one person instance was detected.
func selectAllPeople() async {
    guard let image = displayImage else { return }
    isProcessing = true
    processingMessage = "Selecting all people..."
    // Single cleanup point for every exit path (success, no people, error) —
    // the original duplicated these resets on the early-return branch.
    defer {
        isProcessing = false
        processingMessage = ""
    }
    do {
        guard let mask = try await maskingService.generateAllForegroundMasks(in: image) else {
            errorMessage = "No people found in image"
            return
        }
        // Dilate by 3px to match single-person selection behavior and catch
        // soft edge pixels; fall back to the raw mask if dilation fails.
        maskPreview = dilateMask(mask, by: 3) ?? mask
        showingMaskConfirmation = true
        showSelectAllPeople = false
    } catch {
        errorMessage = error.localizedDescription
    }
}
/// Returns a copy of `mask` morphologically dilated by `pixels`, using a
/// circular structuring element approximated by re-drawing the mask at every
/// offset within the radius with the `.lighten` blend mode (per-pixel max of
/// gray values, so the white selected region grows outward).
/// - Returns: The dilated mask, or `nil` if the grayscale context fails.
private func dilateMask(_ mask: CGImage, by pixels: Int) -> CGImage? {
    let maskWidth = mask.width
    let maskHeight = mask.height
    guard let canvas = CGContext(
        data: nil,
        width: maskWidth,
        height: maskHeight,
        bitsPerComponent: 8,
        bytesPerRow: maskWidth,
        space: CGColorSpaceCreateDeviceGray(),
        bitmapInfo: CGImageAlphaInfo.none.rawValue
    ) else { return nil }

    let bounds = CGRect(x: 0, y: 0, width: maskWidth, height: maskHeight)
    // Seed the canvas with the unshifted mask...
    canvas.draw(mask, in: bounds)
    // ...then take the per-pixel max against shifted copies inside the
    // circular radius.
    canvas.setBlendMode(.lighten)
    let radiusSquared = pixels * pixels
    for dx in -pixels...pixels {
        for dy in -pixels...pixels where dx * dx + dy * dy <= radiusSquared {
            canvas.draw(mask, in: bounds.offsetBy(dx: CGFloat(dx), dy: CGFloat(dy)))
        }
    }
    return canvas.makeImage()
}
private func handleObjectTap(at point: CGPoint, in image: CGImage) async throws {
@@ -131,6 +209,11 @@ final class EditorViewModel {
}
private func handleWireTap(at point: CGPoint, in image: CGImage) async throws {
// If in line brush mode, don't process taps
if isLineBrushMode {
return
}
let contours = try await contourService.detectContours(in: image)
let bestContour = await contourService.findBestWireContour(
at: point,
@@ -139,7 +222,7 @@ final class EditorViewModel {
)
guard let contour = bestContour else {
errorMessage = "No lines detected. Use the line brush to draw along the wire."
errorMessage = "No lines detected. Tap 'Line Brush' to draw along the wire."
return
}
@@ -158,6 +241,124 @@ final class EditorViewModel {
showingMaskConfirmation = true
}
// MARK: - Line Brush Mode
/// Flips between tap-to-detect and freehand line-brush wire selection.
/// Leaving line-brush mode discards any stroke drawn so far.
func toggleLineBrushMode() {
    isLineBrushMode = !isLineBrushMode
    guard !isLineBrushMode else { return }
    lineBrushPath.removeAll()
}
/// Appends one image-space point to the in-progress line-brush stroke.
/// Called repeatedly by the drag gesture while line-brush mode is active.
func addLineBrushPoint(_ point: CGPoint) {
    lineBrushPath.append(point)
}
/// Converts the accumulated line-brush stroke into a grayscale mask and
/// presents it for confirmation. The stroke buffer is cleared in all cases.
func finishLineBrush() async {
    // A stroke needs a loaded image and at least two points; otherwise discard.
    guard let image = displayImage, lineBrushPath.count >= 2 else {
        lineBrushPath.removeAll()
        return
    }

    isProcessing = true
    processingMessage = "Creating line mask..."

    // Rasterize the stroke at the image's pixel dimensions.
    let generated = createLineBrushMask(
        path: lineBrushPath,
        width: Int(wireWidth),
        imageSize: CGSize(width: image.width, height: image.height)
    )
    lineBrushPath.removeAll()

    if let generated {
        maskPreview = generated
        showingMaskConfirmation = true
        isLineBrushMode = false
    } else {
        errorMessage = "Failed to create mask from line brush"
    }
    isProcessing = false
    processingMessage = ""
}
/// Rasterizes a brush `path` (image pixel coordinates, top-left origin) into
/// a grayscale mask: a white stroke of `width` px on black, with round caps
/// and joins. Paths with 4+ points are smoothed with a Catmull-Rom spline.
/// - Returns: The mask image, or `nil` if the path is empty or the bitmap
///   context cannot be created.
private func createLineBrushMask(path: [CGPoint], width: Int, imageSize: CGSize) -> CGImage? {
    let intWidth = Int(imageSize.width)
    let intHeight = Int(imageSize.height)
    guard let context = CGContext(
        data: nil,
        width: intWidth,
        height: intHeight,
        bitsPerComponent: 8,
        bytesPerRow: intWidth,
        space: CGColorSpaceCreateDeviceGray(),
        bitmapInfo: CGImageAlphaInfo.none.rawValue
    ) else { return nil }

    // Black background = "not selected".
    context.setFillColor(gray: 0, alpha: 1)
    context.fill(CGRect(x: 0, y: 0, width: intWidth, height: intHeight))

    // White stroke = "selected"; round caps/joins avoid gaps at corners.
    context.setStrokeColor(gray: 1, alpha: 1)
    context.setLineWidth(CGFloat(width))
    context.setLineCap(.round)
    context.setLineJoin(.round)

    // Smooth when there are enough control points for Catmull-Rom (needs 4);
    // this unifies the two previously duplicated stroke-building branches.
    let strokePoints = path.count >= 4 ? catmullRomSpline(points: path, segments: 10) : path
    // Guard instead of the original unconditional `path[0]` subscript, so an
    // empty path returns nil rather than crashing.
    guard let first = strokePoints.first else { return nil }

    // Flip Y: brush points use a top-left origin, CGContext is bottom-left.
    context.move(to: CGPoint(x: first.x, y: imageSize.height - first.y))
    for point in strokePoints.dropFirst() {
        context.addLine(to: CGPoint(x: point.x, y: imageSize.height - point.y))
    }
    context.strokePath()
    return context.makeImage()
}
/// Samples a uniform Catmull-Rom spline through `points`, emitting `segments`
/// samples per input segment. Endpoint neighbors are handled by clamping the
/// indices, and the final input point is appended so the curve terminates
/// exactly at the input. Inputs with fewer than 4 points are returned as-is.
private func catmullRomSpline(points: [CGPoint], segments: Int) -> [CGPoint] {
    guard points.count >= 4 else { return points }

    // Evaluates one coordinate of the uniform Catmull-Rom basis at t in [0, 1).
    func basis(_ c0: CGFloat, _ c1: CGFloat, _ c2: CGFloat, _ c3: CGFloat, _ t: CGFloat) -> CGFloat {
        let tt = t * t
        let ttt = tt * t
        return 0.5 * ((2 * c1) +
                      (-c0 + c2) * t +
                      (2 * c0 - 5 * c1 + 4 * c2 - c3) * tt +
                      (-c0 + 3 * c1 - 3 * c2 + c3) * ttt)
    }

    var sampled: [CGPoint] = []
    sampled.reserveCapacity((points.count - 1) * segments + 1)
    for index in 0..<(points.count - 1) {
        // Clamp so the first/last segments reuse their edge point as neighbor.
        let p0 = points[max(0, index - 1)]
        let p1 = points[index]
        let p2 = points[min(points.count - 1, index + 1)]
        let p3 = points[min(points.count - 1, index + 2)]
        for step in 0..<segments {
            let t = CGFloat(step) / CGFloat(segments)
            sampled.append(CGPoint(
                x: basis(p0.x, p1.x, p2.x, p3.x, t),
                y: basis(p0.y, p1.y, p2.y, p3.y, t)
            ))
        }
    }
    // t never reaches 1 inside the loop, so close on the last input point.
    sampled.append(points.last!)
    return sampled
}
// MARK: - Mask Confirmation
func confirmMask() async {
@@ -186,12 +387,15 @@ final class EditorViewModel {
self.project = project
}
}
announceForVoiceOver("Removal complete")
} catch {
errorMessage = error.localizedDescription
}
maskPreview = nil
showingMaskConfirmation = false
showSelectAllPeople = false
isProcessing = false
processingMessage = ""
}
@@ -199,6 +403,18 @@ final class EditorViewModel {
/// Dismisses the pending mask preview without applying it and clears any
/// mask stashed for brush refinement.
/// NOTE(review): does not reset `lineBrushPath` — confirm whether an
/// in-progress line-brush stroke should also be discarded here.
func cancelMask() {
    maskPreview = nil
    showingMaskConfirmation = false
    showSelectAllPeople = false
    pendingRefineMask = nil
}
/// Stashes the current mask preview for the brush tool to build on, then
/// dismisses the confirmation UI and switches to the brush tool so the user
/// can manually adjust the selection before confirming removal.
func refineWithBrush() {
    // Capture the mask before clearing the preview.
    pendingRefineMask = maskPreview
    showSelectAllPeople = false
    showingMaskConfirmation = false
    maskPreview = nil
    selectedTool = .brush
    announceForVoiceOver("Switched to brush tool for mask refinement")
}
// MARK: - Undo/Redo
@@ -207,12 +423,14 @@ final class EditorViewModel {
guard var project = project, project.undo() else { return }
self.project = project
await rebuildEditedImage()
announceForVoiceOver("Undo complete")
}
/// Reapplies the most recently undone edit, if any, rebuilds the displayed
/// image, and announces completion for VoiceOver users.
/// Note: `var project` is a local mutable copy (value semantics); it is
/// written back only after `redo()` succeeds.
func redo() async {
    guard var project = project, project.redo() else { return }
    self.project = project
    await rebuildEditedImage()
    announceForVoiceOver("Redo complete")
}
private func rebuildEditedImage() async {

View File

@@ -8,12 +8,16 @@
import SwiftUI
import PhotosUI
import UIKit
import UniformTypeIdentifiers
struct PhotoEditorView: View {
@State private var viewModel = EditorViewModel()
@State private var selectedItem: PhotosPickerItem?
@State private var isShowingPicker = false
@State private var isShowingExport = false
@State private var isShowingDocumentPicker = false
@State private var isShowingImportOptions = false
@Environment(\.accessibilityReduceMotion) private var reduceMotion
var body: some View {
NavigationStack {
@@ -49,10 +53,24 @@ struct PhotoEditorView: View {
.navigationBarTitleDisplayMode(.inline)
.toolbar {
ToolbarItem(placement: .topBarLeading) {
PhotosPicker(selection: $selectedItem, matching: .images) {
Menu {
Button {
isShowingPicker = true
} label: {
Label("Photo Library", systemImage: "photo.on.rectangle")
}
Button {
isShowingDocumentPicker = true
} label: {
Label("Files", systemImage: "folder")
}
} label: {
Image(systemName: "photo.on.rectangle")
}
.disabled(viewModel.isProcessing)
.accessibilityLabel("Import photo")
.accessibilityHint("Opens options to import from Photo Library or Files")
}
if viewModel.originalImage != nil {
@@ -80,32 +98,116 @@ struct PhotoEditorView: View {
ExportView(image: image)
}
}
.sheet(isPresented: $isShowingDocumentPicker) {
DocumentPickerView { result in
handleDocumentPickerResult(result)
}
}
}
}
/// Handles the Files document picker outcome: loads the picked image, warns
/// about very large images, and surfaces failures via `viewModel.errorMessage`.
/// User cancellation (NSUserCancelledError) is silently ignored.
private func handleDocumentPickerResult(_ result: Result<URL, Error>) {
    switch result {
    case .success(let url):
        // Files outside the app sandbox require security-scoped access.
        guard url.startAccessingSecurityScopedResource() else {
            viewModel.errorMessage = "Unable to access file"
            return
        }
        defer { url.stopAccessingSecurityScopedResource() }
        do {
            // NOTE(review): synchronous read — large files may briefly block
            // the calling thread; confirm acceptable or move off-main.
            let data = try Data(contentsOf: url)
            guard let uiImage = UIImage(data: data) else {
                viewModel.errorMessage = "Unable to load image"
                return
            }
            // Check for large images (pixel count = points x scale per axis).
            let pixelCount = Int(uiImage.size.width * uiImage.scale * uiImage.size.height * uiImage.scale)
            if pixelCount > 48_000_000 {
                viewModel.errorMessage = "Image is very large (48MP+). May cause memory issues."
            } else if pixelCount > 12_000_000 {
                viewModel.errorMessage = "Large image detected. Processing may take longer."
            }
            // The size warnings above are advisory only; the image still loads.
            viewModel.loadImage(uiImage)
        } catch {
            viewModel.errorMessage = "Failed to load image: \(error.localizedDescription)"
        }
    case .failure(let error):
        // Only the error code is compared, so cancellation is filtered out
        // regardless of the error's domain.
        if (error as NSError).code != NSUserCancelledError {
            viewModel.errorMessage = "Failed to import: \(error.localizedDescription)"
        }
    }
}
private var maskConfirmationBar: some View {
HStack(spacing: 20) {
Button {
viewModel.cancelMask()
} label: {
Label("Cancel", systemImage: "xmark")
.font(.headline)
.padding(.horizontal, 16)
.padding(.vertical, 10)
}
.buttonStyle(.bordered)
Button {
Task {
await viewModel.confirmMask()
VStack(spacing: 12) {
// High contrast toggle for accessibility
HStack {
Toggle(isOn: $viewModel.useHighContrastMask) {
Label("High Contrast", systemImage: "circle.lefthalf.filled")
.font(.caption)
}
.toggleStyle(.button)
.buttonStyle(.bordered)
.accessibilityLabel("High contrast mask")
.accessibilityHint("Toggle to use colorblind-friendly cyan color for mask preview")
Spacer()
if viewModel.showSelectAllPeople && viewModel.detectedPeopleCount > 1 {
Button {
Task {
await viewModel.selectAllPeople()
}
} label: {
Label("Select All \(viewModel.detectedPeopleCount)", systemImage: "person.3.fill")
.font(.caption)
}
.buttonStyle(.bordered)
.accessibilityLabel("Select all \(viewModel.detectedPeopleCount) detected people")
}
} label: {
Label("Remove", systemImage: "checkmark")
.font(.headline)
.padding(.horizontal, 16)
.padding(.vertical, 10)
}
.buttonStyle(.borderedProminent)
HStack(spacing: 12) {
Button {
viewModel.cancelMask()
} label: {
Label("Cancel", systemImage: "xmark")
.font(.headline)
.padding(.horizontal, 12)
.padding(.vertical, 10)
}
.buttonStyle(.bordered)
.accessibilityLabel("Cancel mask selection")
Button {
viewModel.refineWithBrush()
} label: {
Label("Refine", systemImage: "paintbrush")
.font(.headline)
.padding(.horizontal, 12)
.padding(.vertical, 10)
}
.buttonStyle(.bordered)
.accessibilityLabel("Refine selection with brush")
.accessibilityHint("Switch to brush tool to adjust the selection")
Button {
Task {
await viewModel.confirmMask()
}
} label: {
Label("Remove", systemImage: "checkmark")
.font(.headline)
.padding(.horizontal, 12)
.padding(.vertical, 10)
}
.buttonStyle(.borderedProminent)
.accessibilityLabel("Confirm and remove selected area")
}
}
.padding()
.background(.ultraThinMaterial)
@@ -124,6 +226,9 @@ struct PhotoEditorView: View {
}
.padding(24)
.background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 16))
.accessibilityElement(children: .combine)
.accessibilityLabel("Processing: \(viewModel.processingMessage)")
.accessibilityAddTraits(.updatesFrequently)
}
private func errorToast(message: String) -> some View {
@@ -140,9 +245,14 @@ struct PhotoEditorView: View {
.background(.ultraThinMaterial, in: RoundedRectangle(cornerRadius: 12))
.padding()
.padding(.bottom, 80)
.accessibilityElement(children: .combine)
.accessibilityLabel("Error: \(message)")
.accessibilityAddTraits(.isStaticText)
}
.transition(.move(edge: .bottom).combined(with: .opacity))
.transition(reduceMotion ? .opacity : .move(edge: .bottom).combined(with: .opacity))
.onAppear {
// Announce error for VoiceOver
UIAccessibility.post(notification: .announcement, argument: "Error: \(message)")
Task {
try? await Task.sleep(for: .seconds(3))
viewModel.errorMessage = nil
@@ -151,14 +261,57 @@ struct PhotoEditorView: View {
}
}
// MARK: - Document Picker

/// SwiftUI wrapper around `UIDocumentPickerViewController` for importing a
/// single image from the Files app.
/// Delivers the picked URL (security-scoped; the caller must start/stop
/// access) or a user-cancellation failure through `onResult`.
struct DocumentPickerView: UIViewControllerRepresentable {
    /// Called with the picked file URL, or a `CocoaError(.userCancelled)`
    /// failure when the user dismisses the picker.
    let onResult: (Result<URL, Error>) -> Void

    func makeUIViewController(context: Context) -> UIDocumentPickerViewController {
        // `.image` already covers JPEG/PNG/HEIC; the concrete types are kept
        // so those formats remain importable even from providers that report
        // only the specific type.
        let picker = UIDocumentPickerViewController(forOpeningContentTypes: [
            UTType.jpeg,
            UTType.png,
            UTType.heic,
            UTType.image
        ])
        picker.delegate = context.coordinator
        picker.allowsMultipleSelection = false
        return picker
    }

    func updateUIViewController(_ uiViewController: UIDocumentPickerViewController, context: Context) {}

    func makeCoordinator() -> Coordinator {
        Coordinator(onResult: onResult)
    }

    /// Bridges `UIDocumentPickerDelegate` callbacks to the `onResult` closure.
    class Coordinator: NSObject, UIDocumentPickerDelegate {
        let onResult: (Result<URL, Error>) -> Void

        init(onResult: @escaping (Result<URL, Error>) -> Void) {
            self.onResult = onResult
        }

        func documentPicker(_ controller: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) {
            guard let url = urls.first else { return }
            onResult(.success(url))
        }

        func documentPickerWasCancelled(_ controller: UIDocumentPickerViewController) {
            // Fix: use CocoaError so the error carries the proper
            // NSCocoaErrorDomain (the original built an NSError with an empty
            // domain string, which violates NSError conventions). The bridged
            // code is still NSUserCancelledError, so the caller's
            // code-only check continues to filter cancellations.
            onResult(.failure(CocoaError(.userCancelled)))
        }
    }
}
struct EmptyStateView: View {
@Binding var isShowingPicker: Bool
@ScaledMetric private var iconSize: CGFloat = 60
var body: some View {
VStack(spacing: 20) {
Image(systemName: "photo.badge.plus")
.font(.system(size: 60))
.font(.system(size: iconSize))
.foregroundStyle(.secondary)
.accessibilityHidden(true)
Text("No Photo Selected")
.font(.title2)
@@ -179,7 +332,10 @@ struct EmptyStateView: View {
}
.buttonStyle(.borderedProminent)
.padding(.top, 8)
.accessibilityLabel("Select photo to edit")
.accessibilityHint("Opens the photo picker to import an image")
}
.accessibilityElement(children: .contain)
}
}

View File

@@ -6,6 +6,7 @@
//
import SwiftUI
import UIKit
enum EditTool: String, CaseIterable, Identifiable {
case person = "Person"
@@ -36,15 +37,18 @@ enum EditTool: String, CaseIterable, Identifiable {
struct ToolbarView: View {
@Bindable var viewModel: EditorViewModel
@Environment(\.accessibilityReduceMotion) private var reduceMotion
@ScaledMetric private var toolButtonWidth: CGFloat = 60
@ScaledMetric private var iconSize: CGFloat = 24
var body: some View {
VStack(spacing: 0) {
Divider()
// Inspector panel (contextual)
if viewModel.selectedTool == .brush || viewModel.selectedTool == .wire {
if viewModel.selectedTool == .brush || viewModel.selectedTool == .wire || viewModel.selectedTool == .person || viewModel.selectedTool == .object {
inspectorPanel
.transition(.move(edge: .bottom).combined(with: .opacity))
.transition(reduceMotion ? .opacity : .move(edge: .bottom).combined(with: .opacity))
}
// Main toolbar
@@ -91,17 +95,20 @@ struct ToolbarView: View {
private func toolButton(for tool: EditTool) -> some View {
Button {
viewModel.selectedTool = tool
// Announce tool selection for VoiceOver
UIAccessibility.post(notification: .announcement, argument: "\(tool.rawValue) tool selected")
} label: {
VStack(spacing: 4) {
Image(systemName: tool.icon)
.font(.title2)
.frame(width: 44, height: 32)
.font(.system(size: iconSize))
.frame(minWidth: 44, minHeight: 32)
Text(tool.rawValue)
.font(.caption2)
}
.foregroundStyle(viewModel.selectedTool == tool ? Color.accentColor : Color.secondary)
.frame(width: 60)
.frame(minWidth: toolButtonWidth)
.frame(minHeight: 44) // Minimum touch target
}
.disabled(viewModel.isProcessing)
.accessibilityLabel("\(tool.rawValue) tool")
@@ -113,7 +120,31 @@ struct ToolbarView: View {
VStack(spacing: 12) {
Divider()
if viewModel.selectedTool == .brush {
if viewModel.selectedTool == .person {
Text("Tap on a person to select them for removal")
.font(.caption)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
.accessibilityLabel("Instructions: Tap on a person to select them for removal")
}
if viewModel.selectedTool == .object {
VStack(spacing: 8) {
Text("Tap on an object to select it for removal")
.font(.caption)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
Text("Works best on objects that stand out from background")
.font(.caption2)
.foregroundStyle(.tertiary)
.multilineTextAlignment(.center)
}
.accessibilityElement(children: .combine)
.accessibilityLabel("Tap on an object to select it. Works best on objects that stand out from background. Low-contrast objects may require manual selection with brush.")
}
if viewModel.selectedTool == .brush || viewModel.selectedTool == .object {
HStack {
Text("Brush Size")
.font(.subheadline)
@@ -126,15 +157,68 @@ struct ToolbarView: View {
HStack {
Slider(value: $viewModel.brushSize, in: 1...100, step: 1)
.accessibilityLabel("Brush size slider")
.accessibilityLabel("Brush size")
.accessibilityValue("\(Int(viewModel.brushSize)) pixels")
Stepper("", value: $viewModel.brushSize, in: 1...100, step: 1)
.labelsHidden()
.accessibilityLabel("Brush size stepper")
.accessibilityValue("\(Int(viewModel.brushSize)) pixels")
}
Toggle(isOn: $viewModel.useEdgeRefinement) {
HStack {
Image(systemName: "wand.and.rays")
Text("Edge Refinement")
.font(.subheadline)
}
}
.toggleStyle(.switch)
.accessibilityLabel("Edge refinement")
.accessibilityHint("When enabled, brush strokes snap to nearby edges for cleaner selections")
}
if viewModel.selectedTool == .wire {
// Line brush toggle
HStack {
Text("Mode")
.font(.subheadline)
Spacer()
Button {
viewModel.toggleLineBrushMode()
} label: {
HStack(spacing: 4) {
Image(systemName: viewModel.isLineBrushMode ? "scribble" : "hand.tap")
Text(viewModel.isLineBrushMode ? "Line Brush" : "Tap to Detect")
.font(.caption)
}
.padding(.horizontal, 10)
.padding(.vertical, 6)
.background(
RoundedRectangle(cornerRadius: 8)
.fill(viewModel.isLineBrushMode ? Color.accentColor : Color(.tertiarySystemFill))
)
.foregroundStyle(viewModel.isLineBrushMode ? .white : .primary)
}
.accessibilityLabel(viewModel.isLineBrushMode ? "Line brush mode active" : "Tap to detect mode active")
.accessibilityHint("Double tap to toggle between line brush and tap detection modes")
}
if viewModel.isLineBrushMode && !viewModel.lineBrushPath.isEmpty {
Button {
Task {
await viewModel.finishLineBrush()
}
} label: {
Label("Apply Line", systemImage: "checkmark.circle.fill")
.font(.subheadline)
.frame(maxWidth: .infinity)
}
.buttonStyle(.borderedProminent)
.accessibilityLabel("Apply line brush selection")
}
HStack {
Text("Line Width")
.font(.subheadline)
@@ -145,8 +229,16 @@ struct ToolbarView: View {
.monospacedDigit()
}
Slider(value: $viewModel.wireWidth, in: 2...20, step: 1)
.accessibilityLabel("Wire width slider")
HStack {
Slider(value: $viewModel.wireWidth, in: 2...20, step: 1)
.accessibilityLabel("Wire width")
.accessibilityValue("\(Int(viewModel.wireWidth)) pixels")
Stepper("", value: $viewModel.wireWidth, in: 2...20, step: 1)
.labelsHidden()
.accessibilityLabel("Wire width stepper")
.accessibilityValue("\(Int(viewModel.wireWidth)) pixels")
}
}
HStack {
@@ -159,8 +251,16 @@ struct ToolbarView: View {
.monospacedDigit()
}
Slider(value: $viewModel.featherAmount, in: 0...20, step: 1)
.accessibilityLabel("Feather amount slider")
HStack {
Slider(value: $viewModel.featherAmount, in: 0...20, step: 1)
.accessibilityLabel("Feather amount")
.accessibilityValue("\(Int(viewModel.featherAmount)) pixels")
Stepper("", value: $viewModel.featherAmount, in: 0...20, step: 1)
.labelsHidden()
.accessibilityLabel("Feather amount stepper")
.accessibilityValue("\(Int(viewModel.featherAmount)) pixels")
}
}
.padding(.horizontal)
.padding(.vertical, 8)

View File

@@ -36,6 +36,50 @@ actor MaskingService {
try await generateForegroundMask(at: point, in: image)
}
/// Generates a mask for the person instance under `point` and reports how
/// many foreground instances Vision detected in total.
/// - Parameters:
///   - point: Tap location in image pixel coordinates (top-left origin).
///   - image: Source image to segment.
/// - Returns: `(mask, count)` — `mask` is `nil` when no instance contains the
///   point; `count` is the total number of detected instances.
/// - Throws: `MaskingError.requestFailed` if the Vision request fails.
func generatePersonMaskWithCount(at point: CGPoint, in image: CGImage) async throws -> (CGImage?, Int) {
    let request = VNGenerateForegroundInstanceMaskRequest()
    let handler = VNImageRequestHandler(cgImage: image, options: [:])
    do {
        try handler.perform([request])
    } catch {
        throw MaskingError.requestFailed(error)
    }
    guard let result = request.results?.first else {
        return (nil, 0)
    }
    let allInstances = result.allInstances
    let instanceCount = allInstances.count
    // Convert the tap to Vision's normalized coordinates (0-1, bottom-left origin).
    let visionPoint = CGPoint(
        x: point.x / CGFloat(image.width),
        y: 1.0 - point.y / CGFloat(image.height)
    )
    // Find instance at tap point.
    // NOTE(review): this renders a full-resolution mask per instance just to
    // hit-test one point — O(instances x pixels). Consider sampling the
    // result's instance-index pixel buffer once instead, if available.
    var targetInstance: IndexSet?
    for instance in allInstances {
        let indexSet = IndexSet(integer: instance)
        // A failure for one instance is ignored; that instance simply
        // cannot match the tap.
        if let maskPixelBuffer = try? result.generateScaledMaskForImage(forInstances: indexSet, from: handler) {
            if isPoint(visionPoint, inMask: maskPixelBuffer, imageSize: CGSize(width: image.width, height: image.height)) {
                targetInstance = indexSet
                break
            }
        }
    }
    guard let instance = targetInstance else {
        // No instance contained the tap; still report how many were found so
        // the UI can offer "select all".
        return (nil, instanceCount)
    }
    let maskPixelBuffer = try result.generateScaledMaskForImage(forInstances: instance, from: handler)
    return (convertPixelBufferToCGImage(maskPixelBuffer), instanceCount)
}
func generateForegroundMask(at point: CGPoint, in image: CGImage) async throws -> CGImage? {
let request = VNGenerateForegroundInstanceMaskRequest()
@@ -52,12 +96,6 @@ actor MaskingService {
}
// Normalize point to Vision coordinates (0-1, origin bottom-left)
let normalizedPoint = VNImagePointForNormalizedPoint(
CGPoint(x: point.x / CGFloat(image.width), y: 1.0 - point.y / CGFloat(image.height)),
image.width,
image.height
)
let visionPoint = CGPoint(
x: point.x / CGFloat(image.width),
y: 1.0 - point.y / CGFloat(image.height)