Complete remaining high-priority features
- Edge refinement: Wire toggle to actually snap brush strokes to edges using Sobel gradient analysis in EdgeRefinement.swift
- Brush preview circle: Show visual cursor following finger during drawing
- PHAsset storage: Capture localIdentifier for Photo Library imports
- Low-confidence mask warning: Show "Does this look right?" for uncertain detections based on mask coverage and edge sharpness analysis
- Fix Swift 6 concurrency warnings with nonisolated static methods

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
@@ -16,6 +16,8 @@ struct BrushCanvasView: View {
     @State private var currentStroke: [CGPoint] = []
     @State private var allStrokes: [[CGPoint]] = []
     @State private var isErasing = false
+    @State private var currentTouchLocation: CGPoint?
+    @State private var gradientImage: EdgeRefinement.GradientImage?
 
     var body: some View {
         Canvas { context, size in
@@ -28,11 +30,46 @@ struct BrushCanvasView: View {
             if !currentStroke.isEmpty {
                 drawStroke(currentStroke, in: &context, color: isErasing ? .black : .white)
             }
+
+            // Draw brush preview circle at current touch location
+            if let location = currentTouchLocation {
+                let previewRect = CGRect(
+                    x: location.x - viewModel.brushSize / 2,
+                    y: location.y - viewModel.brushSize / 2,
+                    width: viewModel.brushSize,
+                    height: viewModel.brushSize
+                )
+                context.stroke(
+                    Path(ellipseIn: previewRect),
+                    with: .color(.white.opacity(0.8)),
+                    lineWidth: 2
+                )
+            }
         }
         .gesture(drawingGesture)
         .overlay(alignment: .bottom) {
             brushControls
         }
+        .onAppear {
+            // Precompute gradient for edge refinement if enabled
+            if viewModel.useEdgeRefinement, let image = viewModel.displayImage {
+                computeGradientAsync(from: image)
+            }
+        }
+        .onChange(of: viewModel.useEdgeRefinement) { _, newValue in
+            if newValue, gradientImage == nil, let image = viewModel.displayImage {
+                computeGradientAsync(from: image)
+            }
+        }
+    }
+
+    private func computeGradientAsync(from image: CGImage) {
+        Task {
+            let gradient = await Task.detached(priority: .userInitiated) {
+                EdgeRefinement.computeGradient(from: image)
+            }.value
+            gradientImage = gradient
+        }
     }
 
     private func drawStroke(_ points: [CGPoint], in context: inout GraphicsContext, color: Color) {
@@ -69,12 +106,15 @@ struct BrushCanvasView: View {
         DragGesture(minimumDistance: 0)
             .onChanged { value in
                 let point = value.location
+                currentTouchLocation = point
+
                 // Only add points within the image bounds
                 if displayedImageFrame.contains(point) {
                     currentStroke.append(point)
                 }
             }
             .onEnded { _ in
+                currentTouchLocation = nil
                 if !currentStroke.isEmpty {
                     allStrokes.append(currentStroke)
                     currentStroke = []
@@ -131,6 +171,32 @@ struct BrushCanvasView: View {
     private func applyBrushMask() async {
         guard !allStrokes.isEmpty else { return }
 
+        let scaleX = imageSize.width / displayedImageFrame.width
+        let scaleY = imageSize.height / displayedImageFrame.height
+
+        // Convert all strokes to image coordinates
+        var imageCoordStrokes: [[CGPoint]] = []
+        for stroke in allStrokes {
+            let imageStroke = stroke.map { point in
+                CGPoint(
+                    x: (point.x - displayedImageFrame.minX) * scaleX,
+                    y: (point.y - displayedImageFrame.minY) * scaleY
+                )
+            }
+            imageCoordStrokes.append(imageStroke)
+        }
+
+        // Apply edge refinement if enabled and gradient is available
+        if viewModel.useEdgeRefinement, let gradient = gradientImage {
+            imageCoordStrokes = imageCoordStrokes.map { stroke in
+                EdgeRefinement.refineSelectionToEdges(
+                    selection: stroke,
+                    gradient: gradient,
+                    searchRadius: Int(viewModel.brushSize / 2)
+                )
+            }
+        }
+
         // Create mask image from strokes
         let renderer = UIGraphicsImageRenderer(size: imageSize)
         let maskImage = renderer.image { ctx in
@@ -141,25 +207,14 @@ struct BrushCanvasView: View {
             // Draw strokes in white (masked areas)
             UIColor.white.setStroke()
 
-            let scaleX = imageSize.width / displayedImageFrame.width
-            let scaleY = imageSize.height / displayedImageFrame.height
-
-            for stroke in allStrokes {
+            for stroke in imageCoordStrokes {
                 guard stroke.count >= 2 else { continue }
 
                 let path = UIBezierPath()
-                let firstPoint = CGPoint(
-                    x: (stroke[0].x - displayedImageFrame.minX) * scaleX,
-                    y: (stroke[0].y - displayedImageFrame.minY) * scaleY
-                )
-                path.move(to: firstPoint)
+                path.move(to: stroke[0])
 
                 for i in 1..<stroke.count {
-                    let point = CGPoint(
-                        x: (stroke[i].x - displayedImageFrame.minX) * scaleX,
-                        y: (stroke[i].y - displayedImageFrame.minY) * scaleY
-                    )
-                    path.addLine(to: point)
+                    path.addLine(to: stroke[i])
                 }
 
                 path.lineWidth = viewModel.brushSize * scaleX
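As a concrete illustration of the coordinate conversion above (numbers are made up, not from the app): with a displayed image frame 390 points wide and an underlying image 3,900 pixels wide, scaleX is 10, so a touch 50 points to the right of displayedImageFrame.minX lands at x = 500 in image coordinates, and a 40-point brush is drawn with a 400-pixel line width.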
@@ -41,6 +41,7 @@ final class EditorViewModel {
     var useHighContrastMask = false
     var useEdgeRefinement = true
     var pendingRefineMask: CGImage?
+    var isLowConfidenceMask = false
 
     private(set) var project: Project?
 
@@ -72,17 +73,36 @@ final class EditorViewModel {
 
     // MARK: - Image Loading
 
-    func loadImage(_ uiImage: UIImage) {
+    func loadImage(_ uiImage: UIImage, localIdentifier: String? = nil) {
         guard let cgImage = uiImage.cgImage else { return }
 
         originalImage = cgImage
         editedImage = nil
         maskPreview = nil
         errorMessage = nil
+        pendingRefineMask = nil
+        showSelectAllPeople = false
+        detectedPeopleCount = 0
 
-        // Create new project
-        let imageData = uiImage.jpegData(compressionQuality: 0.9) ?? Data()
-        project = Project(imageSource: .embedded(data: imageData))
+        // Check image size and warn if large
+        let pixelCount = cgImage.width * cgImage.height
+        if pixelCount > 48_000_000 {
+            errorMessage = "Very large image (48MP+). May cause memory issues."
+        } else if pixelCount > 12_000_000 {
+            // Just a note, not an error - processing continues
+        }
+
+        // Create new project with appropriate image source
+        let imageSource: Project.ImageSource
+        if let identifier = localIdentifier {
+            imageSource = .photoLibrary(localIdentifier: identifier)
+        } else {
+            let imageData = uiImage.jpegData(compressionQuality: 0.9) ?? Data()
+            imageSource = .embedded(data: imageData)
+        }
+        project = Project(imageSource: imageSource)
+
+        announceForVoiceOver("Photo loaded")
     }
 
     // MARK: - Tap Handling
@@ -120,11 +140,11 @@ final class EditorViewModel {
     }
 
     private func handlePersonTap(at point: CGPoint, in image: CGImage) async throws {
-        let (mask, peopleCount) = try await maskingService.generatePersonMaskWithCount(at: point, in: image)
+        let result = try await maskingService.generatePersonMaskWithCount(at: point, in: image)
 
-        detectedPeopleCount = peopleCount
+        detectedPeopleCount = result.instanceCount
 
-        guard let mask = mask else {
+        guard let mask = result.mask else {
             errorMessage = "No person found at tap location"
             return
         }
@@ -134,7 +154,10 @@ final class EditorViewModel {
 
         maskPreview = dilatedMask ?? mask
         showingMaskConfirmation = true
-        showSelectAllPeople = peopleCount > 1
+        showSelectAllPeople = result.instanceCount > 1
+
+        // Check for low confidence and flag for user warning
+        isLowConfidenceMask = result.confidence < 0.7
     }
 
     func selectAllPeople() async {
@@ -396,6 +419,7 @@ final class EditorViewModel {
         maskPreview = nil
         showingMaskConfirmation = false
         showSelectAllPeople = false
+        isLowConfidenceMask = false
         isProcessing = false
         processingMessage = ""
     }
@@ -405,6 +429,7 @@ final class EditorViewModel {
         showingMaskConfirmation = false
         showSelectAllPeople = false
         pendingRefineMask = nil
+        isLowConfidenceMask = false
     }
 
     func refineWithBrush() {
@@ -86,9 +86,14 @@ struct PhotoEditorView: View {
         }
         .onChange(of: selectedItem) { oldValue, newValue in
             Task {
-                if let data = try? await newValue?.loadTransferable(type: Data.self),
+                guard let item = newValue else { return }
+
+                // Get localIdentifier if available
+                let localIdentifier = item.itemIdentifier
+
+                if let data = try? await item.loadTransferable(type: Data.self),
                    let uiImage = UIImage(data: data) {
-                    viewModel.loadImage(uiImage)
+                    viewModel.loadImage(uiImage, localIdentifier: localIdentifier)
                 }
             }
         }
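The hunk above only records the identifier; nothing in this diff reads it back. A rough sketch of how a stored localIdentifier could later be resolved to image data (not part of this commit; it assumes Photo Library read authorization, and note that PhotosPickerItem.itemIdentifier is generally nil unless the picker was initialized with an explicit photoLibrary such as .shared()):

import Photos
import UIKit

// Illustrative sketch, not part of this commit: resolve a stored Photo Library
// localIdentifier back to a UIImage when a project is reopened.
func loadLibraryImage(localIdentifier: String, completion: @escaping (UIImage?) -> Void) {
    let assets = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
    guard let asset = assets.firstObject else {
        completion(nil) // asset deleted or access revoked
        return
    }
    let options = PHImageRequestOptions()
    options.deliveryMode = .highQualityFormat
    options.isNetworkAccessAllowed = true // allow fetching iCloud originals
    // Note: the result handler can be called more than once (degraded, then final image).
    _ = PHImageManager.default().requestImage(
        for: asset,
        targetSize: PHImageManagerMaximumSize,
        contentMode: .aspectFit,
        options: options
    ) { image, _ in
        completion(image)
    }
}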
@@ -144,6 +149,22 @@ struct PhotoEditorView: View {
 
     private var maskConfirmationBar: some View {
         VStack(spacing: 12) {
+            // Low confidence warning
+            if viewModel.isLowConfidenceMask {
+                HStack {
+                    Image(systemName: "exclamationmark.triangle.fill")
+                        .foregroundStyle(.yellow)
+                    Text("Does this look right? The selection may need refinement.")
+                        .font(.caption)
+                        .foregroundStyle(.secondary)
+                }
+                .padding(.horizontal, 12)
+                .padding(.vertical, 8)
+                .background(Color.yellow.opacity(0.1), in: RoundedRectangle(cornerRadius: 8))
+                .accessibilityElement(children: .combine)
+                .accessibilityLabel("Warning: Low confidence detection. The selection may need refinement.")
+            }
+
             // High contrast toggle for accessibility
             HStack {
                 Toggle(isOn: $viewModel.useHighContrastMask) {
@@ -36,7 +36,13 @@ actor MaskingService {
         try await generateForegroundMask(at: point, in: image)
     }
 
-    func generatePersonMaskWithCount(at point: CGPoint, in image: CGImage) async throws -> (CGImage?, Int) {
+    struct MaskResult {
+        let mask: CGImage?
+        let instanceCount: Int
+        let confidence: Float // 0.0 to 1.0, based on mask coverage relative to image
+    }
+
+    func generatePersonMaskWithCount(at point: CGPoint, in image: CGImage) async throws -> MaskResult {
         let request = VNGenerateForegroundInstanceMaskRequest()
 
         let handler = VNImageRequestHandler(cgImage: image, options: [:])
@@ -48,7 +54,7 @@ actor MaskingService {
         }
 
         guard let result = request.results?.first else {
-            return (nil, 0)
+            return MaskResult(mask: nil, instanceCount: 0, confidence: 0)
         }
 
         let allInstances = result.allInstances
@@ -73,11 +79,75 @@ actor MaskingService {
         }
 
         guard let instance = targetInstance else {
-            return (nil, instanceCount)
+            return MaskResult(mask: nil, instanceCount: instanceCount, confidence: 0)
         }
 
         let maskPixelBuffer = try result.generateScaledMaskForImage(forInstances: instance, from: handler)
-        return (convertPixelBufferToCGImage(maskPixelBuffer), instanceCount)
+        let maskImage = convertPixelBufferToCGImage(maskPixelBuffer)
+
+        // Calculate confidence based on mask quality (coverage ratio)
+        let confidence = calculateMaskConfidence(maskPixelBuffer, imageSize: CGSize(width: image.width, height: image.height))
+
+        return MaskResult(mask: maskImage, instanceCount: instanceCount, confidence: confidence)
+    }
+
+    private func calculateMaskConfidence(_ pixelBuffer: CVPixelBuffer, imageSize: CGSize) -> Float {
+        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
+        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }
+
+        let width = CVPixelBufferGetWidth(pixelBuffer)
+        let height = CVPixelBufferGetHeight(pixelBuffer)
+
+        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
+            return 0.5 // Default medium confidence if can't read
+        }
+
+        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
+        var maskPixelCount = 0
+        var edgePixelCount = 0
+
+        // Count mask pixels and check edge sharpness
+        for y in 0..<height {
+            for x in 0..<width {
+                let pixelOffset = y * bytesPerRow + x
+                let pixelValue = baseAddress.load(fromByteOffset: pixelOffset, as: UInt8.self)
+
+                if pixelValue > 127 {
+                    maskPixelCount += 1
+                }
+
+                // Check for edge pixels (values between 50-200 indicate soft edges)
+                if pixelValue > 50 && pixelValue < 200 {
+                    edgePixelCount += 1
+                }
+            }
+        }
+
+        let totalPixels = width * height
+        let maskRatio = Float(maskPixelCount) / Float(totalPixels)
+        let edgeRatio = Float(edgePixelCount) / Float(max(1, maskPixelCount))
+
+        // Confidence is higher when:
+        // - Mask covers reasonable portion (not too small, not too large)
+        // - Edge pixels are minimal (sharp edges = confident detection)
+        var confidence: Float = 1.0
+
+        // Penalize very small masks (< 1% of image)
+        if maskRatio < 0.01 {
+            confidence -= 0.3
+        }
+
+        // Penalize very large masks (> 50% of image)
+        if maskRatio > 0.5 {
+            confidence -= 0.2
+        }
+
+        // Penalize fuzzy edges
+        if edgeRatio > 0.3 {
+            confidence -= 0.2
+        }
+
+        return max(0.0, min(1.0, confidence))
     }
 
     func generateForegroundMask(at point: CGPoint, in image: CGImage) async throws -> CGImage? {
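To make the heuristic above concrete (illustrative numbers, not from the app): a mask covering 0.5% of the frame whose soft-edge pixels make up 40% of its mask pixels is penalized twice, 1.0 - 0.3 - 0.2 = 0.5, which falls below the 0.7 threshold EditorViewModel checks, so the "Does this look right?" banner appears; a sharp mask covering 10% of the frame keeps a confidence of 1.0 and shows no warning.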
@@ -10,9 +10,9 @@ import CoreGraphics
 import Accelerate
 import UIKit
 
-struct EdgeRefinement {
+struct EdgeRefinement: Sendable {
 
-    struct GradientImage {
+    struct GradientImage: Sendable {
         let width: Int
         let height: Int
         let magnitude: [Float]
@@ -20,12 +20,12 @@ struct EdgeRefinement {
         let directionY: [Float]
     }
 
-    static func computeGradient(from image: CGImage) -> GradientImage? {
+    nonisolated static func computeGradient(from image: CGImage) -> GradientImage? {
         let width = image.width
         let height = image.height
 
         // Convert to grayscale
-        guard let grayscale = convertToGrayscale(image) else {
+        guard let grayscale = convertToGrayscaleNonisolated(image) else {
             return nil
         }
 
@@ -38,28 +38,7 @@ struct EdgeRefinement {
         let sobelX: [Int16] = [-1, 0, 1, -2, 0, 2, -1, 0, 1]
         let sobelY: [Int16] = [-1, -2, -1, 0, 0, 0, 1, 2, 1]
 
-        var sourceBuffer = vImage_Buffer(
-            data: UnsafeMutableRawPointer(mutating: grayscale),
-            height: vImagePixelCount(height),
-            width: vImagePixelCount(width),
-            rowBytes: width
-        )
-
-        var destBufferX = vImage_Buffer(
-            data: &gradientX,
-            height: vImagePixelCount(height),
-            width: vImagePixelCount(width),
-            rowBytes: width * MemoryLayout<Float>.size
-        )
-
-        var destBufferY = vImage_Buffer(
-            data: &gradientY,
-            height: vImagePixelCount(height),
-            width: vImagePixelCount(width),
-            rowBytes: width * MemoryLayout<Float>.size
-        )
-
-        // Apply Sobel filters (simplified - using direct calculation)
+        // Apply Sobel filters using direct calculation
         for y in 1..<(height - 1) {
             for x in 1..<(width - 1) {
                 var gx: Float = 0
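The loop body that replaces the vImage buffers is not part of this hunk. For orientation, a self-contained sketch of what a direct 3x3 Sobel pass over a row-major grayscale buffer typically looks like (illustrative only, not the commit's implementation):

// Illustrative sketch, not the commit's code: direct 3x3 Sobel convolution over
// a row-major grayscale buffer, returning per-pixel gradient magnitudes.
// Border pixels are left at zero.
func sobelMagnitudes(grayscale: [UInt8], width: Int, height: Int) -> [Float] {
    let sobelX: [Float] = [-1, 0, 1, -2, 0, 2, -1, 0, 1]
    let sobelY: [Float] = [-1, -2, -1, 0, 0, 0, 1, 2, 1]
    var magnitude = [Float](repeating: 0, count: width * height)
    guard width >= 3, height >= 3 else { return magnitude }
    for y in 1..<(height - 1) {
        for x in 1..<(width - 1) {
            var gx: Float = 0
            var gy: Float = 0
            for ky in -1...1 {
                for kx in -1...1 {
                    let pixel = Float(grayscale[(y + ky) * width + (x + kx)])
                    let k = (ky + 1) * 3 + (kx + 1)
                    gx += pixel * sobelX[k]
                    gy += pixel * sobelY[k]
                }
            }
            magnitude[y * width + x] = (gx * gx + gy * gy).squareRoot()
        }
    }
    return magnitude
}

The commit keeps the Int16 kernels shown above; the sketch uses Float throughout purely for brevity.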
@@ -90,7 +69,7 @@ struct EdgeRefinement {
         )
     }
 
-    static func refineSelectionToEdges(
+    nonisolated static func refineSelectionToEdges(
         selection: [CGPoint],
         gradient: GradientImage,
         searchRadius: Int = 5
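The body of refineSelectionToEdges is untouched by this diff, so the snapping described in the commit message is only visible here through its signature. A minimal sketch of the underlying idea, assuming GradientImage.magnitude is stored row-major (illustrative, not the commit's code): move each selection point to the strongest gradient magnitude within the search radius.

import CoreGraphics

// Illustrative sketch, not the commit's code: snap a point to the location of
// the strongest Sobel gradient magnitude within searchRadius pixels.
func snapToStrongestEdge(_ point: CGPoint,
                         gradient: EdgeRefinement.GradientImage,
                         searchRadius: Int) -> CGPoint {
    var best = point
    var bestMagnitude = Float(-1)
    let radius = max(0, searchRadius)
    let px = Int(point.x)
    let py = Int(point.y)
    for dy in -radius...radius {
        for dx in -radius...radius {
            let x = px + dx
            let y = py + dy
            guard x >= 0, x < gradient.width, y >= 0, y < gradient.height else { continue }
            let magnitude = gradient.magnitude[y * gradient.width + x]
            if magnitude > bestMagnitude {
                bestMagnitude = magnitude
                best = CGPoint(x: x, y: y)
            }
        }
    }
    return best
}

The real implementation may also use the direction fields or cap how far a point can move; the sketch only shows the gradient-maximum search.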
@@ -131,7 +110,7 @@ struct EdgeRefinement {
         return refinedPoints
     }
 
-    static func createMaskFromPoints(
+    nonisolated static func createMaskFromPoints(
         _ points: [CGPoint],
         brushSize: CGFloat,
         imageSize: CGSize
@@ -169,7 +148,7 @@ struct EdgeRefinement {
         return context.makeImage()
     }
 
-    private static func convertToGrayscale(_ image: CGImage) -> [UInt8]? {
+    private nonisolated static func convertToGrayscaleNonisolated(_ image: CGImage) -> [UInt8]? {
         let width = image.width
         let height = image.height
         var pixelData = [UInt8](repeating: 0, count: width * height)