2026-01-31 10:21:12 -05:00
parent 103a702668
commit ad0c0ba8a6
16 changed files with 1454 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
{
"permissions": {
"allow": [
"Bash(xcodebuild:*)"
]
}
}

View File

@@ -0,0 +1,395 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 56;
objects = {
/* Begin PBXBuildFile section */
1A0000000000000000000001 /* WordMarkApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000011 /* WordMarkApp.swift */; };
1A0000000000000000000002 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000012 /* ContentView.swift */; };
1A0000000000000000000003 /* CameraService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000013 /* CameraService.swift */; };
1A0000000000000000000004 /* CameraViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000014 /* CameraViewModel.swift */; };
1A0000000000000000000005 /* CameraView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000015 /* CameraView.swift */; };
1A0000000000000000000006 /* PhotoReviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000016 /* PhotoReviewView.swift */; };
1A0000000000000000000007 /* PhotoReviewViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000017 /* PhotoReviewViewModel.swift */; };
1A0000000000000000000008 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1A0000000000000000000018 /* Assets.xcassets */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
1A0000000000000000000010 /* WordMark.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = WordMark.app; sourceTree = BUILT_PRODUCTS_DIR; };
1A0000000000000000000011 /* WordMarkApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WordMarkApp.swift; sourceTree = "<group>"; };
1A0000000000000000000012 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
1A0000000000000000000013 /* CameraService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraService.swift; sourceTree = "<group>"; };
1A0000000000000000000014 /* CameraViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewModel.swift; sourceTree = "<group>"; };
1A0000000000000000000015 /* CameraView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraView.swift; sourceTree = "<group>"; };
1A0000000000000000000016 /* PhotoReviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoReviewView.swift; sourceTree = "<group>"; };
1A0000000000000000000017 /* PhotoReviewViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoReviewViewModel.swift; sourceTree = "<group>"; };
1A0000000000000000000018 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
1A0000000000000000000019 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
1A000000000000000000000D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
1A0000000000000000000020 = {
isa = PBXGroup;
children = (
1A0000000000000000000021 /* WordMark */,
1A0000000000000000000030 /* Products */,
);
sourceTree = "<group>";
};
1A0000000000000000000021 /* WordMark */ = {
isa = PBXGroup;
children = (
1A0000000000000000000011 /* WordMarkApp.swift */,
1A0000000000000000000012 /* ContentView.swift */,
1A0000000000000000000022 /* Services */,
1A0000000000000000000023 /* ViewModels */,
1A0000000000000000000024 /* Views */,
1A0000000000000000000018 /* Assets.xcassets */,
1A0000000000000000000019 /* Info.plist */,
);
path = WordMark;
sourceTree = "<group>";
};
1A0000000000000000000022 /* Services */ = {
isa = PBXGroup;
children = (
1A0000000000000000000013 /* CameraService.swift */,
);
path = Services;
sourceTree = "<group>";
};
1A0000000000000000000023 /* ViewModels */ = {
isa = PBXGroup;
children = (
1A0000000000000000000014 /* CameraViewModel.swift */,
1A0000000000000000000017 /* PhotoReviewViewModel.swift */,
);
path = ViewModels;
sourceTree = "<group>";
};
1A0000000000000000000024 /* Views */ = {
isa = PBXGroup;
children = (
1A0000000000000000000015 /* CameraView.swift */,
1A0000000000000000000016 /* PhotoReviewView.swift */,
);
path = Views;
sourceTree = "<group>";
};
1A0000000000000000000030 /* Products */ = {
isa = PBXGroup;
children = (
1A0000000000000000000010 /* WordMark.app */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
1A000000000000000000000F /* WordMark */ = {
isa = PBXNativeTarget;
buildConfigurationList = 1A0000000000000000000040 /* Build configuration list for PBXNativeTarget "WordMark" */;
buildPhases = (
1A000000000000000000000C /* Sources */,
1A000000000000000000000D /* Frameworks */,
1A000000000000000000000E /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = WordMark;
productName = WordMark;
productReference = 1A0000000000000000000010 /* WordMark.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
1A0000000000000000000050 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1500;
LastUpgradeCheck = 1500;
TargetAttributes = {
1A000000000000000000000F = {
CreatedOnToolsVersion = 15.0;
};
};
};
buildConfigurationList = 1A0000000000000000000051 /* Build configuration list for PBXProject "WordMark" */;
compatibilityVersion = "Xcode 14.0";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 1A0000000000000000000020;
productRefGroup = 1A0000000000000000000030 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
1A000000000000000000000F /* WordMark */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
1A000000000000000000000E /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
1A0000000000000000000008 /* Assets.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
1A000000000000000000000C /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
1A0000000000000000000001 /* WordMarkApp.swift in Sources */,
1A0000000000000000000002 /* ContentView.swift in Sources */,
1A0000000000000000000003 /* CameraService.swift in Sources */,
1A0000000000000000000004 /* CameraViewModel.swift in Sources */,
1A0000000000000000000005 /* CameraView.swift in Sources */,
1A0000000000000000000006 /* PhotoReviewView.swift in Sources */,
1A0000000000000000000007 /* PhotoReviewViewModel.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
1A0000000000000000000060 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
1A0000000000000000000061 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
1A0000000000000000000062 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = WordMark/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.social-networking";
INFOPLIST_KEY_NSCameraUsageDescription = "WordMark needs access to your camera to capture photos from both front and rear cameras simultaneously.";
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "WordMark needs access to save your dual camera photos to your photo library.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.wordmark;
PRODUCT_NAME = "$(TARGET_NAME)";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
1A0000000000000000000063 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = WordMark/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.social-networking";
INFOPLIST_KEY_NSCameraUsageDescription = "WordMark needs access to your camera to capture photos from both front and rear cameras simultaneously.";
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "WordMark needs access to save your dual camera photos to your photo library.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.wordmark;
PRODUCT_NAME = "$(TARGET_NAME)";
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
1A0000000000000000000040 /* Build configuration list for PBXNativeTarget "WordMark" */ = {
isa = XCConfigurationList;
buildConfigurations = (
1A0000000000000000000062 /* Debug */,
1A0000000000000000000063 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
1A0000000000000000000051 /* Build configuration list for PBXProject "WordMark" */ = {
isa = XCConfigurationList;
buildConfigurations = (
1A0000000000000000000060 /* Debug */,
1A0000000000000000000061 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 1A0000000000000000000050 /* Project object */;
}

View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>WordMark.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,13 @@
{
"images" : [
{
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,25 @@
import SwiftUI
struct ContentView: View {
@StateObject private var cameraViewModel = CameraViewModel()
var body: some View {
NavigationStack {
CameraView(viewModel: cameraViewModel)
.navigationBarHidden(true)
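// Present the review screen once the view model has both captures (it flips showReview).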
.navigationDestination(isPresented: $cameraViewModel.showReview) {
PhotoReviewView(
rearImage: cameraViewModel.rearImage,
frontImage: cameraViewModel.frontImage,
onRetake: {
cameraViewModel.resetCapture()
}
)
}
}
}
}
#Preview {
ContentView()
}

WordMark/Info.plist Normal file
View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>UILaunchScreen</key>
<dict>
<key>UIColorName</key>
<string>LaunchScreenBackground</string>
</dict>
</dict>
</plist>

View File

@@ -0,0 +1,267 @@
import AVFoundation
import UIKit
protocol CameraServiceDelegate: AnyObject {
func cameraService(_ service: CameraService, didCaptureRearImage image: UIImage)
func cameraService(_ service: CameraService, didCaptureFrontImage image: UIImage)
func cameraService(_ service: CameraService, didFailWithError error: CameraError)
}
enum CameraError: Error, LocalizedError {
case multiCamNotSupported
case cameraNotAvailable
case sessionConfigurationFailed
case captureError(String)
case permissionDenied
var errorDescription: String? {
switch self {
case .multiCamNotSupported:
return "This device does not support simultaneous front and rear camera capture."
case .cameraNotAvailable:
return "Camera is not available on this device."
case .sessionConfigurationFailed:
return "Failed to configure the camera session."
case .captureError(let message):
return "Capture failed: \(message)"
case .permissionDenied:
return "Camera permission denied. Please enable in Settings."
}
}
}
class CameraService: NSObject {
weak var delegate: CameraServiceDelegate?
private var multiCamSession: AVCaptureMultiCamSession?
private var rearCameraInput: AVCaptureDeviceInput?
private var frontCameraInput: AVCaptureDeviceInput?
private var rearPhotoOutput: AVCapturePhotoOutput?
private var frontPhotoOutput: AVCapturePhotoOutput?
private var rearPreviewLayer: AVCaptureVideoPreviewLayer?
private var frontPreviewLayer: AVCaptureVideoPreviewLayer?
private var pendingRearImage: UIImage?
private var pendingFrontImage: UIImage?
private var isCapturingRear = false
private var isCapturingFront = false
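// Serial queue that owns session configuration and start/stop, keeping AVFoundation work off the main thread.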
private let sessionQueue = DispatchQueue(label: "com.wordmark.sessionQueue")
var isMultiCamSupported: Bool {
return AVCaptureMultiCamSession.isMultiCamSupported
}
func checkPermissions(completion: @escaping (Bool) -> Void) {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
completion(true)
case .notDetermined:
AVCaptureDevice.requestAccess(for: .video) { granted in
DispatchQueue.main.async {
completion(granted)
}
}
case .denied, .restricted:
completion(false)
@unknown default:
completion(false)
}
}
func setupSession() {
sessionQueue.async { [weak self] in
self?.configureSession()
}
}
private func configureSession() {
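// Simultaneous front + rear capture needs multi-cam-capable hardware (roughly A12 and newer); fail with a clear error rather than crashing on older devices.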
guard AVCaptureMultiCamSession.isMultiCamSupported else {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.cameraService(self, didFailWithError: .multiCamNotSupported)
}
return
}
let session = AVCaptureMultiCamSession()
session.beginConfiguration()
// Configure rear camera
guard let rearCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
let rearInput = try? AVCaptureDeviceInput(device: rearCamera) else {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.cameraService(self, didFailWithError: .cameraNotAvailable)
}
return
}
// Configure front camera
guard let frontCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
let frontInput = try? AVCaptureDeviceInput(device: frontCamera) else {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.cameraService(self, didFailWithError: .cameraNotAvailable)
}
return
}
// Add rear camera input
if session.canAddInput(rearInput) {
session.addInputWithNoConnections(rearInput)
self.rearCameraInput = rearInput
}
// Add front camera input
if session.canAddInput(frontInput) {
session.addInputWithNoConnections(frontInput)
self.frontCameraInput = frontInput
}
// Setup rear photo output
let rearPhotoOutput = AVCapturePhotoOutput()
if session.canAddOutput(rearPhotoOutput) {
session.addOutputWithNoConnections(rearPhotoOutput)
self.rearPhotoOutput = rearPhotoOutput
// Connect rear camera to photo output
if let rearPort = rearInput.ports(for: .video, sourceDeviceType: .builtInWideAngleCamera, sourceDevicePosition: .back).first {
let connection = AVCaptureConnection(inputPorts: [rearPort], output: rearPhotoOutput)
if session.canAddConnection(connection) {
session.addConnection(connection)
}
}
}
// Setup front photo output
let frontPhotoOutput = AVCapturePhotoOutput()
if session.canAddOutput(frontPhotoOutput) {
session.addOutputWithNoConnections(frontPhotoOutput)
self.frontPhotoOutput = frontPhotoOutput
// Connect front camera to photo output
if let frontPort = frontInput.ports(for: .video, sourceDeviceType: .builtInWideAngleCamera, sourceDevicePosition: .front).first {
let connection = AVCaptureConnection(inputPorts: [frontPort], output: frontPhotoOutput)
connection.isVideoMirrored = true
if session.canAddConnection(connection) {
session.addConnection(connection)
}
}
}
session.commitConfiguration()
self.multiCamSession = session
}
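// Preview layers are created without an implicit connection and then wired to a single camera port, so each layer shows exactly one camera.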
func getPreviewLayer() -> AVCaptureVideoPreviewLayer? {
guard let session = multiCamSession else { return nil }
if rearPreviewLayer == nil {
let previewLayer = AVCaptureVideoPreviewLayer(sessionWithNoConnection: session)
previewLayer.videoGravity = .resizeAspectFill
// Connect to rear camera
if let rearInput = rearCameraInput,
let rearPort = rearInput.ports(for: .video, sourceDeviceType: .builtInWideAngleCamera, sourceDevicePosition: .back).first {
let connection = AVCaptureConnection(inputPort: rearPort, videoPreviewLayer: previewLayer)
if session.canAddConnection(connection) {
session.addConnection(connection)
}
}
self.rearPreviewLayer = previewLayer
}
return rearPreviewLayer
}
func getFrontPreviewLayer() -> AVCaptureVideoPreviewLayer? {
guard let session = multiCamSession else { return nil }
if frontPreviewLayer == nil {
let previewLayer = AVCaptureVideoPreviewLayer(sessionWithNoConnection: session)
previewLayer.videoGravity = .resizeAspectFill
// Connect to front camera
if let frontInput = frontCameraInput,
let frontPort = frontInput.ports(for: .video, sourceDeviceType: .builtInWideAngleCamera, sourceDevicePosition: .front).first {
let connection = AVCaptureConnection(inputPort: frontPort, videoPreviewLayer: previewLayer)
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = true
if session.canAddConnection(connection) {
session.addConnection(connection)
}
}
self.frontPreviewLayer = previewLayer
}
return frontPreviewLayer
}
func startSession() {
sessionQueue.async { [weak self] in
self?.multiCamSession?.startRunning()
}
}
func stopSession() {
sessionQueue.async { [weak self] in
self?.multiCamSession?.stopRunning()
}
}
func capturePhotos() {
guard let rearOutput = rearPhotoOutput,
let frontOutput = frontPhotoOutput else {
delegate?.cameraService(self, didFailWithError: .captureError("Photo outputs not configured"))
return
}
pendingRearImage = nil
pendingFrontImage = nil
isCapturingRear = true
isCapturingFront = true
let rearSettings = AVCapturePhotoSettings()
let frontSettings = AVCapturePhotoSettings()
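// Fire both captures back to back on the session queue; the delegate callback tells rear and front apart by comparing `output`.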
sessionQueue.async {
rearOutput.capturePhoto(with: rearSettings, delegate: self)
frontOutput.capturePhoto(with: frontSettings, delegate: self)
}
}
}
extension CameraService: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let error = error {
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.cameraService(self, didFailWithError: .captureError(error.localizedDescription))
}
return
}
guard let imageData = photo.fileDataRepresentation(),
let image = UIImage(data: imageData) else {
return
}
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
if output == self.rearPhotoOutput {
self.pendingRearImage = image
self.isCapturingRear = false
self.delegate?.cameraService(self, didCaptureRearImage: image)
} else if output == self.frontPhotoOutput {
self.pendingFrontImage = image
self.isCapturingFront = false
self.delegate?.cameraService(self, didCaptureFrontImage: image)
}
}
}
}

View File

@@ -0,0 +1,101 @@
import SwiftUI
import AVFoundation
@MainActor
class CameraViewModel: ObservableObject {
@Published var rearImage: UIImage?
@Published var frontImage: UIImage?
@Published var showReview = false
@Published var errorMessage: String?
@Published var showError = false
@Published var isSessionRunning = false
@Published var permissionGranted = false
let cameraService = CameraService()
init() {
cameraService.delegate = self
}
func checkPermissions() {
cameraService.checkPermissions { [weak self] granted in
DispatchQueue.main.async {
self?.permissionGranted = granted
if granted {
self?.setupCamera()
} else {
self?.errorMessage = CameraError.permissionDenied.localizedDescription
self?.showError = true
}
}
}
}
func setupCamera() {
guard cameraService.isMultiCamSupported else {
errorMessage = CameraError.multiCamNotSupported.localizedDescription
showError = true
return
}
cameraService.setupSession()
}
func startSession() {
cameraService.startSession()
isSessionRunning = true
}
func stopSession() {
cameraService.stopSession()
isSessionRunning = false
}
func capturePhotos() {
cameraService.capturePhotos()
}
func resetCapture() {
rearImage = nil
frontImage = nil
showReview = false
startSession()
}
func getPreviewLayer() -> AVCaptureVideoPreviewLayer? {
return cameraService.getPreviewLayer()
}
func getFrontPreviewLayer() -> AVCaptureVideoPreviewLayer? {
return cameraService.getFrontPreviewLayer()
}
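// Only move to the review screen once both the rear and front images have arrived.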
private func checkBothCaptured() {
if rearImage != nil && frontImage != nil {
stopSession()
showReview = true
}
}
}
extension CameraViewModel: CameraServiceDelegate {
nonisolated func cameraService(_ service: CameraService, didCaptureRearImage image: UIImage) {
Task { @MainActor in
self.rearImage = image
self.checkBothCaptured()
}
}
nonisolated func cameraService(_ service: CameraService, didCaptureFrontImage image: UIImage) {
Task { @MainActor in
self.frontImage = image
self.checkBothCaptured()
}
}
nonisolated func cameraService(_ service: CameraService, didFailWithError error: CameraError) {
Task { @MainActor in
self.errorMessage = error.localizedDescription
self.showError = true
}
}
}

View File

@@ -0,0 +1,17 @@
import SwiftUI
import Photos
@MainActor
class PhotoReviewViewModel: ObservableObject {
@Published var isSaving = false
@Published var saveSuccess = false
@Published var saveError: String?
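// NOTE: PhotoReviewView currently requests photo-library access and saves on its own, so this helper and the published flags above are not yet wired up.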
func requestPhotoLibraryPermission(completion: @escaping (Bool) -> Void) {
PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
DispatchQueue.main.async {
completion(status == .authorized || status == .limited)
}
}
}
}

View File

@@ -0,0 +1,146 @@
import SwiftUI
import AVFoundation
struct CameraView: View {
@ObservedObject var viewModel: CameraViewModel
var body: some View {
ZStack {
Color.black.ignoresSafeArea()
if viewModel.permissionGranted {
CameraPreviewView(viewModel: viewModel)
.ignoresSafeArea()
VStack {
Spacer()
HStack(spacing: 20) {
Spacer()
Button(action: {
viewModel.capturePhotos()
}) {
ZStack {
Circle()
.stroke(Color.white, lineWidth: 4)
.frame(width: 80, height: 80)
Circle()
.fill(Color.white)
.frame(width: 65, height: 65)
}
}
FrontCameraPreviewView(viewModel: viewModel)
.frame(width: 80, height: 100)
.clipShape(RoundedRectangle(cornerRadius: 10))
.overlay(
RoundedRectangle(cornerRadius: 10)
.stroke(Color.white, lineWidth: 2)
)
}
.padding(.horizontal, 30)
.padding(.bottom, 40)
}
} else {
VStack(spacing: 20) {
Image(systemName: "camera.fill")
.font(.system(size: 60))
.foregroundColor(.gray)
Text("Camera Access Required")
.font(.title2)
.foregroundColor(.white)
Text("Please enable camera access in Settings to use this app.")
.font(.body)
.foregroundColor(.gray)
.multilineTextAlignment(.center)
.padding(.horizontal, 40)
Button("Open Settings") {
if let url = URL(string: UIApplication.openSettingsURLString) {
UIApplication.shared.open(url)
}
}
.padding()
.background(Color.blue)
.foregroundColor(.white)
.cornerRadius(10)
}
}
}
.onAppear {
viewModel.checkPermissions()
}
.onChange(of: viewModel.permissionGranted) { _, newValue in
if newValue {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
viewModel.startSession()
}
}
}
.alert("Error", isPresented: $viewModel.showError) {
Button("OK", role: .cancel) { }
} message: {
Text(viewModel.errorMessage ?? "An unknown error occurred")
}
}
}
struct CameraPreviewView: UIViewRepresentable {
@ObservedObject var viewModel: CameraViewModel
func makeUIView(context: Context) -> UIView {
let view = UIView(frame: .zero)
view.backgroundColor = .black
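// Attach the preview layer after a short delay, giving the multi-cam session time to finish configuring on its background queue.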
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
if let previewLayer = viewModel.getPreviewLayer() {
previewLayer.frame = view.bounds
view.layer.addSublayer(previewLayer)
}
}
return view
}
func updateUIView(_ uiView: UIView, context: Context) {
DispatchQueue.main.async {
if let previewLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
previewLayer.frame = uiView.bounds
}
}
}
}
struct FrontCameraPreviewView: UIViewRepresentable {
@ObservedObject var viewModel: CameraViewModel
func makeUIView(context: Context) -> UIView {
let view = UIView(frame: .zero)
view.backgroundColor = .black
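// Same delayed attachment as CameraPreviewView, but for the front-camera preview layer.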
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
if let previewLayer = viewModel.getFrontPreviewLayer() {
previewLayer.frame = view.bounds
view.layer.addSublayer(previewLayer)
}
}
return view
}
func updateUIView(_ uiView: UIView, context: Context) {
DispatchQueue.main.async {
if let previewLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
previewLayer.frame = uiView.bounds
}
}
}
}
#Preview {
CameraView(viewModel: CameraViewModel())
}

View File

@@ -0,0 +1,392 @@
import SwiftUI
import Photos
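// A draggable, scalable, rotatable caption. Each pair keeps a committed value plus an in-flight gesture value (offset/dragOffset, scale/currentScale, rotation/currentRotation) so gestures compose without jumping.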
struct TextLabel: Identifiable {
let id = UUID()
var text: String
var position: CGPoint
var offset: CGSize = .zero
var dragOffset: CGSize = .zero
var scale: CGFloat = 1.0
var currentScale: CGFloat = 1.0
var rotation: Angle = .zero
var currentRotation: Angle = .zero
}
struct PhotoReviewView: View {
let rearImage: UIImage?
let frontImage: UIImage?
let onRetake: () -> Void
@StateObject private var viewModel = PhotoReviewViewModel()
@State private var frontImageOffset: CGSize = .zero
@State private var dragOffset: CGSize = .zero
@State private var frontImageScale: CGFloat = 1.0
@State private var currentScale: CGFloat = 1.0
@State private var showSaveSuccess = false
@State private var showSaveError = false
@State private var saveErrorMessage = ""
@State private var textLabels: [TextLabel] = []
@State private var showTextInput = false
@State private var pendingTextPosition: CGPoint = .zero
@State private var newLabelText = ""
@Environment(\.dismiss) private var dismiss
private let frontImageSize: CGFloat = 120
var body: some View {
GeometryReader { geometry in
ZStack {
// Rear camera image as background with tap gesture
if let rearImage = rearImage {
Image(uiImage: rearImage)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: geometry.size.width, height: geometry.size.height)
.clipped()
.onTapGesture { location in
// Check if tap is outside the front image area
if !isTapOnFrontImage(location: location, geometry: geometry) {
pendingTextPosition = location
newLabelText = ""
showTextInput = true
}
}
}
// Front camera image as draggable and resizable overlay
if let frontImage = frontImage {
let scaledWidth = frontImageSize * frontImageScale * currentScale
let scaledHeight = frontImageSize * 1.33 * frontImageScale * currentScale
Image(uiImage: frontImage)
.resizable()
.aspectRatio(contentMode: .fill)
.frame(width: scaledWidth, height: scaledHeight)
.clipShape(RoundedRectangle(cornerRadius: 12))
.overlay(
RoundedRectangle(cornerRadius: 12)
.stroke(Color.white, lineWidth: 3)
)
.shadow(color: .black.opacity(0.3), radius: 5, x: 0, y: 2)
.position(
x: initialFrontPosition(in: geometry).x + frontImageOffset.width + dragOffset.width,
y: initialFrontPosition(in: geometry).y + frontImageOffset.height + dragOffset.height
)
.gesture(
DragGesture()
.onChanged { value in
dragOffset = value.translation
}
.onEnded { value in
frontImageOffset.width += value.translation.width
frontImageOffset.height += value.translation.height
dragOffset = .zero
}
)
.simultaneousGesture(
MagnificationGesture()
.onChanged { value in
currentScale = value
}
.onEnded { value in
frontImageScale *= value
frontImageScale = min(max(frontImageScale, 0.5), 3.0)
currentScale = 1.0
}
)
}
// Text labels
ForEach($textLabels) { $label in
Text(label.text)
.font(.system(size: 32, weight: .bold))
.foregroundColor(.white)
.shadow(color: .black, radius: 2, x: 1, y: 1)
.padding(20)
.contentShape(Rectangle())
.scaleEffect(label.scale * label.currentScale)
.rotationEffect(label.rotation + label.currentRotation)
.position(
x: label.position.x + label.offset.width + label.dragOffset.width,
y: label.position.y + label.offset.height + label.dragOffset.height
)
.gesture(
SimultaneousGesture(
SimultaneousGesture(
DragGesture()
.onChanged { value in
label.dragOffset = value.translation
}
.onEnded { value in
label.offset.width += value.translation.width
label.offset.height += value.translation.height
label.dragOffset = .zero
},
MagnificationGesture()
.onChanged { value in
label.currentScale = value
}
.onEnded { value in
label.scale *= value
label.scale = min(max(label.scale, 0.3), 5.0)
label.currentScale = 1.0
}
),
RotationGesture()
.onChanged { value in
label.currentRotation = value
}
.onEnded { value in
label.rotation += value
label.currentRotation = .zero
}
)
)
}
// Controls overlay
VStack {
Spacer()
HStack(spacing: 60) {
// Retake button
Button(action: {
dismiss()
onRetake()
}) {
VStack {
Image(systemName: "arrow.counterclockwise")
.font(.system(size: 24))
Text("Retake")
.font(.caption)
}
.foregroundColor(.white)
.padding()
.background(Color.black.opacity(0.5))
.cornerRadius(12)
}
// Save button
Button(action: {
saveCompositeImage(geometry: geometry)
}) {
VStack {
Image(systemName: "square.and.arrow.down")
.font(.system(size: 24))
Text("Save")
.font(.caption)
}
.foregroundColor(.white)
.padding()
.background(Color.blue)
.cornerRadius(12)
}
}
.padding(.bottom, 50)
}
}
.ignoresSafeArea()
}
.navigationBarHidden(true)
.alert("Saved!", isPresented: $showSaveSuccess) {
Button("OK", role: .cancel) { }
} message: {
Text("Photo saved to your library.")
}
.alert("Save Failed", isPresented: $showSaveError) {
Button("OK", role: .cancel) { }
} message: {
Text(saveErrorMessage)
}
.alert("Enter a word", isPresented: $showTextInput) {
TextField("Word", text: $newLabelText)
Button("Cancel", role: .cancel) { }
Button("Add") {
if !newLabelText.isEmpty {
let newLabel = TextLabel(text: newLabelText, position: pendingTextPosition)
textLabels.append(newLabel)
}
}
}
}
private func initialFrontPosition(in geometry: GeometryProxy) -> CGPoint {
// Position in top-right corner with padding
let padding: CGFloat = 20
let x = geometry.size.width - frontImageSize / 2 - padding
let y = frontImageSize * 1.33 / 2 + padding + geometry.safeAreaInsets.top
return CGPoint(x: x, y: y)
}
private func isTapOnFrontImage(location: CGPoint, geometry: GeometryProxy) -> Bool {
let scaledWidth = frontImageSize * frontImageScale
let scaledHeight = frontImageSize * 1.33 * frontImageScale
let initialPos = initialFrontPosition(in: geometry)
let centerX = initialPos.x + frontImageOffset.width
let centerY = initialPos.y + frontImageOffset.height
let minX = centerX - scaledWidth / 2
let maxX = centerX + scaledWidth / 2
let minY = centerY - scaledHeight / 2
let maxY = centerY + scaledHeight / 2
return location.x >= minX && location.x <= maxX && location.y >= minY && location.y <= maxY
}
private func saveCompositeImage(geometry: GeometryProxy) {
guard let rearImage = rearImage else { return }
// Request photo library permission
PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
DispatchQueue.main.async {
switch status {
case .authorized, .limited:
let compositeImage = createCompositeImage(
rearImage: rearImage,
frontImage: frontImage,
geometry: geometry
)
if let composite = compositeImage {
UIImageWriteToSavedPhotosAlbum(composite, nil, nil, nil)
showSaveSuccess = true
} else {
saveErrorMessage = "Failed to create composite image."
showSaveError = true
}
case .denied, .restricted:
saveErrorMessage = "Photo library access denied. Please enable in Settings."
showSaveError = true
case .notDetermined:
break
@unknown default:
break
}
}
}
}
private func createCompositeImage(rearImage: UIImage, frontImage: UIImage?, geometry: GeometryProxy) -> UIImage? {
// Use full screen size including safe areas to match what user sees
let fullWidth = geometry.size.width
let fullHeight = geometry.size.height + geometry.safeAreaInsets.top + geometry.safeAreaInsets.bottom
let size = CGSize(width: fullWidth, height: fullHeight)
let renderer = UIGraphicsImageRenderer(size: size)
return renderer.image { context in
// Draw rear image as background (fill)
let rearAspect = rearImage.size.width / rearImage.size.height
let viewAspect = size.width / size.height
var rearDrawRect: CGRect
if rearAspect > viewAspect {
let height = size.height
let width = height * rearAspect
let x = (size.width - width) / 2
rearDrawRect = CGRect(x: x, y: 0, width: width, height: height)
} else {
let width = size.width
let height = width / rearAspect
let y = (size.height - height) / 2
rearDrawRect = CGRect(x: 0, y: y, width: width, height: height)
}
rearImage.draw(in: rearDrawRect)
// Draw front image overlay
if let frontImage = frontImage {
let frontWidth = frontImageSize * frontImageScale
let frontHeight = frontImageSize * 1.33 * frontImageScale
// Calculate center position (matches initialFrontPosition + offset)
let padding: CGFloat = 20
let centerX = fullWidth - frontImageSize / 2 - padding + frontImageOffset.width
let centerY = frontImageSize * 1.33 / 2 + padding + geometry.safeAreaInsets.top + frontImageOffset.height
// Convert center to rect origin (top-left corner)
let frontX = centerX - frontWidth / 2
let frontY = centerY - frontHeight / 2
let frontRect = CGRect(x: frontX, y: frontY, width: frontWidth, height: frontHeight)
// Draw rounded rect with border
let path = UIBezierPath(roundedRect: frontRect, cornerRadius: 12)
context.cgContext.saveGState()
path.addClip()
// Calculate aspect-fill rect (same as .fill in SwiftUI)
let imageAspect = frontImage.size.width / frontImage.size.height
let frameAspect = frontWidth / frontHeight
var drawRect: CGRect
if imageAspect > frameAspect {
// Image is wider - fill height, overflow width
let drawHeight = frontHeight
let drawWidth = drawHeight * imageAspect
let drawX = frontX - (drawWidth - frontWidth) / 2
drawRect = CGRect(x: drawX, y: frontY, width: drawWidth, height: drawHeight)
} else {
// Image is taller - fill width, overflow height
let drawWidth = frontWidth
let drawHeight = drawWidth / imageAspect
let drawY = frontY - (drawHeight - frontHeight) / 2
drawRect = CGRect(x: frontX, y: drawY, width: drawWidth, height: drawHeight)
}
frontImage.draw(in: drawRect)
context.cgContext.restoreGState()
// Draw border
UIColor.white.setStroke()
path.lineWidth = 3
path.stroke()
}
// Draw text labels
for label in textLabels {
let textX = label.position.x + label.offset.width
let textY = label.position.y + label.offset.height
let scaledFontSize: CGFloat = 32 * label.scale
let attributes: [NSAttributedString.Key: Any] = [
.font: UIFont.boldSystemFont(ofSize: scaledFontSize),
.foregroundColor: UIColor.white,
.strokeColor: UIColor.black,
.strokeWidth: -2.0
]
let attributedString = NSAttributedString(string: label.text, attributes: attributes)
let textSize = attributedString.size()
// Save graphics state before applying transforms
context.cgContext.saveGState()
// Move to the text center position
context.cgContext.translateBy(x: textX, y: textY)
// Apply rotation
let rotationRadians = CGFloat(label.rotation.radians)
context.cgContext.rotate(by: rotationRadians)
// Draw text centered at origin (which is now at the text position)
let drawPoint = CGPoint(x: -textSize.width / 2, y: -textSize.height / 2)
attributedString.draw(at: drawPoint)
// Restore graphics state
context.cgContext.restoreGState()
}
}
}
}
struct PhotoReviewView_Previews: PreviewProvider {
static var previews: some View {
PhotoReviewView(
rearImage: UIImage(systemName: "photo"),
frontImage: UIImage(systemName: "person.fill"),
onRetake: {}
)
}
}

View File

@@ -0,0 +1,10 @@
import SwiftUI
@main
struct WordMarkApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}

junk Normal file
View File

@@ -0,0 +1,34 @@
/ralph-loop:ralph-loop "Create an iOS app in Swift/SwiftUI that captures photos from both the front and rear cameras simultaneously.
## Requirements
1. **Dual Camera Capture**
- Use AVCaptureMultiCamSession to capture from front and rear cameras at the same time
- Single capture button triggers both cameras simultaneously
- Handle devices that don't support multi-cam gracefully (show error message)
2. **Photo Display Screen**
- After capture, navigate to a review screen
- Rear camera photo displays full-screen as background
- Front camera photo displays as a smaller overlay (roughly 25% size)
- Front photo positioned in top-right corner with rounded corners and subtle border
- User can tap front photo to drag/reposition it
3. **Basic Features**
- Save combined image to photo library
- Retake button to return to camera
- Request camera and photo library permissions properly
4. **Project Structure**
- Clean MVVM architecture
- Separate camera service class
- The app should build and run without errors
## Success Criteria
- App builds without errors or warnings
- Both cameras capture simultaneously on supported devices
- Photos display in correct layout (rear=background, front=overlay)
- Can save the composite image
Output <promise>DUAL CAM APP COMPLETE</promise> when the app builds successfully and meets all requirements." --completion-promise "DUAL CAM APP COMPLETE"
--max-iterations 25