commit 57f6c18550ba83c437f7108e54e616bb8509b02a Author: jared Date: Mon Jan 19 21:58:44 2026 -0500 Initial commit: FlipTalk iOS app Co-Authored-By: Claude Opus 4.5 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..aa7e383 --- /dev/null +++ b/.gitignore @@ -0,0 +1,67 @@ +# Xcode +build/ +DerivedData/ +*.xcodeproj/xcuserdata/ +*.xcworkspace/xcuserdata/ +*.xcodeproj/project.xcworkspace/xcuserdata/ + +# Xcode build state +*.moved-aside +*.xcuserstate +*.xccheckout +*.xcscmblueprint + +# Swift Package Manager +.build/ +.swiftpm/ +Package.resolved + +# CocoaPods +Pods/ +Podfile.lock + +# Carthage +Carthage/Build/ +Carthage/Checkouts/ + +# Node (if any JS tooling) +node_modules/ +dist/ +.npm/ + +# macOS +.DS_Store +.AppleDouble +.LSOverride +._* +.Spotlight-V100 +.Trashes + +# IDEs +*.swp +*.swo +*~ +.idea/ +.vscode/ + +# Archives +*.ipa +*.dSYM.zip +*.dSYM + +# Playgrounds +timeline.xctimeline +playground.xcworkspace + +# Fastlane +fastlane/report.xml +fastlane/Preview.html +fastlane/screenshots/**/*.png +fastlane/test_output/ + +# Environment and secrets +.env +.env.* +*.pem +*.p12 +*.mobileprovision diff --git a/FlipTalk.xcodeproj/project.pbxproj b/FlipTalk.xcodeproj/project.pbxproj new file mode 100644 index 0000000..a58eb24 --- /dev/null +++ b/FlipTalk.xcodeproj/project.pbxproj @@ -0,0 +1,361 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 77; + objects = { + +/* Begin PBXFileReference section */ + 7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FlipTalk.app; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFileSystemSynchronizedRootGroup section */ + 7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = FlipTalk; + sourceTree = ""; + }; +/* End PBXFileSystemSynchronizedRootGroup section */ + +/* Begin PBXFrameworksBuildPhase section */ + 7F95F89C2EDF7D3B00ABB7F4 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 7F95F8962EDF7D3B00ABB7F4 = { + isa = PBXGroup; + children = ( + 7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */, + 7F95F8A02EDF7D3B00ABB7F4 /* Products */, + ); + sourceTree = ""; + }; + 7F95F8A02EDF7D3B00ABB7F4 /* Products */ = { + isa = PBXGroup; + children = ( + 7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */, + ); + name = Products; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 7F95F89E2EDF7D3B00ABB7F4 /* FlipTalk */ = { + isa = PBXNativeTarget; + buildConfigurationList = 7F95F8AA2EDF7D3C00ABB7F4 /* Build configuration list for PBXNativeTarget "FlipTalk" */; + buildPhases = ( + 7F95F89B2EDF7D3B00ABB7F4 /* Sources */, + 7F95F89C2EDF7D3B00ABB7F4 /* Frameworks */, + 7F95F89D2EDF7D3B00ABB7F4 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + fileSystemSynchronizedGroups = ( + 7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */, + ); + name = FlipTalk; + packageProductDependencies = ( + ); + productName = FlipTalk; + productReference = 7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */; + productType = "com.apple.product-type.application"; + }; +/* End 
PBXNativeTarget section */ + +/* Begin PBXProject section */ + 7F95F8972EDF7D3B00ABB7F4 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 2610; + LastUpgradeCheck = 2620; + TargetAttributes = { + 7F95F89E2EDF7D3B00ABB7F4 = { + CreatedOnToolsVersion = 26.1.1; + }; + }; + }; + buildConfigurationList = 7F95F89A2EDF7D3B00ABB7F4 /* Build configuration list for PBXProject "FlipTalk" */; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 7F95F8962EDF7D3B00ABB7F4; + minimizedProjectReferenceProxies = 1; + preferredProjectObjectVersion = 77; + productRefGroup = 7F95F8A02EDF7D3B00ABB7F4 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 7F95F89E2EDF7D3B00ABB7F4 /* FlipTalk */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 7F95F89D2EDF7D3B00ABB7F4 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 7F95F89B2EDF7D3B00ABB7F4 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + 7F95F8A82EDF7D3C00ABB7F4 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + 
CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = 7X85543FQQ; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 26.1; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 7F95F8A92EDF7D3C00ABB7F4 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + 
CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + DEVELOPMENT_TEAM = 7X85543FQQ; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 26.1; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_COMPILATION_MODE = wholemodule; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 7F95F8AB2EDF7D3C00ABB7F4 /* Debug */ = { + isa 
= XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 2; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_CFBundleDisplayName = "Flip-Talk"; + INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Needed for recording the person speaking"; + INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "Needed to transcribe."; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 2.3; + ONLY_ACTIVE_ARCH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.jaredlog.Flip-Talk"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 7F95F8AC2EDF7D3C00ABB7F4 /* Release */ = { + isa = 
XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 2; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_CFBundleDisplayName = "Flip-Talk"; + INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities"; + INFOPLIST_KEY_NSMicrophoneUsageDescription = "Needed for recording the person speaking"; + INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "Needed to transcribe."; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown"; + IPHONEOS_DEPLOYMENT_TARGET = 18.6; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 2.3; + ONLY_ACTIVE_ARCH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.jaredlog.Flip-Talk"; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin 
XCConfigurationList section */ + 7F95F89A2EDF7D3B00ABB7F4 /* Build configuration list for PBXProject "FlipTalk" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 7F95F8A82EDF7D3C00ABB7F4 /* Debug */, + 7F95F8A92EDF7D3C00ABB7F4 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 7F95F8AA2EDF7D3C00ABB7F4 /* Build configuration list for PBXNativeTarget "FlipTalk" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 7F95F8AB2EDF7D3C00ABB7F4 /* Debug */, + 7F95F8AC2EDF7D3C00ABB7F4 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 7F95F8972EDF7D3B00ABB7F4 /* Project object */; +} diff --git a/FlipTalk.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/FlipTalk.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000..919434a --- /dev/null +++ b/FlipTalk.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/FlipTalk.xcodeproj/xcshareddata/xcschemes/FlipTalk.xcscheme b/FlipTalk.xcodeproj/xcshareddata/xcschemes/FlipTalk.xcscheme new file mode 100644 index 0000000..c93950c --- /dev/null +++ b/FlipTalk.xcodeproj/xcshareddata/xcschemes/FlipTalk.xcscheme @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/FlipTalk/Assets.xcassets/AccentColor.colorset/Contents.json b/FlipTalk/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000..eb87897 --- /dev/null +++ b/FlipTalk/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/100.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/100.png new file mode 100644 index 0000000..578af67 Binary files /dev/null and 
b/FlipTalk/Assets.xcassets/AppIcon.appiconset/100.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/1024.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/1024.png new file mode 100644 index 0000000..fa677cc Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/1024.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/114.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/114.png new file mode 100644 index 0000000..468b196 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/114.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/120.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/120.png new file mode 100644 index 0000000..76e3d59 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/120.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/144.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/144.png new file mode 100644 index 0000000..a208b80 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/144.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/152.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/152.png new file mode 100644 index 0000000..2a03ca1 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/152.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/167.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/167.png new file mode 100644 index 0000000..8894fb9 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/167.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/180.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/180.png new file mode 100644 index 0000000..be5c8fd Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/180.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/20.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/20.png new file mode 100644 index 0000000..c19a67a Binary files 
/dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/20.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/29.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/29.png new file mode 100644 index 0000000..ed0dd28 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/29.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/40.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/40.png new file mode 100644 index 0000000..32dee35 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/40.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/50.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/50.png new file mode 100644 index 0000000..6ffc7b3 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/50.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/57.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/57.png new file mode 100644 index 0000000..669ac5c Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/57.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/58.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/58.png new file mode 100644 index 0000000..371f989 Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/58.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/60.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/60.png new file mode 100644 index 0000000..dff90ad Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/60.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/72.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/72.png new file mode 100644 index 0000000..4ed50cd Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/72.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/76.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/76.png new file mode 100644 index 0000000..7a4a314 Binary files /dev/null and 
b/FlipTalk/Assets.xcassets/AppIcon.appiconset/76.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/80.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/80.png new file mode 100644 index 0000000..c649e5e Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/80.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/87.png b/FlipTalk/Assets.xcassets/AppIcon.appiconset/87.png new file mode 100644 index 0000000..dd9349a Binary files /dev/null and b/FlipTalk/Assets.xcassets/AppIcon.appiconset/87.png differ diff --git a/FlipTalk/Assets.xcassets/AppIcon.appiconset/Contents.json b/FlipTalk/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..65b74d7 --- /dev/null +++ b/FlipTalk/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1 @@ +{"images":[{"size":"60x60","expected-size":"180","filename":"180.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"40x40","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"60x60","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"57x57","expected-size":"57","filename":"57.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"87","filename":"87.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"57x57","expected-size":"114","filename":"114.png","folder":"Assets.xca
ssets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"60","filename":"60.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"1024x1024","filename":"1024.png","expected-size":"1024","idiom":"ios-marketing","folder":"Assets.xcassets/AppIcon.appiconset/","scale":"1x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"72x72","expected-size":"72","filename":"72.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"76x76","expected-size":"152","filename":"152.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"50x50","expected-size":"100","filename":"100.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"76x76","expected-size":"76","filename":"76.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"50x50","expected-size":"50","filename":"50.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"72x72","expected-size":"144","filename":"144.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"40x40","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"83.5x83.5","expected-size":"167","filename":"167.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"20x20","expected-size":"20","filename"
:"20.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"}]} \ No newline at end of file diff --git a/FlipTalk/Assets.xcassets/Contents.json b/FlipTalk/Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/FlipTalk/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/FlipTalk/ContentView.swift b/FlipTalk/ContentView.swift new file mode 100644 index 0000000..cabd516 --- /dev/null +++ b/FlipTalk/ContentView.swift @@ -0,0 +1,1998 @@ +import SwiftUI +import UIKit +import AVFoundation +#if canImport(Translation) +import Translation +#endif +import Combine +import GameController + +struct NoteItem: Identifiable, Hashable, Codable { + let id: UUID + var content: String + var timestamp: Date + var group: String? + var isHiddenFromHistory: Bool + var displayName: String? + + init(content: String, timestamp: Date, group: String? = nil, isHiddenFromHistory: Bool = false, displayName: String? 
= nil) { + self.id = UUID() + self.content = content + self.timestamp = timestamp + self.group = group + self.isHiddenFromHistory = isHiddenFromHistory + self.displayName = displayName + } +} + +struct ContentView: View { + @EnvironmentObject var themeManager: ThemeManager + @State private var currentNote: String = "" + @State private var savedNotes: [NoteItem] = [] + @State private var groups: [String] = [] + @State private var groupColors: [String: String] = [:] // Map group name to color name + + @State private var isMenuOpen: Bool = false + @State private var isGroupsMenuOpen: Bool = false + @State private var dragOffset: CGFloat = 0 + + @State private var fontSize: CGFloat = 40 + @State private var keyboardHeight: CGFloat = 0 + @FocusState private var isInputActive: Bool + + // Group Selection State + @State private var noteForGroupSelection: NoteItem? + @State private var showGroupSelectionSheet: Bool = false + @State private var showCreateGroupAlert: Bool = false + @State private var newGroupName: String = "" + + // Group Rename State + @State private var groupToRename: String? + @State private var showRenameGroupAlert: Bool = false + @State private var newGroupNameInput: String = "" + + // Rename State + @State private var noteForRenaming: NoteItem? 
+ @State private var showRenameAlert: Bool = false + @State private var newDisplayName: String = "" + + // Settings State + @State private var showSettings: Bool = false + + // Voice Screen State + @State private var isVoiceScreenActive: Bool = false + + // Split Screen State + @State private var isSplitScreenMode: Bool = false + @State private var splitScreenLayoutID = UUID() // For forcing layout updates + @ObservedObject private var speechRecognizer = SpeechRecognizer.shared + + // Text-to-Speech + @State private var speechSynthesizer = AVSpeechSynthesizer() + @StateObject private var speechDelegate = SpeechDelegate() + @State private var isSpeakingAnimation = false + + // Translation State + @State private var isShowingTranslation = false + @State private var translatedNote = "" + @State private var translationTaskID = UUID() + @ObservedObject private var languageManager = LanguageManager.shared + @AppStorage("targetLanguageIdentifier") private var targetLanguageIdentifier: String = "" + + // Resume Speaking State + @State private var lastSpokenText: String = "" + @State private var currentUtteranceOffset: Int = 0 + @State private var isContinuousMode: Bool = false + + #if canImport(Translation) + @State private var mainTranslationConfig: TranslationSession.Configuration? + // Split screen translation configs + @State private var splitTranscriptConfig: TranslationSession.Configuration? + @State private var splitUserTextConfig: TranslationSession.Configuration? 
+ #endif + + // Split screen translation state + @State private var translatedTranscript = "" // Hearing person's text translated to English + @State private var translatedUserText = "" // User's text translated to target language + + // Hardware Keyboard State + @State private var isHardwareKeyboardAttached: Bool = false + + var body: some View { + GeometryReader { geometry in + let menuWidth = geometry.size.width * 0.75 + // Calculate the actual height taken by the keyboard within the safe area + let keyboardOverlap = max(0, keyboardHeight - geometry.safeAreaInsets.bottom) + let availableHeight = geometry.size.height - keyboardOverlap + + ZStack(alignment: .leading) { + if isVoiceScreenActive { + VoiceNoteView(onFlipBack: { + withAnimation(.easeInOut(duration: 0.6)) { + isVoiceScreenActive = false + } + // Ensure keyboard comes up when returning + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + isInputActive = true + } + }) + .transition(.modifier( + active: FlipTransition(angle: 90), + identity: FlipTransition(angle: 0) + )) + } else { + // Main Content + mainContentView(geometry: geometry, keyboardOverlap: keyboardOverlap) + } + + // Dimming overlay when any menu is open + if isMenuOpen || isGroupsMenuOpen { + Color.black.opacity(0.3) + .ignoresSafeArea() + .onTapGesture { + withAnimation { + isMenuOpen = false + isGroupsMenuOpen = false + } + } + } + + // Left Side Menu (History) + SideMenuView( + notes: savedNotes.filter { !$0.isHiddenFromHistory }, + onSelect: { selectedNote in + currentNote = selectedNote.content + withAnimation { + isMenuOpen = false + isVoiceScreenActive = false + } + // Recalculate font after loading + DispatchQueue.main.async { + adjustFontSize(containerSize: CGSize(width: geometry.size.width, height: availableHeight - 60)) + } + }, + onStarTap: { note in + noteForGroupSelection = note + if groups.isEmpty { + showCreateGroupAlert = true + } else { + showGroupSelectionSheet = true + } + }, + onClearAll: { + clearAllHistory() + 
}, + onDelete: { note in + deleteNoteFromHistory(note) + }, + onSettings: { + showSettings = true + }, + onClose: { + withAnimation { + isMenuOpen = false + } + } + ) + .frame(width: menuWidth) + .offset(x: isMenuOpen ? 0 : -menuWidth) + .animation(.easeInOut, value: isMenuOpen) + + + // Right Side Menu (Groups) + GroupsMenuView( + groups: groups, + groupColors: groupColors, + notes: savedNotes, + getGroupColor: getGroupColor, + onSelect: { selectedNote in + currentNote = selectedNote.content + withAnimation { + isGroupsMenuOpen = false + isVoiceScreenActive = false + } + DispatchQueue.main.async { + adjustFontSize(containerSize: CGSize(width: geometry.size.width, height: availableHeight - 60)) + } + }, + onRenameNote: { note in + noteForRenaming = note + newDisplayName = note.displayName ?? note.content + showRenameAlert = true + }, + onRenameGroup: { group in + groupToRename = group + newGroupNameInput = group + showRenameGroupAlert = true + }, + onColorGroup: { group, color in + setGroupColor(group: group, color: color) + }, + onDelete: { note in + deleteNote(note) + }, + onDeleteGroup: { group in + deleteGroup(group) + }, + onClose: { + withAnimation { + isGroupsMenuOpen = false + } + } + ) + .frame(width: menuWidth) + .offset(x: isGroupsMenuOpen ? 
geometry.size.width - menuWidth : geometry.size.width) + .animation(.easeInOut, value: isGroupsMenuOpen) + + + // Edge Swipe Gesture Areas + + // Left Edge (History) + HStack { + Color.clear + .frame(width: 20) + .contentShape(Rectangle()) + .allowsHitTesting(!isMenuOpen && !isGroupsMenuOpen) + .gesture( + DragGesture() + .onChanged { value in + if value.translation.width > 0 { + // Swipe Right + } + } + .onEnded { value in + if value.translation.width > 50 { + saveCurrentNote() + isInputActive = false // Hide keyboard + withAnimation { + isMenuOpen = true + } + } + } + ) + Spacer() + } + + // Right Edge (Groups) + HStack { + Spacer() + Color.clear + .frame(width: 20) + .contentShape(Rectangle()) + .allowsHitTesting(!isMenuOpen && !isGroupsMenuOpen) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width < -50 { + isInputActive = false // Hide keyboard + withAnimation { + isGroupsMenuOpen = true + } + } + } + ) + } + } + .ignoresSafeArea(.keyboard, edges: .bottom) // Prevent automatic resizing + .onAppear { + SpeechRecognizer.shared.startTranscribing() + speechSynthesizer.delegate = speechDelegate + setupKeyboardObservers() + loadData() + // Automatically focus the text editor when app loads + isInputActive = true + + // Initialize VoiceManager with saved language + if !targetLanguageIdentifier.isEmpty { + VoiceManager.shared.updateTargetLanguage(to: targetLanguageIdentifier) + } + } + .onChange(of: targetLanguageIdentifier) { _, newValue in + if !newValue.isEmpty { + VoiceManager.shared.updateTargetLanguage(to: newValue) + } + } + .onDisappear { + removeKeyboardObservers() + } + // Alerts and Sheets + .alert("Create New Group", isPresented: $showCreateGroupAlert) { + TextField("Group Name", text: $newGroupName) + Button("Cancel", role: .cancel) { } + Button("Create") { + if !newGroupName.isEmpty { + groups.append(newGroupName) + assignGroupToNote(group: newGroupName) + newGroupName = "" + saveData() + } + } + } message: { + Text("Enter a 
name for the new group.") + } + .alert("Rename Note", isPresented: $showRenameAlert) { + TextField("Display Name", text: $newDisplayName) + Button("Cancel", role: .cancel) { } + Button("Save") { + renameNote() + } + } message: { + Text("Enter a new display name for this note in the list.") + } + .alert("Rename Group", isPresented: $showRenameGroupAlert) { + TextField("Group Name", text: $newGroupNameInput) + Button("Cancel", role: .cancel) { } + Button("Save") { + renameGroup() + } + } message: { + Text("Enter a new name for this group.") + } + + .sheet(isPresented: $showSettings) { + SettingsView() + .environmentObject(themeManager) + } + .confirmationDialog("Select Faves Group", isPresented: $showGroupSelectionSheet, titleVisibility: .visible) { + ForEach(groups, id: \.self) { group in + Button(group) { + assignGroupToNote(group: group) + } + } + Button("New Faves Group") { + showCreateGroupAlert = true + } + } + .onShake { + saveCurrentNote() + currentNote = "" + isShowingTranslation = false + translatedNote = "" + } + #if canImport(Translation) + .translationTask(mainTranslationConfig) { session in + do { + let response = try await session.translate(currentNote) + translatedNote = response.targetText + } catch { + print("Translation failed: \(error)") + translatedNote = "Translation failed" + } + } + // Split screen: Transcript (foreign → English) + .translationTask(splitTranscriptConfig) { session in + do { + let response = try await session.translate(speechRecognizer.transcript) + translatedTranscript = response.targetText + } catch { + print("Transcript translation failed: \(error)") + translatedTranscript = "Translation failed" + } + } + // Split screen: User text (English → foreign) + .translationTask(splitUserTextConfig) { session in + do { + let response = try await session.translate(currentNote) + translatedUserText = response.targetText + } catch { + print("User text translation failed: \(error)") + translatedUserText = "Translation failed" + } + } + 
            #endif
            .id(translationTaskID)
        }
    }

    /// Toggles between showing the original text and its translation.
    /// When turning translation ON it sets placeholder "Translating..." text and,
    /// on iOS 18+ with the Translation framework available, rebuilds the
    /// `TranslationSession.Configuration`s (after a 0.1s delay and a fresh
    /// `translationTaskID`) so the `.translationTask` modifiers re-fire.
    /// Split-screen mode translates bidirectionally (transcript -> English,
    /// user text -> target language); normal mode translates user text only.
    private func triggerTranslation() {
        if isShowingTranslation {
            // Revert to original
            isShowingTranslation = false
            translatedTranscript = ""
            translatedUserText = ""
            // NOTE(review): translatedNote is not cleared here, unlike the other
            // two buffers — confirm whether that is intentional.
        } else {
            // Trigger Translation
            if isSplitScreenMode {
                // Split screen mode: bidirectional translation
                // Top pane: transcript (foreign language) → English
                // Bottom pane: user text (English) → target language
                guard !speechRecognizer.transcript.isEmpty || !currentNote.isEmpty else { return }

                translatedTranscript = "Translating..."
                translatedUserText = "Translating..."
                isShowingTranslation = true

                if #available(iOS 18.0, *) {
                    #if canImport(Translation)
                    // Fall back to Mexican Spanish when no target language is configured.
                    let langId = targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier

                    // Delay so the new task ID is observed before the configs change.
                    DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
                        translationTaskID = UUID()

                        // Transcript: foreign → English
                        splitTranscriptConfig = TranslationSession.Configuration(
                            source: Locale.Language(identifier: langId),
                            target: Locale.Language(identifier: "en-US")
                        )

                        // User text: English → foreign
                        splitUserTextConfig = TranslationSession.Configuration(
                            source: Locale.Language(identifier: "en-US"),
                            target: Locale.Language(identifier: langId)
                        )
                    }
                    #else
                    translatedTranscript = "Translation unavailable"
                    translatedUserText = "Translation unavailable"
                    #endif
                } else {
                    translatedTranscript = "Translation requires iOS 18"
                    translatedUserText = "Translation requires iOS 18"
                }
            } else {
                // Normal mode: translate user text to target language
                guard !currentNote.isEmpty else { return }
                translatedNote = "Translating..."
                isShowingTranslation = true

                if #available(iOS 18.0, *) {
                    #if canImport(Translation)
                    let langId = targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier

                    DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
                        translationTaskID = UUID()
                        mainTranslationConfig = TranslationSession.Configuration(
                            source: Locale.Language(identifier: "en-US"),
                            target: Locale.Language(identifier: langId)
                        )
                    }
                    #else
                    translatedNote = "Translation unavailable"
                    #endif
                } else {
                    translatedNote = "Translation requires iOS 18"
                }
            }
        }
    }

    /// Assigns `group` to the note pending in `noteForGroupSelection`,
    /// persists, and clears the pending selection.
    private func assignGroupToNote(group: String) {
        guard let note = noteForGroupSelection else { return }
        if let index = savedNotes.firstIndex(where: { $0.id == note.id }) {
            savedNotes[index].group = group
            saveData()
        }
        noteForGroupSelection = nil
    }

    /// Applies `newDisplayName` to the note pending in `noteForRenaming` and persists.
    /// NOTE(review): an empty display name is accepted here — confirm whether
    /// the rename alert should validate non-empty input like group creation does.
    private func renameNote() {
        guard let note = noteForRenaming else { return }
        if let index = savedNotes.firstIndex(where: { $0.id == note.id }) {
            savedNotes[index].displayName = newDisplayName
            saveData()
        }
        noteForRenaming = nil
    }

    /// Renames the group pending in `groupToRename` to `newGroupNameInput`,
    /// migrating the group's color and every note's group reference, then persists.
    /// No-ops on empty input or a duplicate name.
    private func renameGroup() {
        guard let oldName = groupToRename, !newGroupNameInput.isEmpty else { return }
        guard !groups.contains(newGroupNameInput) else { return } // Prevent duplicate names

        // Update groups list
        if let index = groups.firstIndex(of: oldName) {
            groups[index] = newGroupNameInput
        }

        // Update group colors
        if let color = groupColors[oldName] {
            groupColors[newGroupNameInput] = color
            groupColors.removeValue(forKey: oldName)
        }

        // Update notes
        for index in savedNotes.indices {
            if savedNotes[index].group == oldName {
                savedNotes[index].group = newGroupNameInput
            }
        }

        saveData()
        groupToRename = nil
        newGroupNameInput = ""
    }

    /// Records a color name for `group` and persists.
    private func setGroupColor(group: String, color: String) {
        groupColors[group] = color
        saveData()
    }

    /// Maps a group's stored color name ("Red"/"Green"/"Blue"/"Grey") to a
    /// (text, background) color pair; unknown or missing names fall back to
    /// primary text on systemGray6.
    private func getGroupColor(group: String) -> (text: Color, bg: Color) {
        guard let colorName = groupColors[group] else {
            return (.primary, Color(UIColor.systemGray6)) // Default
        }

        switch colorName {
        case "Red":
            return (Color(red: 0.5, green: 0, blue: 0), Color(red: 1.0, green: 0.9, blue: 0.9))
        case "Green":
            return (Color(red: 0, green: 0.4, blue: 0), Color(red: 0.9, green: 1.0, blue: 0.9))
        case "Blue":
            return (Color(red: 0, green: 0, blue: 0.5), Color(red: 0.9, green: 0.9, blue: 1.0))
        case "Grey":
            return (Color.black, Color(UIColor.systemGray4))
        default:
            return (.primary, Color(UIColor.systemGray6))
        }
    }

    /// Permanently deletes a note (regardless of group membership) and persists.
    private func deleteNote(_ note: NoteItem) {
        savedNotes.removeAll { $0.id == note.id }
        saveData()
    }

    /// Removes a note from the History list only: grouped notes are merely
    /// hidden (so they remain in their Faves group), ungrouped notes are deleted.
    private func deleteNoteFromHistory(_ note: NoteItem) {
        if let index = savedNotes.firstIndex(where: { $0.id == note.id }) {
            if savedNotes[index].group != nil {
                savedNotes[index].isHiddenFromHistory = true
            } else {
                savedNotes.remove(at: index)
            }
            saveData()
        }
    }

    /// Deletes a group along with ALL of its notes and its color, then persists.
    private func deleteGroup(_ group: String) {
        // Remove all notes in this group
        savedNotes.removeAll { $0.group == group }

        // Remove the group from the groups list
        groups.removeAll { $0 == group }

        // Remove the group's color
        groupColors.removeValue(forKey: group)

        saveData()
    }

    /// Picks the largest bold system font size (12...40pt) at which the
    /// currently displayed text (translation when shown, otherwise the note)
    /// fits inside `containerSize`, and writes it to `fontSize` if changed.
    /// Measurement uses NSString.boundingRect with hard-coded padding margins.
    private func adjustFontSize(containerSize: CGSize) {
        let maxFontSize: CGFloat = 40
        let minFontSize: CGFloat = 12

        // Determine which text to measure
        let text: String
        if isShowingTranslation && !translatedNote.isEmpty {
            text = translatedNote
        } else {
            // Measure a single space for empty input so the loop still terminates
            // with a sensible (max) size.
            text = currentNote.isEmpty ? " " : currentNote
        }

        // We need to account for padding in TextEditor (approx 10-20pts usually)
        let horizontalPadding: CGFloat = 16 // Standard padding
        let width = containerSize.width - (horizontalPadding * 2)
        let height = containerSize.height - 90 // Account for top padding (40) + visibility buffer (50)

        // Simple binary search or iterative approach to find best fit
        var bestSize = minFontSize

        // Check from max down to min; first size whose wrapped height fits wins.
        for size in stride(from: maxFontSize, through: minFontSize, by: -1) {
            let font = UIFont.systemFont(ofSize: size, weight: .bold)
            let attributes = [NSAttributedString.Key.font: font]
            let boundingRect = NSString(string: text).boundingRect(
                with: CGSize(width: width, height: .greatestFiniteMagnitude),
                options: [.usesLineFragmentOrigin, .usesFontLeading],
                attributes: attributes,
                context: nil
            )

            if boundingRect.height <= height {
                bestSize = size
                break
            }
        }

        // Avoid redundant state writes (each write invalidates the view).
        if fontSize != bestSize {
            fontSize = bestSize
        }
    }

    /// Prepends the current note to `savedNotes` and persists, skipping
    /// whitespace-only notes and exact-content duplicates (even hidden ones).
    private func saveCurrentNote() {
        guard !currentNote.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty else { return }

        // Check if the note already exists (even if hidden)
        if savedNotes.contains(where: { $0.content == currentNote }) {
            // If it exists but is hidden, should we unhide it?
            // The user didn't specify, but typically "saving" again might imply bringing it back.
            // For now, let's just prevent duplicates as requested before.
            return
        }

        let newNote = NoteItem(content: currentNote, timestamp: Date())
        savedNotes.insert(newNote, at: 0)
        saveData()
    }

    /// Clears the History list: ungrouped notes are deleted outright, grouped
    /// notes are kept but flagged hidden-from-history. Persists the result.
    private func clearAllHistory() {
        // Remove notes that don't have a group
        savedNotes.removeAll { $0.group == nil }

        // Mark remaining notes (which must have a group) as hidden from history
        for index in savedNotes.indices {
            savedNotes[index].isHiddenFromHistory = true
        }
        saveData()
    }


    /// Speaks the visible text (translation when shown, otherwise the note)
    /// through `speechSynthesizer`, restarting any in-progress speech.
    /// In non-translation mode it resumes from `lastSpokenText` when the note
    /// merely grew (tracking `currentUtteranceOffset` in UTF-16 units so the
    /// highlight overlay can map delegate ranges back onto the full text).
    private func speakText() {
        // Configure Audio Session
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(.playback, mode: .default)
            try audioSession.setActive(true)
        } catch {
            // Best-effort: speech may still work with the previous session config.
            print("Failed to set up audio session: \(error)")
        }

        // Stop any ongoing speech
        if speechSynthesizer.isSpeaking {
            speechSynthesizer.stopSpeaking(at: .immediate)
        }

        // Determine text source
        let fullText = (isShowingTranslation && !translatedNote.isEmpty) ? translatedNote : currentNote

        // Resume Logic
        var textToSpeak = fullText

        // Only attempt resume if NOT in translation mode (simpler to ensure correctness)
        // and if currently showing source text.
        if !isShowingTranslation {
            if !lastSpokenText.isEmpty && fullText.hasPrefix(lastSpokenText) {
                // Resume from where we left off
                let remainingText = String(fullText.dropFirst(lastSpokenText.count))

                // If there's nothing new to speak, maybe speak all? Or just nothing?
                // User said "resume from that position to speak the remaining text".
                // If remaining is empty, maybe they just clicked it again. Let's speak all in that case.
                if remainingText.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty {
                    textToSpeak = fullText
                    currentUtteranceOffset = 0
                } else {
                    textToSpeak = remainingText
                    // UTF-16 count to match NSRange-based delegate callbacks.
                    currentUtteranceOffset = lastSpokenText.utf16.count
                }
            } else {
                // Text changed significantly, or new start
                currentUtteranceOffset = 0
            }
        } else {
            // Translation mode: always speak full for now to ensure correctness
            currentUtteranceOffset = 0
        }

        let utterance = AVSpeechUtterance(string: textToSpeak)

        // Voice selection follows the displayed language.
        if isShowingTranslation && !translatedNote.isEmpty {
            utterance.voice = VoiceManager.shared.getSelectedTargetVoice()
        } else {
            utterance.voice = VoiceManager.shared.getSelectedEnglishVoice()
        }

        utterance.rate = 0.5

        // Manual speak still uses currentUtteranceOffset, which we set earlier.
        // We must register it now.
        speechDelegate.register(utterance: utterance, offset: currentUtteranceOffset)

        speechSynthesizer.speak(utterance)

        // Update state assuming success
        // Careful: if we are speaking a partial chunk, the "lastSpokenText" essentially becomes the full text
        // because next time we want to skip EVERYTHING we just spoke + what we skipped.
        if !isShowingTranslation {
            lastSpokenText = fullText
        } else {
            // If in translation mode, we don't track resume state for now
            lastSpokenText = ""
        }
    }

    /// Continuous-mode monitor: called on every `currentNote` change to speak
    /// newly completed sentences (text after `lastSpokenText` ending in . ? !).
    /// NOTE(review): the middle of this function is truncated in this capture;
    /// only the visible portion is documented here.
    private func checkAndSpeakSentences() {
        // Only valid if continuous mode is on, not showing translation, and we have text
        guard isContinuousMode, !isShowingTranslation, !currentNote.isEmpty else { return }

        let fullText = currentNote

        // Calculate the "new" text that hasn't been spoken yet
        // If lastSpokenText is a prefix, we only look at what's after it.
        // If currentNote changed completely, lastSpokenText might be invalid prefix -> treat as new start?
        // But for "resume", we usually reset lastSpokenText if it doesn't match prefix.
+ + let textToCheck: Substring + let baseOffset: Int + + if fullText.hasPrefix(lastSpokenText) { + textToCheck = fullText.dropFirst(lastSpokenText.count) + baseOffset = lastSpokenText.count + } else { + // Text changed in a way that invalidated history (e.g. deletion at start) + // Should we respeak everything? Or just start monitoring from now? + // "Pick up from last position" implies we monitor new additions. + // Let's assume we start from scratch. + textToCheck = Substring(fullText) + baseOffset = 0 + lastSpokenText = "" // Reset history + } + + let stringToCheck = String(textToCheck) + + // Find sentences ending with '.' + // logic: split by '.', but keep the delimiter? + // Or simpler: find ranges of "." + + // Robust way: scan for "." + // We want to speak chunk by chunk. + // "Hello world. This is test." + // 1. "Hello world." -> Speak. Update lastSpokenText. + // 2. " This is test." -> Wait until next period. + + var currentIndex = stringToCheck.startIndex + + while let range = stringToCheck[currentIndex...].rangeOfCharacter(from: CharacterSet(charactersIn: ".?!")) { + // Found a period, question mark, or exclamation mark + let endOfSentence = range.upperBound + let sentenceSub = stringToCheck[currentIndex.. 
            // NOTE(review): source text lost at a capture boundary immediately above;
            // the fragment below (tail of checkAndSpeakSentences) is reproduced as found.
            lastSpokenText.count {
            lastSpokenText = newLastSpoken
        }
    }

    // Persistence

    /// Serializes notes, group names, and group colors to JSON and stores each
    /// under its own UserDefaults key. Failures are logged and otherwise ignored.
    private func saveData() {
        do {
            let notesData = try JSONEncoder().encode(savedNotes)
            let groupsData = try JSONEncoder().encode(groups)
            let colorsData = try JSONEncoder().encode(groupColors)
            UserDefaults.standard.set(notesData, forKey: "savedNotes")
            UserDefaults.standard.set(groupsData, forKey: "savedGroups")
            UserDefaults.standard.set(colorsData, forKey: "savedGroupColors")
        } catch {
            print("Error saving data: \(error)")
        }
    }

    /// Restores notes, group names, and group colors from UserDefaults.
    /// Each key is decoded independently, so one corrupt entry does not
    /// prevent the others from loading; decode failures are logged.
    private func loadData() {
        if let notesData = UserDefaults.standard.data(forKey: "savedNotes") {
            do {
                savedNotes = try JSONDecoder().decode([NoteItem].self, from: notesData)
            } catch {
                print("Error loading notes: \(error)")
            }
        }

        if let groupsData = UserDefaults.standard.data(forKey: "savedGroups") {
            do {
                groups = try JSONDecoder().decode([String].self, from: groupsData)
            } catch {
                print("Error loading groups: \(error)")
            }
        }

        if let colorsData = UserDefaults.standard.data(forKey: "savedGroupColors") {
            do {
                groupColors = try JSONDecoder().decode([String: String].self, from: colorsData)
            } catch {
                print("Error loading group colors: \(error)")
            }
        }
    }

    // Keyboard handling

    /// Registers block-based NotificationCenter observers that mirror the
    /// keyboard's end frame height into `keyboardHeight` (0 on will-hide).
    /// NOTE(review): addObserver(forName:object:queue:using:) returns an
    /// observer token that is discarded here; those tokens are what must be
    /// passed to removeObserver(_:) for removal — see removeKeyboardObservers.
    private func setupKeyboardObservers() {
        let updateKeyboardHeight: (Notification) -> Void = { notification in
            if let keyboardFrame = notification.userInfo?[UIResponder.keyboardFrameEndUserInfoKey] as? CGRect {
                self.keyboardHeight = keyboardFrame.height
            }
        }

        NotificationCenter.default.addObserver(forName: UIResponder.keyboardWillShowNotification, object: nil, queue: .main, using: updateKeyboardHeight)
        NotificationCenter.default.addObserver(forName: UIResponder.keyboardWillChangeFrameNotification, object: nil, queue: .main, using: updateKeyboardHeight)

        NotificationCenter.default.addObserver(forName: UIResponder.keyboardWillHideNotification, object: nil, queue: .main) { _ in
            self.keyboardHeight = 0
        }
    }

    /// Intended to undo setupKeyboardObservers.
    /// NOTE(review): this likely does nothing — the observers above were
    /// registered with the block-based API, which is removed via the returned
    /// token, not via removeObserver(self, name:object:). Also
    /// keyboardWillChangeFrameNotification is never addressed here. Consider
    /// storing the tokens and removing them instead — TODO confirm and fix.
    private func removeKeyboardObservers() {
        NotificationCenter.default.removeObserver(self, name: UIResponder.keyboardWillShowNotification, object: nil)
        NotificationCenter.default.removeObserver(self, name: UIResponder.keyboardWillHideNotification, object: nil)
    }

    /// Builds the main (front) screen: the text area (split-screen transcript +
    /// editor, or full-screen editor), overlays, and bottom button row.
    /// `menuWidth` (75% of screen width) matches the side-menu offset used by
    /// the caller. NOTE(review): this definition continues beyond the visible
    /// capture; only its opening portion is documented here.
    private func mainContentView(geometry: GeometryProxy, keyboardOverlap: CGFloat) -> some View {
        let menuWidth = geometry.size.width * 0.75

        return ZStack {
            VStack(spacing: 0) {
                // 1. Top Content (Text Editor + Overlays)
                // Takes all remaining space above buttons
                GeometryReader { textGeo in
                    ZStack(alignment: .topLeading) {
                        if isSplitScreenMode {
                            // SPLIT SCREEN MODE
                            VStack(spacing: 0) {
                                // Top Half: Live Transcript from SpeechRecognizer
                                VStack(alignment: .leading, spacing: 0) {
                                    HStack {
                                        Image(systemName: "person.wave.2")
                                            .font(.system(size: 14))
                                            .foregroundColor(themeManager.secondaryTextColor)
                                        Text("Hearing Person Speaking")
                                            .font(.caption)
                                            .foregroundColor(themeManager.secondaryTextColor)
                                        Spacer()
                                        // Red dot = actively recording.
                                        if speechRecognizer.isRecording {
                                            Circle()
                                                .fill(Color.red)
                                                .frame(width: 8, height: 8)
                                        }
                                    }
                                    .padding(.horizontal)
                                    .padding(.top, 45)
                                    .padding(.bottom, 8)

                                    ScrollViewReader { proxy in
                                        ScrollView {
                                            // When translating: show transcript translated to English
                                            // When not translating: show original transcript
                                            let displayText: String = {
                                                if isShowingTranslation {
                                                    return translatedTranscript.isEmpty ? "Translating..."
: translatedTranscript + } else { + return speechRecognizer.transcript.isEmpty ? "Listening for speech..." : speechRecognizer.transcript + } + }() + let isPlaceholder = (!isShowingTranslation && speechRecognizer.transcript.isEmpty) || (isShowingTranslation && translatedTranscript.isEmpty) + + Text(displayText) + .font(.system(size: UIDevice.current.userInterfaceIdiom == .pad ? 28 : 18, weight: .medium)) + .foregroundColor(isPlaceholder ? themeManager.secondaryTextColor : themeManager.textColor) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(.horizontal) + .padding(.bottom, 10) + .id("transcriptBottom") + } + .onAppear { + // Scroll to bottom when entering split screen + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + proxy.scrollTo("transcriptBottom", anchor: .bottom) + } + } + .onChange(of: speechRecognizer.transcript) { + withAnimation { + proxy.scrollTo("transcriptBottom", anchor: .bottom) + } + } + .onChange(of: translatedTranscript) { + withAnimation { + proxy.scrollTo("transcriptBottom", anchor: .bottom) + } + } + } + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(themeManager.backgroundColor.opacity(0.95)) + + // Divider + Rectangle() + .fill(themeManager.secondaryTextColor.opacity(0.3)) + .frame(height: 2) + + // Bottom Half: User Typed Text + VStack(alignment: .leading, spacing: 0) { + HStack { + Image(systemName: "keyboard") + .font(.system(size: 14)) + .foregroundColor(themeManager.secondaryTextColor) + Text("You Are Typing") + .font(.caption) + .foregroundColor(themeManager.secondaryTextColor) + Spacer() + } + .padding(.horizontal) + .padding(.top, 8) + .padding(.bottom, 8) + + ZStack(alignment: .topLeading) { + if isShowingTranslation { + // Show user text translated to target language (read-only) + ScrollViewReader { proxy in + ScrollView { + let displayText = translatedUserText.isEmpty ? "Translating..." 
: translatedUserText + Text(displayText) + .font(.system(size: UIDevice.current.userInterfaceIdiom == .pad ? 28 : 18, weight: .medium)) + .foregroundColor(translatedUserText.isEmpty ? themeManager.secondaryTextColor : themeManager.textColor) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(.horizontal) + .padding(.bottom, 10) + .id("typedTextBottom") + } + .onAppear { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { + withAnimation { + proxy.scrollTo("typedTextBottom", anchor: .bottom) + } + } + } + .onChange(of: translatedUserText) { + withAnimation { + proxy.scrollTo("typedTextBottom", anchor: .bottom) + } + } + } + } else { + // Show TextEditor for user input - Native TextEditor handles newlines + TextEditor(text: $currentNote) + .font(.system(size: UIDevice.current.userInterfaceIdiom == .pad ? 28 : 18, weight: .medium)) + .foregroundColor(themeManager.textColor) + .focused($isInputActive) + .scrollContentBackground(.hidden) + .frame(maxWidth: .infinity, maxHeight: .infinity) + .padding(.horizontal, 12) + .onChange(of: currentNote) { + // Auto-speak in continuous mode (same as main screen) + if isContinuousMode { + checkAndSpeakSentences() + } + } + } + + // Speech Highlight Overlay for split screen (covers TextEditor) + if speechDelegate.isSpeaking && !isShowingTranslation { + if let range = speechDelegate.characterRange { + let adjustedLocation = range.location + currentUtteranceOffset + let adjustedRange = NSRange(location: adjustedLocation, length: range.length) + + ScrollView { + Text(attributedText(for: currentNote, highlighting: adjustedRange)) + .font(.system(size: UIDevice.current.userInterfaceIdiom == .pad ? 
28 : 18, weight: .medium)) + .foregroundColor(themeManager.textColor) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(.horizontal) + .padding(.bottom, 10) + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(themeManager.backgroundColor) + .allowsHitTesting(false) + } + } + + // Placeholder when empty (only show when not translating and note is empty) + if currentNote.isEmpty && !isShowingTranslation && !speechDelegate.isSpeaking { + Text("Start typing here...") + .font(.system(size: UIDevice.current.userInterfaceIdiom == .pad ? 28 : 18, weight: .medium)) + .foregroundColor(themeManager.secondaryTextColor) + .padding(.horizontal, 16) // Match TextEditor padding + .padding(.top, 8) + .allowsHitTesting(false) + } + } + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + .clipped() // Prevent content from extending beyond bounds + .background(themeManager.backgroundColor) + } + .layoutPriority(1) // Ensure split screen takes priority in layout + .background(themeManager.backgroundColor) + .colorScheme(themeManager.currentTheme == .light ? 
.light : .dark) + // Swipe gestures for split screen + .gesture( + DragGesture() + .onEnded { value in + if value.translation.height > 50 { + // Swipe Down: Dismiss Keyboard + isInputActive = false + } else if value.translation.width < -50 { + // Swipe Left: Clear both user text and transcript + saveCurrentNote() + currentNote = "" + speechRecognizer.clear() + translatedNote = "" + translatedTranscript = "" + translatedUserText = "" + isShowingTranslation = false + lastSpokenText = "" + currentUtteranceOffset = 0 + } else if value.translation.width > 50 { + // Swipe Right: Go to full screen VoiceNoteView + withAnimation(.easeInOut(duration: 0.6)) { + isVoiceScreenActive = true + } + } + } + ) + } else { + // NORMAL FULL SCREEN MODE + TextEditor(text: $currentNote) + .scrollDisabled(true) + .focused($isInputActive) + .font(.system(size: fontSize, weight: .bold)) + .foregroundColor(themeManager.textColor) + .padding(.horizontal) + .padding(.top, 40) // Avoid overlap with Clear All button + .frame(maxWidth: .infinity, maxHeight: .infinity) + .scrollContentBackground(.hidden) // Allow custom background + .background(themeManager.backgroundColor) + .colorScheme(themeManager.currentTheme == .light ? .light : .dark) + .onChange(of: currentNote) { + adjustFontSize(containerSize: textGeo.size) + } + .onChange(of: translatedNote) { + adjustFontSize(containerSize: textGeo.size) + } + .onChange(of: isShowingTranslation) { + adjustFontSize(containerSize: textGeo.size) + } + // We trigger font adjustment when the frame size changes (e.g. 
keyboard appears/disappears) + .onChange(of: textGeo.size) { + adjustFontSize(containerSize: textGeo.size) + } + // Center Swipe Gestures + .gesture( + DragGesture() + .onEnded { value in + // Check if it's a center swipe (not edge) + let startX = value.startLocation.x + let isEdgeSwipe = startX < 50 || startX > geometry.size.width - 50 + + if !isEdgeSwipe { + if value.translation.height > 50 { + // Swipe Down: Dismiss Keyboard + isInputActive = false + } else if value.translation.width < -50 { + // Swipe Left: Clear Text + saveCurrentNote() + currentNote = "" + // Also clear translation state + isShowingTranslation = false + translatedNote = "" + lastSpokenText = "" + currentUtteranceOffset = 0 + } else if value.translation.width > 50 { + // Swipe Right: Flip to Voice Screen + withAnimation(.easeInOut(duration: 0.6)) { + isVoiceScreenActive = true + } + } + } + } + ) + + // Auto-speak monitor + .onChange(of: currentNote) { + if isContinuousMode { + checkAndSpeakSentences() + } + } + + // Speech Highlight Overlay + + // Speech Highlight Overlay + if speechDelegate.isSpeaking { + let textToHighlight = (isShowingTranslation && !translatedNote.isEmpty) ? 
translatedNote : currentNote + + // Only overlay if we have a range to highlight + if let range = speechDelegate.characterRange { + // Adjust range by offset + let adjustedLocation = range.location + currentUtteranceOffset + let adjustedRange = NSRange(location: adjustedLocation, length: range.length) + + ScrollView { + Text(attributedText(for: textToHighlight, highlighting: adjustedRange)) + .font(.system(size: fontSize, weight: .bold)) + .foregroundColor(themeManager.textColor) + .padding(.horizontal) // Match TextEditor's 16pt default approximately + .padding(.top, 40) // Match padding + .frame(maxWidth: .infinity, alignment: .leading) + } + .padding(.horizontal, 5) // TextEditor has distinct internal padding, adjusting slightly + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(themeManager.backgroundColor) // Cover the editor + .allowsHitTesting(false) // Pass touches through + } + } + + // 2. Translation View (Overlay) + if isShowingTranslation { + ScrollView { + Text(translatedNote.isEmpty ? "Translating..." 
: translatedNote) + .font(.system(size: fontSize, weight: .bold)) + .foregroundColor(themeManager.textColor) + .padding(.horizontal) + .padding(.top, 40) + .frame(maxWidth: .infinity, alignment: .leading) + } + .scrollDisabled(true) + .background(themeManager.backgroundColor) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width < -50 { + // Swipe Left: Clear Text + saveCurrentNote() + currentNote = "" + translatedNote = "" + isShowingTranslation = false + lastSpokenText = "" + currentUtteranceOffset = 0 + + // Focus input + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + isInputActive = true + } + } else if value.translation.width > 50 { + // Swipe Right: Flip to Voice Screen + withAnimation(.easeInOut(duration: 0.6)) { + isVoiceScreenActive = true + } + } + } + ) + } + } + + // Placeholder text (hidden in split screen mode) + if currentNote.isEmpty && !isSplitScreenMode { + VStack(spacing: 10) { + VStack(spacing: 8) { + Text("Start Typing") + .font(.system(size: 30, weight: .bold)) + .foregroundColor(themeManager.secondaryTextColor.opacity(0.8)) + + Text("(shake to start over)") + .font(.system(size: 16)) + .foregroundColor(themeManager.secondaryTextColor.opacity(0.8)) + } + .padding(.bottom, 20) + + Image(systemName: "arrow.right") + .font(.system(size: fontSize * 0.8, weight: .bold)) + .foregroundColor(themeManager.secondaryTextColor) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(.leading, 20) + + Text("swipe from left edge for History") + .font(.system(size: fontSize * 0.5, weight: .medium)) + .foregroundColor(themeManager.secondaryTextColor) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(.leading, 20) + + Text("swipe from right edge for Favorites") + .font(.system(size: fontSize * 0.5, weight: .medium)) + .foregroundColor(themeManager.secondaryTextColor) + .frame(maxWidth: .infinity, alignment: .trailing) + .padding(.trailing, 20) + + Image(systemName: "arrow.left") + .font(.system(size: 
fontSize * 0.8, weight: .bold)) + .foregroundColor(themeManager.secondaryTextColor) + .frame(maxWidth: .infinity, alignment: .trailing) + .padding(.trailing, 20) + } + .padding() + .padding(.top, 100) // Add some top padding to position it "upper middle" + .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .top) + .allowsHitTesting(false) + } + + // Top bar with Split Screen toggle and Clear all button + HStack { + Spacer() + + // Split Screen Toggle (Center) + Button(action: { + withAnimation(.easeInOut(duration: 0.3)) { + isSplitScreenMode.toggle() + } + // Show keyboard when entering split screen mode (triggers layout fix) + if isSplitScreenMode { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + isInputActive = true + } + } + }) { + Image(systemName: isSplitScreenMode ? "rectangle" : "rectangle.split.1x2") + .font(.system(size: 22)) + .foregroundColor(themeManager.textColor.opacity(0.7)) + .padding(10) + } + + Spacer() + + // Clear all button (Right) - also show in split mode if there's transcript + if !currentNote.isEmpty || (isSplitScreenMode && !speechRecognizer.transcript.isEmpty) { + Button(action: { + saveCurrentNote() + currentNote = "" + translatedNote = "" + isShowingTranslation = false + lastSpokenText = "" + currentUtteranceOffset = 0 + + // In split screen mode, also clear transcript + if isSplitScreenMode { + speechRecognizer.clear() + translatedTranscript = "" + translatedUserText = "" + } + + // Dismiss keyboard to reset layout state + isInputActive = false + }) { + Text("Clear all") + .foregroundColor(.red) + .padding() + } + } + } + .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .top) + .padding(.horizontal, 10) + .padding(.top, 5) + } + } // End GeometryReader (Text Area) + + // 3. 
Bottom Buttons + // Stacked below text editor + HStack(spacing: 10) { + Spacer(minLength: 0) + + // Translate Button (show in split mode or when there's text) + if !currentNote.isEmpty || (isSplitScreenMode && !speechRecognizer.transcript.isEmpty) { + Button(action: { + if targetLanguageIdentifier.isEmpty { + showSettings = true + } else { + triggerTranslation() + } + }) { + Group { + if isShowingTranslation { + Text("Original 🇺🇸") + } else { + if let lang = languageManager.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) { + Text("Translate to \(lang.flag)") + } else { + if targetLanguageIdentifier.isEmpty { + Text("Translate") + } else { + Text("Translate to 🇲🇽") + } + } + } + } + .font(.system(size: 16, weight: .medium)) + .fixedSize(horizontal: true, vertical: false) + .foregroundColor(.white) + .padding(.horizontal, 16) + .padding(.vertical, 12) + .background(Color.blue.opacity(0.6)) + .cornerRadius(25) + } + } + + // "Speak for me" Button + if !currentNote.isEmpty { + Button(action: { + if isShowingTranslation { + // Translation Mode: Simple Speak Toggle + if speechDelegate.isSpeaking { + speechSynthesizer.stopSpeaking(at: .immediate) + } else { + speakText() + } + } else { + // Original Continuous Monitor Logic + if isContinuousMode { + // Turn off continuous mode + isContinuousMode = false + speechSynthesizer.stopSpeaking(at: .immediate) + } else { + if !speechSynthesizer.isSpeaking { + // "Start monitoring" -> Enable continuous mode + isContinuousMode = true + // Speak current content immediately (manual resume style) AND start monitoring + speakText() + } else { + // If already speaking manually, stop + speechSynthesizer.stopSpeaking(at: .immediate) + } + } + } + }) { + Text(isShowingTranslation ? (speechDelegate.isSpeaking ? "Speaking..." : "Speak") : (isContinuousMode ? "Speaking..." : (speechDelegate.isSpeaking ? "Speaking..." 
: "Speak for me"))) + .font(.system(size: 16, weight: .medium)) + .fixedSize(horizontal: true, vertical: false) + .foregroundColor(.white) + .padding(.horizontal, 12) + .padding(.vertical, 12) + .background( + ZStack { + if isShowingTranslation { + // Simple Blue/Cyan for Translation + if speechDelegate.isSpeaking { + Color.cyan + .shadow(color: .cyan, radius: isSpeakingAnimation ? 30 : 2) + } else { + Color.blue.opacity(0.6) + } + } else { + // Continuous Mode Colors + if isContinuousMode { + Color.green + } else if speechDelegate.isSpeaking { + Color.cyan + .shadow(color: .cyan, radius: isSpeakingAnimation ? 30 : 2) + } else { + Color.blue.opacity(0.6) + } + } + } + ) + .cornerRadius(25) + .overlay( + RoundedRectangle(cornerRadius: 25) + .stroke(Color.white.opacity(0.5), lineWidth: speechDelegate.isSpeaking ? 1 : 0) + ) + .scaleEffect(isSpeakingAnimation ? 1.1 : 1.0) + } + .onChange(of: speechDelegate.isSpeaking) { oldValue, speaking in + if speaking { + withAnimation(.easeInOut(duration: 0.5).repeatForever(autoreverses: true)) { + isSpeakingAnimation = true + } + } else { + withAnimation(.default) { + isSpeakingAnimation = false + } + } + } + } + + // Flip Button + Button(action: { + withAnimation(.easeInOut(duration: 0.6)) { + isVoiceScreenActive = true + } + }) { + Text("Flip") + .font(.system(size: 16, weight: .medium)) + .fixedSize(horizontal: true, vertical: false) + .foregroundColor(.white) + .padding(.horizontal, 20) + .padding(.vertical, 12) + .background(Color.blue) + .cornerRadius(25) + } + } + .padding(.trailing, 8) + .padding(.bottom, keyboardHeight > 0 ? 10 : geometry.safeAreaInsets.bottom + 10) + .padding(.top, isSplitScreenMode ? 
3 : 10) // Small padding in split screen mode + .background(Color.black) + .allowsHitTesting(!isMenuOpen && !isGroupsMenuOpen) + } + .onChange(of: isSplitScreenMode) { oldValue, newValue in + // Force layout recalculation when entering split screen + if newValue { + // Simulate typing to trigger layout update, which user confirmed fixes the gap + DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { + let original = currentNote + // Only trigger if text is not empty to avoid placeholder glitches, or handle empty case + if !original.isEmpty { + currentNote = original + " " + DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { + currentNote = original + } + } else { + // If empty, we can toggle a dummy character since placeholder might be showing + currentNote = " " + DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { + currentNote = "" + } + } + } + } + } + .onChange(of: isVoiceScreenActive) { oldValue, newValue in + // Force layout recalculation when returning from voice screen to split screen + if !newValue && isSplitScreenMode { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { + let original = currentNote + if !original.isEmpty { + currentNote = original + " " + DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { + currentNote = original + } + } else { + currentNote = " " + DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { + currentNote = "" + } + } + } + } + } + // View-Level Enforcement: + // iPhone: Force 0 padding if in Landscape OR Hardware Keyboard attached (Strict Mode). + // iPad: Allow padding so the shortcut bar doesn't cover content. + .padding(.bottom, (UIDevice.current.userInterfaceIdiom != .pad && (geometry.size.width > geometry.size.height || isHardwareKeyboardAttached)) ? 0 : (keyboardHeight > 0 ? keyboardHeight : 0)) + .background(Color.black) // Ensure keyboard area is black + .offset(x: isMenuOpen ? menuWidth : (isGroupsMenuOpen ? 
-menuWidth : 0)) + .animation(.easeInOut, value: isMenuOpen) + .animation(.easeInOut, value: isGroupsMenuOpen) + .transition(.modifier( + active: FlipTransition(angle: -90), + identity: FlipTransition(angle: 0) + )) + } + .ignoresSafeArea(.all, edges: .bottom) + .overlay { + // Blocking Overlay for Landscape Mode without Hardware Keyboard + GeometryReader { proxy in + if proxy.size.width > proxy.size.height && !isHardwareKeyboardAttached { + ZStack { + themeManager.backgroundColor.ignoresSafeArea() + + VStack(spacing: 20) { + Image(systemName: "keyboard.badge.exclamationmark") + .font(.system(size: 60)) + .foregroundColor(themeManager.secondaryTextColor) + + Text("Landscape Not Supported") + .font(.title) + .bold() + .foregroundColor(themeManager.textColor) + + Text("Please rotate your device to portrait mode\nor connect a hardware keyboard.") + .multilineTextAlignment(.center) + .font(.body) + .foregroundColor(themeManager.secondaryTextColor) + } + .padding() + } + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(themeManager.backgroundColor) + .task { + // Dismiss keyboard immediately when blocking overlay appears + isInputActive = false + keyboardHeight = 0 + } + } + } + } + .onReceive(NotificationCenter.default.publisher(for: UIResponder.keyboardWillShowNotification)) { notification in + // Ghost Keyboard Filter & Landscape Policy: + if let keyboardFrame = notification.userInfo?[UIResponder.keyboardFrameEndUserInfoKey] as? CGRect { + let isLandscape = UIScreen.main.bounds.width > UIScreen.main.bounds.height + let isHardware = isHardwareKeyboardAttached || GCKeyboard.coalesced != nil + let isPad = UIDevice.current.userInterfaceIdiom == .pad + + // Policy: + // iPhone: In landscape or if HW keyboard attached, strict 0 height. + // iPad: Allow system height (accounts for shortcut bar). 
+ if !isPad && (isLandscape || isHardware) { + self.keyboardHeight = 0 + } else { + self.keyboardHeight = keyboardFrame.height + } + } + } + .onReceive(NotificationCenter.default.publisher(for: UIResponder.keyboardWillChangeFrameNotification)) { notification in + if let keyboardFrame = notification.userInfo?[UIResponder.keyboardFrameEndUserInfoKey] as? CGRect { + let isLandscape = UIScreen.main.bounds.width > UIScreen.main.bounds.height + let isHardware = isHardwareKeyboardAttached || GCKeyboard.coalesced != nil + let isPad = UIDevice.current.userInterfaceIdiom == .pad + + if !isPad && (isLandscape || isHardware) { + self.keyboardHeight = 0 + } else { + self.keyboardHeight = keyboardFrame.height + } + } + } + .onReceive(NotificationCenter.default.publisher(for: UIResponder.keyboardWillHideNotification)) { _ in + self.keyboardHeight = 0 + } + .onReceive(NotificationCenter.default.publisher(for: .GCKeyboardDidConnect)) { _ in + isHardwareKeyboardAttached = true + // Reset layout if "squashed"; assume 0 height for hardware kb initially + self.keyboardHeight = 0 + } + .onReceive(NotificationCenter.default.publisher(for: .GCKeyboardDidDisconnect)) { _ in + isHardwareKeyboardAttached = false + // If we are in landscape, this will trigger the overlay, which should dismiss KB. + // But let's let the geometry/state change handler do that. 
+ } + .onAppear { + // Existing onAppear logic + SpeechRecognizer.shared.startTranscribing() + speechSynthesizer.delegate = speechDelegate + // setupKeyboardObservers() -- REMOVED + loadData() + isInputActive = true + + if !targetLanguageIdentifier.isEmpty { + VoiceManager.shared.updateTargetLanguage(to: targetLanguageIdentifier) + } + + // Hardware Keyboard Detection + isHardwareKeyboardAttached = GCKeyboard.coalesced != nil + } + .onChange(of: isHardwareKeyboardAttached) { _, attached in + // If detached while in landscape -> Blocking overlay appears -> Dismiss KB + // We can check geometry here via GeometryReader but we are outside of it. + // We rely on the overlay's existence or separate check? + // Simplest: The overlay itself can have a .task / .onAppear + } + } + + private func attributedText(for text: String, highlighting nsRange: NSRange?) -> AttributedString { + // Fallback or base style + let baseColor = themeManager.textColor + + guard let nsRange = nsRange, + let range = Range(nsRange, in: text) else { + var full = AttributedString(text) + full.foregroundColor = baseColor + return full + } + + // Construct by parts to ensure we target the EXACT range + // rather than searching for the substring content + let prefix = text[.. 
Void + let onStarTap: (NoteItem) -> Void + let onClearAll: () -> Void + let onDelete: (NoteItem) -> Void + let onSettings: () -> Void + let onClose: () -> Void + + var body: some View { + HStack(spacing: 0) { + ZStack { + Color.black + .contentShape(Rectangle()) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width < -50 { + onClose() + } + } + ) + + VStack(alignment: .leading) { + HStack { + Text("History") + .font(.title) + .bold() + .foregroundColor(.white) + Spacer() + Button("Clear All") { + onClearAll() + } + .foregroundColor(.red) + } + .padding(.top, 50) + .padding(.horizontal) + .contentShape(Rectangle()) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width < -50 { + onClose() + } + } + ) + + List { + ForEach(notes) { note in + HStack { + Button(action: { + onSelect(note) + }) { + VStack(alignment: .leading) { + Text(note.content) + .lineLimit(1) + .font(.headline) + .foregroundColor(.white) + Text(note.timestamp, style: .time) + .font(.caption) + .foregroundColor(.gray) + } + .frame(maxWidth: .infinity, alignment: .leading) + .contentShape(Rectangle()) + } + .buttonStyle(.plain) + + Spacer() + + Button(action: { + onStarTap(note) + }) { + Image(systemName: note.group != nil ? "star.fill" : "star") + .foregroundColor(note.group != nil ? 
.yellow : .gray) + } + .buttonStyle(BorderlessButtonStyle()) + } + .listRowBackground(Color.black) + } + .onDelete { indexSet in + for index in indexSet { + onDelete(notes[index]) + } + } + + // Footer to catch clicks in empty space + Section { + Color.clear + .contentShape(Rectangle()) + .frame(height: 1000) + .listRowBackground(Color.clear) + .listRowSeparator(.hidden) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width < -50 { + onClose() + } + } + ) + } + } + .listStyle(.plain) + .scrollContentBackground(.hidden) + + Spacer() + + Button(action: onSettings) { + HStack { + Image(systemName: "gearshape.fill") + Text("Settings") + .bold() + } + .foregroundColor(.white) + .padding() + } + } + } + + Rectangle() + .fill(Color.white) + .frame(width: 1) + } + .edgesIgnoringSafeArea(.all) + } +} + +struct GroupsMenuView: View { + let groups: [String] + let groupColors: [String: String] + let notes: [NoteItem] + let getGroupColor: (String) -> (text: Color, bg: Color) + let onSelect: (NoteItem) -> Void + let onRenameNote: (NoteItem) -> Void + let onRenameGroup: (String) -> Void + let onColorGroup: (String, String) -> Void + let onDelete: (NoteItem) -> Void + let onDeleteGroup: (String) -> Void + let onClose: () -> Void + + var body: some View { + HStack(spacing: 0) { + Rectangle() + .fill(Color.white) + .frame(width: 1) + + ZStack { + Color.black + .contentShape(Rectangle()) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width > 50 { + onClose() + } + } + ) + + VStack(alignment: .leading) { + Text("Faves") + .font(.title) + .bold() + .foregroundColor(.white) + .padding(.top, 50) + .padding(.leading) + .contentShape(Rectangle()) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width > 50 { + onClose() + } + } + ) + + List { + ForEach(groups, id: \.self) { group in + let colors = getGroupColor(group) + + Section { + // Group Header Row (interactive) + HStack { + Text(group) + .font(.headline) + 
.foregroundColor(colors.text) + .frame(maxWidth: .infinity, alignment: .leading) + .contentShape(Rectangle()) + .onTapGesture {} // Consume tap to prevent closing + } + .padding(.vertical, 8) + .listRowBackground(colors.bg) + .swipeActions(edge: .trailing, allowsFullSwipe: true) { + Button(role: .destructive) { + onDeleteGroup(group) + } label: { + Label("Delete", systemImage: "trash") + } + } + .contextMenu { + Button { + onRenameGroup(group) + } label: { + Label("Rename", systemImage: "pencil") + } + + Menu("Color") { + Button("Grey") { + onColorGroup(group, "Grey") + } + Button("Green") { + onColorGroup(group, "Green") + } + Button("Red") { + onColorGroup(group, "Red") + } + Button("Blue") { + onColorGroup(group, "Blue") + } + } + } + + // Notes under this group + ForEach(notes.filter { $0.group == group }) { note in + Text(note.displayName ?? note.content) + .lineLimit(1) + .foregroundColor(colors.text) + .onTapGesture { + onSelect(note) + } + .listRowBackground(colors.bg) + .contextMenu { + Button { + onRenameNote(note) + } label: { + Label("Rename", systemImage: "pencil") + } + } + } + .onDelete { indexSet in + let filteredNotes = notes.filter { $0.group == group } + for index in indexSet { + let noteToDelete = filteredNotes[index] + onDelete(noteToDelete) + } + } + } + } + + // Footer to catch clicks in empty space + Section { + Color.clear + .contentShape(Rectangle()) + .frame(height: 1000) + .listRowBackground(Color.clear) + .listRowSeparator(.hidden) + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width > 50 { + onClose() + } + } + ) + } + } + .listStyle(.insetGrouped) + .scrollContentBackground(.hidden) + + Spacer() + } + } + } + .edgesIgnoringSafeArea(.all) + } +} + +#Preview { + ContentView() +} + +// Extensions for Shake Gesture +extension UIDevice { + static let deviceDidShakeNotification = Notification.Name(rawValue: "deviceDidShakeNotification") +} + +extension UIWindow { + open override func motionEnded(_ motion: 
UIEvent.EventSubtype, with event: UIEvent?) {
+        // Broadcast shakes so SwiftUI views can react via onShake(perform:).
+        if motion == .motionShake {
+            NotificationCenter.default.post(name: UIDevice.deviceDidShakeNotification, object: nil)
+        }
+    }
+}
+
+/// Runs `action` whenever the window posts `deviceDidShakeNotification`.
+struct DeviceShakeViewModifier: ViewModifier {
+    let action: () -> Void
+
+    func body(content: Content) -> some View {
+        // NOTE(review): the original chained a no-op `.onAppear()` here; it
+        // had no behavioral effect and has been removed.
+        content
+            .onReceive(NotificationCenter.default.publisher(for: UIDevice.deviceDidShakeNotification)) { _ in
+                action()
+            }
+    }
+}
+
+extension View {
+    /// Convenience wrapper: `someView.onShake { ... }`.
+    func onShake(perform action: @escaping () -> Void) -> some View {
+        self.modifier(DeviceShakeViewModifier(action: action))
+    }
+}
+
+/// 3D "card flip" geometry effect used for the screen transition.
+/// Animates `angle` (in degrees) around `axis` with a fixed perspective.
+struct FlipTransition: GeometryEffect {
+    var angle: Double
+    var axis: (x: CGFloat, y: CGFloat) = (x: 0, y: 1)
+
+    var animatableData: Double {
+        get { angle }
+        set { angle = newValue }
+    }
+
+    func effectValue(size: CGSize) -> ProjectionTransform {
+        let a = CGFloat(Angle(degrees: angle).radians)
+
+        var transform3d = CATransform3DIdentity
+        transform3d.m34 = -1 / 500 // Perspective
+
+        // Rotate about the view's center: shift the anchor to the center,
+        // rotate, then shift back via the affine concat below.
+        transform3d = CATransform3DRotate(transform3d, a, axis.x, axis.y, 0)
+        transform3d = CATransform3DTranslate(transform3d, -size.width/2.0, -size.height/2.0, 0)
+
+        let affineTransform = ProjectionTransform(CGAffineTransform(translationX: size.width/2.0, y: size.height/2.0))
+
+        return ProjectionTransform(transform3d).concatenating(affineTransform)
+    }
+}
+
+/// AVSpeechSynthesizer delegate that publishes whether speech is in progress
+/// and which character range is currently being spoken. AVFoundation reports
+/// ranges per-utterance; `register(utterance:offset:)` lets callers map each
+/// utterance back into one combined string for highlighting.
+class SpeechDelegate: NSObject, ObservableObject, AVSpeechSynthesizerDelegate {
+    @Published var isSpeaking = false
+    @Published var characterRange: NSRange?
+
+    // Per-utterance character offset into the combined text. Entries are
+    // removed in didFinish/didCancel so utterances are not retained forever.
+    private var utteranceOffsets: [AVSpeechUtterance: Int] = [:]
+
+    func register(utterance: AVSpeechUtterance, offset: Int) {
+        utteranceOffsets[utterance] = offset
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, willSpeakRangeOfSpeechString characterRange: NSRange, utterance: AVSpeechUtterance) {
+        // Shift the utterance-local range by its registered offset so the UI
+        // can highlight within the full combined string.
+        let offset = utteranceOffsets[utterance] ?? 0
+        DispatchQueue.main.async {
+            self.characterRange = NSRange(location: characterRange.location + offset, length: characterRange.length)
+        }
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didStart utterance: AVSpeechUtterance) {
+        DispatchQueue.main.async {
+            self.isSpeaking = true
+            // Don't reset characterRange here; willSpeakRangeOfSpeechString
+            // publishes the first highlighted range.
+        }
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didPause utterance: AVSpeechUtterance) {
+        // Keep the published flag accurate if playback is ever paused.
+        DispatchQueue.main.async {
+            self.isSpeaking = false
+        }
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didContinue utterance: AVSpeechUtterance) {
+        DispatchQueue.main.async {
+            self.isSpeaking = true
+        }
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didFinish utterance: AVSpeechUtterance) {
+        DispatchQueue.main.async {
+            // Drop the bookkeeping entry for the finished utterance.
+            self.utteranceOffsets.removeValue(forKey: utterance)
+
+            // synthesizer.isSpeaking stays true while more utterances remain
+            // queued, so only clear state after the last one finishes.
+            if !synthesizer.isSpeaking {
+                self.isSpeaking = false
+                self.characterRange = nil
+            }
+        }
+    }
+
+    func speechSynthesizer(_ synthesizer: AVSpeechSynthesizer, didCancel utterance: AVSpeechUtterance) {
+        DispatchQueue.main.async {
+            self.utteranceOffsets.removeAll()
+            self.isSpeaking = false
+            self.characterRange = nil
+        }
+    }
+}
diff --git a/FlipTalk/FlipTalkApp.swift b/FlipTalk/FlipTalkApp.swift
new file mode 100644
index 0000000..8fc85c0
--- /dev/null
+++ b/FlipTalk/FlipTalkApp.swift
@@ -0,0 +1,20 @@
+//
+// FlipTalkApp.swift
+// FlipTalk
+//
+// Created by Jared Evans on 12/2/25.
+// + +import SwiftUI + +@main +struct FlipTalkApp: App { + @StateObject private var themeManager = ThemeManager() + + var body: some Scene { + WindowGroup { + ContentView() + .environmentObject(themeManager) + } + } +} diff --git a/FlipTalk/LanguageManager.swift b/FlipTalk/LanguageManager.swift new file mode 100644 index 0000000..b9913ea --- /dev/null +++ b/FlipTalk/LanguageManager.swift @@ -0,0 +1,171 @@ +import Foundation +import Combine +import SwiftUI +#if canImport(Translation) +import Translation +#endif + +class LanguageManager: ObservableObject { + static let shared = LanguageManager() + + @Published var supportedLanguages: [LanguageStatus] = [] + + init() { + // Initialize with candidates assuming internet required (fail-safe) + self.supportedLanguages = candidateLocales.compactMap { id in + let locale = Locale(identifier: id) + return LanguageStatus(id: id, locale: locale, isOnlineRequired: true) + }.sorted { $0.name < $1.name } + } + + struct LanguageStatus: Identifiable, Equatable { + let id: String // Locale identifier + let locale: Locale + let isOnlineRequired: Bool + + // English names for all supported languages + private static let englishNames: [String: String] = [ + "ar-SA": "Arabic", + "zh-CN": "Chinese (Simplified)", + "zh-TW": "Chinese (Traditional)", + "nl-NL": "Dutch", + "en-US": "English (United States)", + "en-UK": "English (United Kingdom)", + "fr-FR": "French (France)", + "de-DE": "German", + "id-ID": "Indonesian", + "it-IT": "Italian", + "ja-JP": "Japanese", + "ko-KR": "Korean", + "pl-PL": "Polish", + "pt-BR": "Portuguese (Brazil)", + "ru-RU": "Russian", + "es-ES": "Spanish (Spain)", + "es-MX": "Spanish (Mexico)", + "th-TH": "Thai", + "tr-TR": "Turkish", + "uk-UA": "Ukrainian", + "vi-VN": "Vietnamese" + ] + + var name: String { + Self.englishNames[id] ?? locale.identifier + } + + var flag: String { + locale.flagEmoji ?? 
"🏳️" + } + + var requiresInternet: Bool { + isOnlineRequired + } + + /// Languages that don't have downloadable packs (use built-in/internet) + var isDownloadAvailable: Bool { + // Only en-UK and es-MX don't have download available + return id != "en-UK" && id != "es-MX" + } + } + + // A curated list of languages supported by Apple Translate + private let candidateLocales: [String] = [ + "ar-SA", "zh-CN", "zh-TW", "nl-NL", "en-UK", "fr-FR", + "de-DE", "id-ID", "it-IT", "ja-JP", "ko-KR", "pl-PL", "pt-BR", + "ru-RU", "es-ES", "es-MX", "th-TH", "tr-TR", "uk-UA", "vi-VN" + ] + + private let startSpeakingTranslations: [String: String] = [ + "ar-SA": "تحدث الآن...", + "zh-CN": "开始说话...", + "zh-TW": "開始說話...", + "nl-NL": "Begin met spreken...", + "en-US": "Start speaking...", + "en-UK": "Start speaking...", + "fr-FR": "Commencez à parler...", + "de-DE": "Jetzt sprechen...", + "id-ID": "Mulai berbicara...", + "it-IT": "Inizia a parlare...", + "ja-JP": "話し始めてください...", + "ko-KR": "말씀해 주세요...", + "pl-PL": "Zacznij mówić...", + "pt-BR": "Comece a falar...", + "ru-RU": "Начните говорить...", + "es-ES": "Empieza a hablar...", + "es-MX": "Empieza a hablar...", + "th-TH": "เริ่มพูด...", + "tr-TR": "Konuşmaya başla...", + "uk-UA": "Почніть говорити...", + "vi-VN": "Bắt đầu nói..." + ] + + func getStartSpeakingText(for identifier: String) -> String { + return startSpeakingTranslations[identifier] ?? "Start speaking..." + } + + func checkAvailability() async { + guard #available(iOS 18.0, *) else { return } + + #if canImport(Translation) + var results: [LanguageStatus] = [] + let availability = LanguageAvailability() + let source = Locale.Language(identifier: "en-US") + + for identifier in candidateLocales { + let target = Locale.Language(identifier: identifier) + let status = await availability.status(from: source, to: target) + + // Should we limit to only supported/installed? 
+ // If we initialized with all candidates, maybe we should keep all candidates + // but update their online status? + // For now, let's just stick to the discovered ones to be accurate about "Translation" support. + // Actually, if we want the menu to work, we need items. + // If 'status' is unsupported, we should probably remove it? + // But 'candidateLocales' are presumably supported by the backend. + + if status == .supported || status == .installed { + let locale = Locale(identifier: identifier) + let isOnline = (status == .supported) + results.append(LanguageStatus(id: identifier, locale: locale, isOnlineRequired: isOnline)) + } + } + + await MainActor.run { + // Update the list with verified verification status + if !results.isEmpty { + self.supportedLanguages = results.sorted { $0.name < $1.name } + } + } + #endif + } + + /// Check if a language identifier is internet-only (no download available) + func isInternetOnlyLanguage(_ identifier: String) -> Bool { + return identifier == "en-UK" || identifier == "es-MX" + } +} + +extension Locale { + var flagEmoji: String? { + // Simple heuristic for region code + guard let region = self.region?.identifier else { + // Try to parse from identifier if region is missing + let parts = identifier.split(separator: "-") + if parts.count > 1 { + return String(parts.last!).flagEmoji + } + return nil + } + return region.flagEmoji + } +} + +extension String { + var flagEmoji: String { + let base: UInt32 = 127397 + var s = "" + for v in self.unicodeScalars { + s.unicodeScalars.append(UnicodeScalar(base + v.value)!) 
+ } + return s + } +} diff --git a/FlipTalk/RecommendedVoicesView.swift b/FlipTalk/RecommendedVoicesView.swift new file mode 100644 index 0000000..30f7972 --- /dev/null +++ b/FlipTalk/RecommendedVoicesView.swift @@ -0,0 +1,136 @@ +import SwiftUI + +struct RecommendedVoicesView: View { + @Environment(\.dismiss) var dismiss + + var body: some View { + NavigationView { + List { + Section(header: Text("English (United States)")) { + VoiceRecommendationRow( + name: "Alex", + details: "Male, High Quality", + description: "The smartest voice on iOS; breathes between sentences, sounds academic and very natural." + ) + VoiceRecommendationRow( + name: "Samantha", + details: "Female, Standard", + description: "The classic \"original Siri\" voice; clear and friendly but slightly computerized." + ) + VoiceRecommendationRow( + name: "Ava", + details: "Female, Premium", + description: "A modern, high-quality voice that sounds professional, warm, and very human-like." + ) + VoiceRecommendationRow( + name: "Allison", + details: "Female, Premium", + description: "A lighter, breathy, and pleasant voice; sounds like a helpful assistant." + ) + VoiceRecommendationRow( + name: "Tom", + details: "Male, Premium", + description: "A friendly, standard American male voice; clear and trustworthy." + ) + VoiceRecommendationRow( + name: "Susan", + details: "Female, Standard/Premium", + description: "A slightly more formal and crisp voice; sounds like a teacher or automated reader." + ) + VoiceRecommendationRow( + name: "Zoe", + details: "Female, Premium", + description: "A bright, cheerful, and younger-sounding voice; energetic vibe." + ) + VoiceRecommendationRow( + name: "Evan", + details: "Male, Enhanced", + description: "A deep, smooth, and modern voice; very natural sounding." + ) + VoiceRecommendationRow( + name: "Nathan", + details: "Male, Enhanced", + description: "A lighter, younger-sounding male voice; casual and friendly." 
+ ) + VoiceRecommendationRow( + name: "Noelle", + details: "Female, Enhanced", + description: "A soft, sweet, and modern female voice; very smooth flow." + ) + VoiceRecommendationRow( + name: "Joelle", + details: "Female, Enhanced", + description: "A clear, articulate, and slightly deeper modern female voice." + ) + VoiceRecommendationRow( + name: "Aaron (Siri Voice 2)", + details: "Male, Neural", + description: "The current standard American Male Siri voice; distinct, helpful, and highly polished." + ) + VoiceRecommendationRow( + name: "Nicky (Siri Voice 1)", + details: "Female, Neural", + description: "The current standard American Female Siri voice; recognizable and high-fidelity." + ) + } + + Section(header: Text("Spanish (Mexico)")) { + VoiceRecommendationRow( + name: "Paulina", + details: "Female, Standard/Premium", + description: "The gold standard for Mexican Spanish; sounds like a professional news anchor or navigator." + ) + VoiceRecommendationRow( + name: "Juan", + details: "Male, Standard/Premium", + description: "A clear, neutral male voice; sounds polite but slightly more robotic than Paulina." + ) + VoiceRecommendationRow( + name: "Siri Female (Voice 1)", + details: "Female, Neural", + description: "(If downloaded) Very smooth, natural, and helpful; indistinguishable from a real human assistant." + ) + VoiceRecommendationRow( + name: "Siri Male (Voice 2)", + details: "Male, Neural", + description: "(If downloaded) A professional, modern male assistant voice with a Mexican accent." 
+ ) + } + } + .navigationTitle("Recommended Voices") + .toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Done") { + dismiss() + } + } + } + } + } +} + +struct VoiceRecommendationRow: View { + let name: String + let details: String + let description: String + + var body: some View { + VStack(alignment: .leading, spacing: 4) { + HStack { + Text(name) + .font(.headline) + Spacer() + Text(details) + .font(.caption) + .padding(4) + .background(Color.blue.opacity(0.1)) + .cornerRadius(4) + .foregroundColor(.blue) + } + Text(description) + .font(.subheadline) + .foregroundColor(.secondary) + } + .padding(.vertical, 4) + } +} diff --git a/FlipTalk/SettingsView.swift b/FlipTalk/SettingsView.swift new file mode 100644 index 0000000..8674038 --- /dev/null +++ b/FlipTalk/SettingsView.swift @@ -0,0 +1,253 @@ +import SwiftUI +import AVFoundation +import AVKit +#if canImport(Translation) +import Translation +#endif + +struct SettingsView: View { + @EnvironmentObject var themeManager: ThemeManager + @ObservedObject var voiceManager = VoiceManager.shared + @State private var showRecommendedVoices = false + @Environment(\.dismiss) var dismiss + + // Easter Egg State + @State private var lightbulbTapCount = 0 + @State private var showEasterEgg = false + + // Persist selected language + @AppStorage("targetLanguageIdentifier") private var targetLanguageIdentifier: String = "" + + // Download confirmation state + @State private var showDownloadAlert = false + @State private var languageToDownload: LanguageManager.LanguageStatus? + @State private var previousLanguageIdentifier: String = "" + + // Translation configuration for triggering download + #if canImport(Translation) + @State private var downloadConfig: TranslationSession.Configuration? 
+ #endif + + var body: some View { + NavigationStack { + Form { + Section(header: Text("Theme Colors") + .font(.title2) + .bold() + .foregroundColor(.primary) + .textCase(nil)) { + Picker("Theme Colors", selection: $themeManager.currentTheme) { + ForEach(AppTheme.allCases) { theme in + Text(theme.displayName) + .tag(theme) + .onTapGesture { + themeManager.currentTheme = theme + if theme == .lightbulb { + lightbulbTapCount += 1 + if lightbulbTapCount >= 5 { + showEasterEgg = true + lightbulbTapCount = 0 + } + } else { + lightbulbTapCount = 0 + } + } + } + } + .pickerStyle(.inline) + .labelsHidden() + } + + Section(header: Text("Translation Language") + .font(.title2) + .bold() + .foregroundColor(.primary) + .textCase(nil)) { + Picker("Target Language", selection: $targetLanguageIdentifier) { + Text("Select a language").tag("") + ForEach(LanguageManager.shared.supportedLanguages) { lang in + if lang.isDownloadAvailable { + Text("\(lang.flag) \(lang.name) (Download available)").tag(lang.id) + } else { + Text("\(lang.flag) \(lang.name) (Internet required)").tag(lang.id) + } + } + } + .pickerStyle(.navigationLink) + } + + Section(header: Text("Voice Settings") + .font(.title2) + .bold() + .foregroundColor(.primary) + .textCase(nil), + footer: Text("Tip: For the most natural sound, download \"Enhanced\" or \"Premium\" voices in your iPhone Settings:\nSettings > Accessibility > Read & Speak > Voices.") + .font(.caption) + .foregroundColor(.secondary) + .padding(.top, 8)) { + + VStack(alignment: .leading) { + Text("English Voice") + .font(.headline) + Picker("English (US)", selection: $voiceManager.selectedEnglishVoiceIdentifier) { + ForEach(voiceManager.availableEnglishVoices, id: \.identifier) { voice in + Text(voiceManager.description(for: voice)) + .tag(voice.identifier) + } + } + .pickerStyle(.menu) + } + + VStack(alignment: .leading) { + // Dynamic Header for Target Voice + if let lang = LanguageManager.shared.supportedLanguages.first(where: { $0.id == 
targetLanguageIdentifier }) { + Text("\(lang.name) Voice") + .font(.headline) + } else { + Text("Target Language Voice") + .font(.headline) + } + + Picker("Voice", selection: $voiceManager.selectedTargetVoiceIdentifier) { + ForEach(voiceManager.availableTargetVoices, id: \.identifier) { voice in + Text(voiceManager.description(for: voice)) + .tag(voice.identifier) + } + } + .pickerStyle(.menu) + .disabled(voiceManager.availableTargetVoices.isEmpty) + + if voiceManager.availableTargetVoices.isEmpty { + Text("No specific voices found for this language. System default will be used.") + .font(.caption) + .foregroundColor(.secondary) + } + } + + Button(action: { + showRecommendedVoices = true + }) { + Text("See recommended voices.") + .font(.subheadline) + .foregroundColor(.blue) + } + } + } + .navigationTitle("Settings") + .toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Close") { + dismiss() + } + } + } + .onAppear { + // Ensure VoiceManager has the correct target language loaded on appear + if !targetLanguageIdentifier.isEmpty { + voiceManager.updateTargetLanguage(to: targetLanguageIdentifier) + } + } + .onChange(of: targetLanguageIdentifier) { oldValue, newValue in + // Update voice manager when user picks a new language + voiceManager.updateTargetLanguage(to: newValue) + + // Check if this language is downloadable and prompt for download + if !newValue.isEmpty, + let lang = LanguageManager.shared.supportedLanguages.first(where: { $0.id == newValue }), + lang.isDownloadAvailable { + previousLanguageIdentifier = oldValue + languageToDownload = lang + showDownloadAlert = true + } + } + .onDisappear { + // Settings closed + } + .alert("Download Language", isPresented: $showDownloadAlert) { + Button("Download") { + triggerDownload() + } + Button("Not Now", role: .cancel) { + // Keep the selection but don't download + languageToDownload = nil + } + } message: { + if let lang = languageToDownload { + Text("Would you like to download \(lang.name) for 
offline translation? This provides faster translations and works without internet.") + } else { + Text("Would you like to download this language for offline translation?") + } + } + #if canImport(Translation) + .translationTask(downloadConfig) { session in + // This will trigger Apple's download UI automatically + do { + try await session.prepareTranslation() + } catch { + print("Download preparation failed: \(error)") + } + await MainActor.run { + downloadConfig = nil + languageToDownload = nil + } + } + #endif + .sheet(isPresented: $showRecommendedVoices) { + RecommendedVoicesView() + } + .fullScreenCover(isPresented: $showEasterEgg) { + EasterEggPlayerView() + } + } + } + + private func triggerDownload() { + guard let lang = languageToDownload else { return } + + #if canImport(Translation) + if #available(iOS 18.0, *) { + // Create a configuration to trigger the download + downloadConfig = TranslationSession.Configuration( + source: Locale.Language(identifier: "en-US"), + target: Locale.Language(identifier: lang.id) + ) + } + #endif + } +} + +struct EasterEggPlayerView: View { + @Environment(\.dismiss) var dismiss + @State private var player: AVPlayer? 
+
+    var body: some View {
+        ZStack {
+            Color.black.edgesIgnoringSafeArea(.all)
+
+            if let player = player {
+                VideoPlayer(player: player)
+                    .edgesIgnoringSafeArea(.all)
+                    .onAppear { player.play() }
+            } else {
+                // Shown until onAppear has resolved the bundled video URL.
+                ProgressView()
+            }
+        }
+        // Dismiss when our item finishes playing. A Combine publisher is used
+        // instead of NotificationCenter.addObserver(forName:object:queue:using:)
+        // because the original never stored the returned observer token, and
+        // removeObserver(self) cannot remove a block-based observer — so the
+        // observer (and its dismiss closure) leaked. The subscription below is
+        // cancelled automatically when the view disappears.
+        .onReceive(NotificationCenter.default.publisher(for: .AVPlayerItemDidPlayToEndTime)) { notification in
+            if let item = notification.object as? AVPlayerItem, item === player?.currentItem {
+                dismiss()
+            }
+        }
+        .onAppear {
+            // Load the bundled easter-egg clip; bail out if it is missing.
+            if let url = Bundle.main.url(forResource: "easteregg", withExtension: "mp4") {
+                self.player = AVPlayer(url: url)
+            } else {
+                print("Easter egg video not found")
+                dismiss()
+            }
+        }
+        .onDisappear {
+            player?.pause()
+        }
+    }
+}
diff --git a/FlipTalk/ThemeManager.swift b/FlipTalk/ThemeManager.swift
new file mode 100644
index 0000000..25c7fac
--- /dev/null
+++ b/FlipTalk/ThemeManager.swift
@@ -0,0 +1,79 @@
+import SwiftUI
+import Combine
+
+/// App-wide color themes selectable from Settings.
+enum AppTheme: String, CaseIterable, Identifiable, Codable {
+    case dark
+    case light
+    case lightbulb
+
+    var id: String { rawValue }
+
+    /// Human-readable label shown in the theme picker.
+    var displayName: String {
+        switch self {
+        case .dark: return "Default (Dark)"
+        case .light: return "Light"
+        case .lightbulb: return "Lightbulb"
+        }
+    }
+}
+
+/// Publishes the current theme and persists it (JSON-encoded) in
+/// UserDefaults under the "selectedTheme" key.
+class ThemeManager: ObservableObject {
+    @Published var currentTheme: AppTheme {
+        didSet {
+            saveTheme()
+        }
+    }
+
+    init() {
+        // Restore the persisted theme; fall back to dark on first launch or
+        // if the stored value fails to decode.
+        if let data = UserDefaults.standard.data(forKey: "selectedTheme"),
+           let theme = try? JSONDecoder().decode(AppTheme.self, from: data) {
+            self.currentTheme = theme
+        } else {
+            self.currentTheme = .dark
+        }
+    }
+
+    private func saveTheme() {
+        if let data = try? 
JSONEncoder().encode(currentTheme) { + UserDefaults.standard.set(data, forKey: "selectedTheme") + } + } + + // MARK: - Color Accessors + + var backgroundColor: Color { + switch currentTheme { + case .dark: return .black + case .light: return .white + case .lightbulb: return .black + } + } + + var textColor: Color { + switch currentTheme { + case .dark: return .white + case .light: return .black + case .lightbulb: return .yellow + } + } + + var secondaryTextColor: Color { + switch currentTheme { + case .dark: return .white.opacity(0.25) + case .light: return .black.opacity(0.25) + case .lightbulb: return .yellow.opacity(0.5) + } + } + + var menuBackgroundColor: Color { + // Keeping menu dark for now as per usual side menu patterns, or should it match? + // User asked for "main screen and voice screen" colors. + // Let's make the menu adaptive too for consistency, or keep it dark? + // "Theme Colors ... for the main screen and the voice screen" + // Let's assume Side Menu should probably at least not clash. + // For now, let's keep side menu dark to distinguish it, or maybe match? + // Let's stick to modifying main and voice screens primarily as requested. + // But the text in side menu is white. If we make background white, text needs to be black. + return .black // Keeping side menu consistently dark for this iteration unless genericized. 
+ } +} diff --git a/FlipTalk/VoiceManager.swift b/FlipTalk/VoiceManager.swift new file mode 100644 index 0000000..49a1973 --- /dev/null +++ b/FlipTalk/VoiceManager.swift @@ -0,0 +1,185 @@ +import Foundation +import AVFoundation +import Combine + +class VoiceManager: ObservableObject { + static let shared = VoiceManager() + + @Published var availableEnglishVoices: [AVSpeechSynthesisVoice] = [] + @Published var availableTargetVoices: [AVSpeechSynthesisVoice] = [] + + // UserDefaults Keys + private let kSelectedEnglishVoice = "selectedEnglishVoiceIdentifier" + private let kSelectedTargetVoice = "selectedTargetVoiceIdentifier" + + // Internal state to track current target language + private var currentTargetLocaleID: String = "es-MX" // Default to Spanish (MX) + + init() { + // Load initial target language from somewhere or default + // For now, we load with default, but we'll expose a method to update it + loadVoices() + setupNotifications() + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + private func setupNotifications() { + NotificationCenter.default.addObserver(forName: Notification.Name("AVSpeechSynthesisVoiceIdentifierDidChangeNotification"), object: nil, queue: .main) { [weak self] _ in + self?.loadVoices() + } + } + + func updateTargetLanguage(to localeID: String) { + guard !localeID.isEmpty else { return } + self.currentTargetLocaleID = localeID + loadVoices() + } + + func loadVoices() { + let allVoices = AVSpeechSynthesisVoice.speechVoices() + + // Blacklist of Novelty voices to exclude + let noveltyVoices = Set([ + "Albert", "Bad News", "Bahh", "Bells", "Boing", "Bubbles", "Cellos", + "Deranged", "Good News", "Hysterical", "Junior", "Kathy", "Organ", + "Princess", "Ralph", "Trinoids", "Whisper", "Zarvox", + "Jester", "Superstar", "Wobble", "Fred", + "Eddy", "Flo", "Grandma", "Grandpa", "Reed", "Rocko", "Sandy", "Shelley" + ]) + + // Helper to sort by quality (Premium > Enhanced > Default) + let sortComparator: 
(AVSpeechSynthesisVoice, AVSpeechSynthesisVoice) -> Bool = { v1, v2 in + // First sort by Quality + if v1.quality != v2.quality { + // strict order: premium > enhanced > default + let q1 = v1.quality == .premium ? 3 : (v1.quality == .enhanced ? 2 : 1) + let q2 = v2.quality == .premium ? 3 : (v2.quality == .enhanced ? 2 : 1) + return q1 > q2 + } + // Then by Name + return v1.name < v2.name + } + + // Filter for English (US) + availableEnglishVoices = allVoices.filter { + $0.language == "en-US" && !noveltyVoices.contains($0.name) + }.sorted(by: sortComparator) + + // Filter for Target Language + // Handle simplified codes (e.g. "fr" -> "fr-FR") if necessary, though usually we have full codes + let targetVoices = allVoices.filter { + ($0.language == self.currentTargetLocaleID || $0.language.starts(with: self.currentTargetLocaleID)) && !noveltyVoices.contains($0.name) + } + + if !targetVoices.isEmpty { + availableTargetVoices = targetVoices.sorted(by: sortComparator) + } else { + // If no specific voices found, empty list (UI should handle fallback to system default) + availableTargetVoices = [] + } + } + + // MARK: - Selection Handling + + var selectedEnglishVoiceIdentifier: String { + get { + if let saved = UserDefaults.standard.string(forKey: kSelectedEnglishVoice) { + return saved + } + // Default to Samantha if available + if let samantha = availableEnglishVoices.first(where: { $0.name == "Samantha" }) { + return samantha.identifier + } + return availableEnglishVoices.first?.identifier ?? AVSpeechSynthesisVoice(language: "en-US")?.identifier ?? "" + } + set { + UserDefaults.standard.set(newValue, forKey: kSelectedEnglishVoice) + objectWillChange.send() + } + } + + // We persist the selected voice PER language ideally, but for simplicity/user request + // we might just store one "Target Voice" preference. + // HOWEVER, a French voice ID won't work for Spanish. + // So we should probably key the storage by the language code? 
+ // User plan didn't specify, but "Smart" behavior is better. + // Let's use a dynamic key: "selectedVoice_\(currentTargetLocaleID)" + + var selectedTargetVoiceIdentifier: String { + get { + let key = "selectedVoice_\(currentTargetLocaleID)" + if let saved = UserDefaults.standard.string(forKey: key) { + // Verify this voice still exists and matches language (optional, but good) + return saved + } + // Default logic: Premium/Enhanced if available + return availableTargetVoices.first?.identifier ?? "" + } + set { + let key = "selectedVoice_\(currentTargetLocaleID)" + UserDefaults.standard.set(newValue, forKey: key) + objectWillChange.send() + } + } + + func getSelectedEnglishVoice() -> AVSpeechSynthesisVoice? { + if let voice = AVSpeechSynthesisVoice(identifier: selectedEnglishVoiceIdentifier) { + return voice + } + return availableEnglishVoices.first ?? AVSpeechSynthesisVoice(language: "en-US") + } + + func getSelectedTargetVoice() -> AVSpeechSynthesisVoice? { + if let voice = AVSpeechSynthesisVoice(identifier: selectedTargetVoiceIdentifier) { + return voice + } + // Fallback to any voice for this language + return availableTargetVoices.first ?? AVSpeechSynthesisVoice(language: currentTargetLocaleID) + } + + // MARK: - Descriptions + + func description(for voice: AVSpeechSynthesisVoice) -> String { + var traits: [String] = [] + + if voice.quality == .enhanced { traits.append("Enhanced") } + if voice.quality == .premium { traits.append("Premium") } + + let qualitySuffix = traits.isEmpty ? "" : " (\(traits.joined(separator: ", ")))" + + // Detailed User Descriptions + let knownDescriptions: [String: String] = [ + "Alex": "Male - Top Tier. The smartest voice; sounds very natural.", + "Samantha": "Female - Standard. The classic 'original Siri' voice.", + "Ava": "Female - Premium. Professional, warm, and very human-like.", + "Allison": "Female - Premium. Lighter, breathy, and pleasant.", + "Tom": "Male - Premium. 
Friendly, standard American male.", + "Susan": "Female - Standard/Premium. Slightly formal and crisp.", + "Zoe": "Female - Premium. Bright, cheerful, and younger-sounding.", + "Evan": "Male - Enhanced. Deep, smooth, and modern.", + "Nathan": "Male - Enhanced. Lighter, younger-sounding male.", + "Noelle": "Female - Enhanced. Soft, sweet, and modern flow.", + "Joelle": "Female - Enhanced. Clear, articulate, modern.", + "Aaron": "Male - Neural. Standard American Male Siri voice.", + "Nicky": "Female - Neural. Standard American Female Siri voice.", + + // Spanish + "Paulina": "Female (MX) - Standard/Premium. Gold standard; professional news anchor style.", + "Juan": "Male (MX) - Standard/Premium. Clear, neutral, polite.", + "Siri Female": "Female (MX) - Neural. Very smooth, natural assistant.", + "Siri Male": "Male (MX) - Neural. Professional modern assistant.", + "Monica": "Female (ES) - Clear Spanish (Spain).", + "Jorge": "Male (ES) - Clear Spanish (Spain)." + ] + + // Check exact name match first + if let specificDesc = knownDescriptions[voice.name] { + return "\(voice.name) - \(specificDesc)\(qualitySuffix)" + } + + return "\(voice.name)\(qualitySuffix)" + } +} diff --git a/FlipTalk/VoiceNoteView.swift b/FlipTalk/VoiceNoteView.swift new file mode 100644 index 0000000..0ed60ff --- /dev/null +++ b/FlipTalk/VoiceNoteView.swift @@ -0,0 +1,660 @@ +import SwiftUI +import Speech +import AVFoundation +import Combine +#if canImport(Translation) +import Translation +#endif + +class SpeechRecognizer: ObservableObject { + static let shared = SpeechRecognizer() + + @Published var transcript = "" + @Published var isRecording = false + @Published var error: String? + + private var audioEngine = AVAudioEngine() + private var request: SFSpeechAudioBufferRecognitionRequest? + private var task: SFSpeechRecognitionTask? + private var recognizer: SFSpeechRecognizer? 
+ private var currentLocale = Locale(identifier: "en-US") + + // To handle appending new sessions to existing text + private var sessionStartTranscript = "" + + private init() { + // Load saved transcript + if let saved = UserDefaults.standard.string(forKey: "voiceNoteTranscript") { + self.transcript = saved + } + // Initialize default recognizer + self.recognizer = SFSpeechRecognizer(locale: currentLocale) + requestPermissions() + } + + private func requestPermissions() { + SFSpeechRecognizer.requestAuthorization { authStatus in + DispatchQueue.main.async { + switch authStatus { + case .authorized: + break + case .denied: + self.error = "Speech recognition authorization denied" + case .restricted: + self.error = "Speech recognition restricted on this device" + case .notDetermined: + self.error = "Speech recognition not yet authorized" + @unknown default: + self.error = "Unknown authorization status" + } + } + } + } + + func setLanguage(locale: Locale) { + currentLocale = locale + // If we are already recording, we'd need to stop and restart, + // but for now we just update the recognizer for the next session + if recognizer?.locale != locale { + recognizer = SFSpeechRecognizer(locale: locale) + } + } + + func startTranscribing(allowOnline: Bool = true, locale: Locale? 
= nil) { + // Determine locale + if let locale = locale { + self.currentLocale = locale + } + let localeToUse = self.currentLocale + + // Safety net: Reload from UserDefaults if empty + if transcript.isEmpty { + if let saved = UserDefaults.standard.string(forKey: "voiceNoteTranscript") { + self.transcript = saved + } + } + + guard !isRecording else { return } + + // Update recognizer if needed + if recognizer?.locale != localeToUse { + recognizer = SFSpeechRecognizer(locale: localeToUse) + } + + guard let recognizer = recognizer, recognizer.isAvailable else { + self.error = "Speech recognizer is not available for \(localeToUse.identifier)" + return + } + + // Save current transcript as the starting point for this session + sessionStartTranscript = transcript + + do { + let audioSession = AVAudioSession.sharedInstance() + try audioSession.setCategory(.playAndRecord, mode: .measurement, options: [.duckOthers, .defaultToSpeaker, .allowBluetoothHFP]) + try audioSession.setActive(true, options: .notifyOthersOnDeactivation) + + request = SFSpeechAudioBufferRecognitionRequest() + guard let request = request else { return } + request.shouldReportPartialResults = true + request.requiresOnDeviceRecognition = !allowOnline + + // Enable automatic punctuation (iOS 16+) + if #available(iOS 16, *) { + request.addsPunctuation = true + } + + let inputNode = audioEngine.inputNode + + task = recognizer.recognitionTask(with: request) { [weak self] result, error in + guard let self = self else { return } + + if let error = error { + // Check if we should fallback (if we were trying online and it failed) + if allowOnline { + print("Online transcription failed: \(error.localizedDescription). 
Retrying offline.") + // We must stop the current engine/request before retrying + self.stopTranscribing() + + // Retry with on-device only + // Add a small delay to ensure cleanup completes + DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { + self.startTranscribing(allowOnline: false, locale: localeToUse) + } + return + } else { + // We were already offline (or forced offline), just report error + DispatchQueue.main.async { + self.error = "Transcription error: \(error.localizedDescription)" + } + } + } + + if let result = result { + DispatchQueue.main.async { + // Only update if we're still recording (prevents race condition with clear) + guard self.isRecording else { return } + + // Append new text to the session start text + let newText = result.bestTranscription.formattedString + + // Ignore empty results to prevent overwriting persistence + guard !newText.isEmpty else { return } + + if self.sessionStartTranscript.isEmpty { + self.transcript = newText + } else { + self.transcript = self.sessionStartTranscript + " " + newText + } + + // Save to UserDefaults + UserDefaults.standard.set(self.transcript, forKey: "voiceNoteTranscript") + } + } + + if error != nil || (result?.isFinal ?? 
false) { + self.stopTranscribing() + } + } + + let recordingFormat = inputNode.outputFormat(forBus: 0) + inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in + request.append(buffer) + } + + try audioEngine.start() + isRecording = true + error = nil + + } catch { + self.error = "Error starting recording: \(error.localizedDescription)" + stopTranscribing() + } + } + + func stopTranscribing() { + // Ensure we only stop if we are actually recording/have resources allocated + guard isRecording else { return } + + audioEngine.stop() + audioEngine.inputNode.removeTap(onBus: 0) + request?.endAudio() + task?.cancel() + + task = nil + request = nil + isRecording = false + } + + func clear() { + // Stop recording first to prevent it from saving again + stopTranscribing() + + // Then clear everything + transcript = "" + sessionStartTranscript = "" + UserDefaults.standard.removeObject(forKey: "voiceNoteTranscript") + + // Restart transcribing after a brief delay to allow audio engine to fully stop + DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { + self.startTranscribing() + } + } +} + +struct VoiceNoteView: View { + @EnvironmentObject var themeManager: ThemeManager + @ObservedObject var speechRecognizer = SpeechRecognizer.shared + var onFlipBack: () -> Void + + @State private var fontSize: CGFloat = 40 + @AppStorage("isSpanishMode") private var isSpanishMode = false + @State private var translatedText = "" + @State private var triggerTranslation = false + @State private var translationError: String? + @State private var manualTranslationConfig: TranslationSession.Configuration? 
+ @State private var showTranslation = false + @State private var translationTaskID = UUID() + @State private var longPressTriggered = false + @State private var showingSettingsAlert = false + + // New Dynamic Language State + @ObservedObject private var languageManager = LanguageManager.shared + @AppStorage("targetLanguageIdentifier") private var targetLanguageIdentifier: String = "" + @State private var showSettings = false + + // Configuration for iOS 18+ Translation + #if canImport(Translation) + @State private var translationConfig: TranslationSession.Configuration? + #endif + + var body: some View { + GeometryReader { geometry in + ZStack { + // Main Text Area + if isSpanishMode { + // Target Language Mode (Single View) + ScrollView { + VStack(alignment: .leading) { + if showTranslation { + // Show English Translation + if let error = translationError { + Text("Error: \(error)") + .foregroundColor(.red) + .font(.caption) + .padding(.bottom) + } + Text(translatedText.isEmpty ? "Translation will appear here..." : translatedText) + .font(.system(size: fontSize, weight: .bold)) // Use dynamic fontSize + .foregroundColor(translatedText.isEmpty ? .gray : themeManager.textColor) + .multilineTextAlignment(.leading) + } else { + // Show Target Transcript + Text(speechRecognizer.transcript.isEmpty ? languageManager.getStartSpeakingText(for: targetLanguageIdentifier.isEmpty ? 
"es-MX" : targetLanguageIdentifier) : speechRecognizer.transcript) + .font(.system(size: fontSize, weight: .bold)) // Use dynamic fontSize + .foregroundColor(themeManager.textColor) + .multilineTextAlignment(.leading) + } + } + .padding(.horizontal) + .padding(.top, 80) // Clear top bar + .padding(.bottom, 120) // Clear bottom controls + .frame(maxWidth: .infinity, alignment: .leading) + .onChange(of: speechRecognizer.transcript) { + adjustFontSize(containerSize: geometry.size) + } + .onChange(of: translatedText) { + adjustFontSize(containerSize: geometry.size) + } + .onChange(of: showTranslation) { + adjustFontSize(containerSize: geometry.size) + } + .onChange(of: targetLanguageIdentifier) { + print("VoiceNoteView: Language changed to \(targetLanguageIdentifier)") + // Force state update if needed, though AppStorage should trigger redraw. + // Might need to update speechRecognizer language if in Spanish mode? + if isSpanishMode { + let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) + speechRecognizer.setLanguage(locale: locale) + speechRecognizer.clear() + } + } + } + .scrollDisabled(true) + .frame(maxWidth: .infinity, maxHeight: .infinity) + .background(themeManager.backgroundColor) + } else { + // Normal English Full Screen + ScrollView { + Text(speechRecognizer.transcript.isEmpty ? "Start speaking..." 
: speechRecognizer.transcript) + .font(.system(size: fontSize, weight: .bold)) + .foregroundColor(themeManager.textColor) + .multilineTextAlignment(.leading) + .padding(.horizontal) + .padding(.top, 80) // Clear top bar + .padding(.bottom, 120) // Clear bottom controls + .frame(maxWidth: .infinity, alignment: .leading) + .textSelection(.enabled) + .onChange(of: speechRecognizer.transcript) { + adjustFontSize(containerSize: geometry.size) + } + } + .scrollDisabled(true) + .frame(maxWidth: .infinity, maxHeight: .infinity) + } + + // Top Bar with Clear All Button + VStack { + HStack { + // Language Toggle + Button(action: { + toggleLanguage() + }) { + HStack(spacing: 6) { + // Dynamic Flag and Name + if isSpanishMode { + if let lang = languageManager.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) { + Text(lang.flag) + .font(.title2) + Text(lang.name) + .fontWeight(.semibold) + .font(.system(size: 14)) + } else { + // Fallback if not found or empty (e.g. legacy state) + Text("🏳️") + .font(.title2) + Text("Select Language") + .fontWeight(.semibold) + .font(.system(size: 14)) + } + } else { + Text("🇺🇸") + .font(.title2) + Text("English") + .fontWeight(.semibold) + .font(.system(size: 14)) + } + } + .padding(.horizontal, 12) + .padding(.vertical, 8) + .background(Color.black.opacity(0.1)) + .cornerRadius(20) + } + .padding(.leading) + + Spacer() + Button(action: { + // Ensure the recognizer stays in the correct mode + let localeID = isSpanishMode ? (targetLanguageIdentifier.isEmpty ? 
"es-MX" : targetLanguageIdentifier) : "en-US" + speechRecognizer.setLanguage(locale: Locale(identifier: localeID)) + + speechRecognizer.clear() + translatedText = "" + translationError = nil + showTranslation = false + + // Reset translation config so it can be re-triggered + manualTranslationConfig?.invalidate() + manualTranslationConfig = nil + }) { + Text("Clear all") + .foregroundColor(.red) + .padding() + } + } + Spacer() + } + + // Bottom Area (Status & Flip Button) + VStack { + Spacer() + + ZStack { + // Left & Right Controls + HStack { + // Left: Status + VStack(alignment: .leading, spacing: 4) { + if speechRecognizer.isRecording { + Text("Listening...") + .font(.caption) + .foregroundColor(.red) + } + if let error = speechRecognizer.error { + Text(error) + .font(.caption) + .foregroundColor(.red) + } + } + .padding(.leading, 20) + + Spacer() + + // Right: Flip Button + Button(action: { + onFlipBack() + }) { + Text("Flip") + .font(.system(size: 16, weight: .medium)) + .foregroundColor(.white) + .padding(.horizontal, 20) + .padding(.vertical, 12) + .background(Color.blue) + .cornerRadius(25) + } + .padding(.trailing, 20) + } + + // Simple Translate Button + if isSpanishMode { + Button(action: { + if targetLanguageIdentifier.isEmpty { + // No language selected. Prompt user to open settings. 
+ showingSettingsAlert = true + } else { + if showTranslation { + showTranslation = false + } else { + performTranslation() + } + } + }) { + Group { + if targetLanguageIdentifier.isEmpty { + Text("Translate") + } else { + if let lang = languageManager.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) { + if showTranslation { + Text("Original \(lang.flag)") + } else { + Text("Translate to 🇺🇸") + } + } else { + Text("Translate") + } + } + } + .fontWeight(.semibold) + .padding(.horizontal, 20) + .padding(.vertical, 12) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(25) + .shadow(radius: 3) + } + .alert("Select Language", isPresented: $showingSettingsAlert) { + Button("Open Settings") { + showSettings = true + } + Button("Cancel", role: .cancel) { } + } message: { + Text("Please select a translation language in Settings.") + } + } + } + .padding(.bottom, 20) + .background( + LinearGradient( + gradient: Gradient(colors: [themeManager.backgroundColor.opacity(0), themeManager.backgroundColor]), + startPoint: .top, + endPoint: .bottom + ) + .frame(height: 100) + ) + } + } + .background(themeManager.backgroundColor) + .onAppear { + if #available(iOS 18.0, *) { + Task { + await languageManager.checkAvailability() + } + } + + // Recalculate font size immediately in case there is existing text + adjustFontSize(containerSize: geometry.size) + } + .onChange(of: targetLanguageIdentifier) { + if isSpanishMode { + let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) + speechRecognizer.setLanguage(locale: locale) + speechRecognizer.clear() + } + } + .sheet(isPresented: $showSettings, onDismiss: { + if isSpanishMode { + let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? 
"es-MX" : targetLanguageIdentifier) + speechRecognizer.setLanguage(locale: locale) + speechRecognizer.clear() + } + }) { + SettingsView() + .environmentObject(themeManager) + } + + .onDisappear { + // speechRecognizer.stopTranscribing() - Removed to keep recording in background + } + // Batch Translation Task + #if canImport(Translation) + .translationTask(manualTranslationConfig) { session in + do { + // Perform single batch translation + let response = try await session.translate(speechRecognizer.transcript) + translatedText = response.targetText + translationError = nil + // showTranslation is already true + } catch { + // Suppress specific "empty" errors or generic noise + let errorMsg = error.localizedDescription + if !errorMsg.localizedCaseInsensitiveContains("empty") { + print("Translation error: \(error)") + translationError = errorMsg + } else { + translationError = nil + // If silently failed, revert view so user can try again or see transcript + showTranslation = false + } + } + } + #else + // Fallback or empty logic for older iOS versions handled by UI checks + #endif + .id(translationTaskID) // Force recreation of the task on every request + .gesture( + DragGesture() + .onEnded { value in + if value.translation.width > 50 { + // Swipe Right -> Flip Back + onFlipBack() + } else if value.translation.width < -50 { + // Swipe Left -> Clear & Reset + if showTranslation { + showTranslation = false + } + speechRecognizer.clear() + } + } + ) + + + + } +} + + // Toggle Logic + private func toggleLanguage() { + isSpanishMode.toggle() + showTranslation = false + + // Stop current recording + speechRecognizer.stopTranscribing() + + // Update the locale in SpeechRecognizer immediately + // Update the locale in SpeechRecognizer immediately + let localeID = isSpanishMode ? (targetLanguageIdentifier.isEmpty ? 
"es-MX" : targetLanguageIdentifier) : "en-US" + speechRecognizer.setLanguage(locale: Locale(identifier: localeID)) + + // Clear old text and restart (clear() automatically restarts transcription using the current locale) + speechRecognizer.clear() + + // Configure translation session if needed + if #available(iOS 18.0, *), isSpanishMode { + #if canImport(Translation) + // Invalidating and recreating config triggers the task + manualTranslationConfig = TranslationSession.Configuration( + source: Locale.Language(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier), + target: Locale.Language(identifier: "en-US") + ) + #endif + } else { + #if canImport(Translation) + manualTranslationConfig?.invalidate() + manualTranslationConfig = nil + #endif + } + } + + // Trigger translation + private func performTranslation() { + guard !speechRecognizer.transcript.isEmpty else { return } + translatedText = "Translating..." + translationError = nil + showTranslation = true // Show loading state immediately + + if #available(iOS 18.0, *) { + #if canImport(Translation) + // Force a reset of the configuration + if manualTranslationConfig != nil { + manualTranslationConfig?.invalidate() + manualTranslationConfig = nil + } + + let langId = targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier + + // All languages use .translationTask modifier + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + translationTaskID = UUID() + manualTranslationConfig = TranslationSession.Configuration( + source: Locale.Language(identifier: langId), + target: Locale.Language(identifier: "en-US") + ) + } + #endif + } else { + translationError = "Translation requires iOS 18" + } + } + + private func startWithCurrentLanguage() { + let localeID = isSpanishMode ? (targetLanguageIdentifier.isEmpty ? 
"es-MX" : targetLanguageIdentifier) : "en-US" + speechRecognizer.startTranscribing(locale: Locale(identifier: localeID)) + } + + private func adjustFontSize(containerSize: CGSize) { + + let maxFontSize: CGFloat = 40 + let minFontSize: CGFloat = 12 + // Use the displayed text for sizing + let text: String + if isSpanishMode { + if showTranslation { + text = translatedText.isEmpty ? "Translation will appear here..." : translatedText + } else { + text = speechRecognizer.transcript.isEmpty ? languageManager.getStartSpeakingText(for: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) : speechRecognizer.transcript + } + } else { + text = speechRecognizer.transcript.isEmpty ? "Start speaking..." : speechRecognizer.transcript + } + // Account for padding + let horizontalPadding: CGFloat = 32 // 16 * 2 + let width = containerSize.width - horizontalPadding + let height = containerSize.height - 180 // Increased buffer for bottom controls + + var bestSize = minFontSize + + for size in stride(from: maxFontSize, through: minFontSize, by: -2) { + let font = UIFont.systemFont(ofSize: size, weight: .bold) + let attributes = [NSAttributedString.Key.font: font] + let boundingRect = NSString(string: text).boundingRect( + with: CGSize(width: width, height: .greatestFiniteMagnitude), + options: [.usesLineFragmentOrigin, .usesFontLeading], + attributes: attributes, + context: nil + ) + + if boundingRect.height <= height { + bestSize = size + break + } + } + + if fontSize != bestSize { + withAnimation { + fontSize = bestSize + } + } + } +} + + + diff --git a/FlipTalk/appstore.png b/FlipTalk/appstore.png new file mode 100644 index 0000000..fa677cc Binary files /dev/null and b/FlipTalk/appstore.png differ diff --git a/FlipTalk/easteregg.mp4 b/FlipTalk/easteregg.mp4 new file mode 100644 index 0000000..e39dc6b Binary files /dev/null and b/FlipTalk/easteregg.mp4 differ diff --git a/FlipTalk/playstore.png b/FlipTalk/playstore.png new file mode 100644 index 0000000..6d049ea 
Binary files /dev/null and b/FlipTalk/playstore.png differ diff --git a/README.md b/README.md new file mode 100644 index 0000000..3efb815 --- /dev/null +++ b/README.md @@ -0,0 +1,23 @@ +# FlipTalk: The Bridge for Instant In-Person Communication + +Effortless 1:1 conversations for the Deaf and Hard of Hearing. Type your message, flip the screen, and instantly see spoken words transcribed. Private, multilingual, and always ready. + +## Full Description + +**Bridging the Conversation Gap** +FlipTalk is designed for immediate, barrier-free communication between Deaf and hearing individuals. Whether you are ordering coffee or having a deep conversation, FlipTalk empowers you to communicate in over 20 languages without missing a beat. + +**Key Features** + +* **⚡️ Instant Transcription:** No "Start" button needed. Transcription is active the moment you open the app or flip the screen, ensuring you never miss the first word. +* **🔒 Privacy First:** Your security matters. All processing happens 100% on-device. Your conversations are never stored on a server, guaranteeing complete privacy. +* **🔄 The "Flip" Experience:** Seamlessly switch between your typed message and the hearing person's transcribed speech. Use the Split-View for a unified real-time conversation log. +* **🗣️ Continuous Speaking Mode:** Keep the conversation natural. Enable "Speak for me" to automatically vocalize your sentences as you finish typing them (triggered by punctuation), eliminating the need for constant tapping. +* **🌍 Multilingual Support:** Fluent in 20+ languages including English, Spanish, French, German, and Chinese. Ideal for travel and multilingual households. +* **🧠 Smart History & Favorites:** Swipe left to review past chats or swipe right to access your custom Quick Phrases (e.g., "Coffee please," "Emergency"). +* **👁️ Designed for Readability:** Built with high-contrast optics, Dark Mode, and auto-sizing text that dynamically fills the screen for maximum clarity.
+ +## How It Works +1. **Type:** You type your message. +2. **Flip:** Swipe right or tap to flip the screen towards the hearing person. +3. **Read:** They speak, and their words appear instantly on your screen. \ No newline at end of file diff --git a/Screenshots/iPad/1.png b/Screenshots/iPad/1.png new file mode 100644 index 0000000..f9362c0 Binary files /dev/null and b/Screenshots/iPad/1.png differ diff --git a/Screenshots/iPad/2.png b/Screenshots/iPad/2.png new file mode 100644 index 0000000..6f63f06 Binary files /dev/null and b/Screenshots/iPad/2.png differ diff --git a/Screenshots/iPad/3.png b/Screenshots/iPad/3.png new file mode 100644 index 0000000..a7200f4 Binary files /dev/null and b/Screenshots/iPad/3.png differ diff --git a/Screenshots/iPad/4.png b/Screenshots/iPad/4.png new file mode 100644 index 0000000..72b1adb Binary files /dev/null and b/Screenshots/iPad/4.png differ diff --git a/Screenshots/iPad/5.png b/Screenshots/iPad/5.png new file mode 100644 index 0000000..1e54600 Binary files /dev/null and b/Screenshots/iPad/5.png differ diff --git a/Screenshots/iPad/6.png b/Screenshots/iPad/6.png new file mode 100644 index 0000000..3ef7075 Binary files /dev/null and b/Screenshots/iPad/6.png differ diff --git a/Screenshots/iPhone/1.png b/Screenshots/iPhone/1.png new file mode 100644 index 0000000..7ada00a Binary files /dev/null and b/Screenshots/iPhone/1.png differ diff --git a/Screenshots/iPhone/2.png b/Screenshots/iPhone/2.png new file mode 100644 index 0000000..b6de5e6 Binary files /dev/null and b/Screenshots/iPhone/2.png differ diff --git a/Screenshots/iPhone/3.png b/Screenshots/iPhone/3.png new file mode 100644 index 0000000..de481f9 Binary files /dev/null and b/Screenshots/iPhone/3.png differ diff --git a/Screenshots/iPhone/4.png b/Screenshots/iPhone/4.png new file mode 100644 index 0000000..7fdfb3a Binary files /dev/null and b/Screenshots/iPhone/4.png differ diff --git a/Screenshots/iPhone/5.png b/Screenshots/iPhone/5.png new file mode 100644 index 
0000000..d897c2d Binary files /dev/null and b/Screenshots/iPhone/5.png differ diff --git a/Screenshots/iPhone/6.png b/Screenshots/iPhone/6.png new file mode 100644 index 0000000..e816a7c Binary files /dev/null and b/Screenshots/iPhone/6.png differ diff --git a/Screenshots/iPhone/7.png b/Screenshots/iPhone/7.png new file mode 100644 index 0000000..7b195a5 Binary files /dev/null and b/Screenshots/iPhone/7.png differ diff --git a/check_build.sh b/check_build.sh new file mode 100755 index 0000000..41be6ef --- /dev/null +++ b/check_build.sh @@ -0,0 +1,27 @@ +#!/bin/zsh +set -o pipefail # Fail if xcodebuild fails, even with xcbeautify + +# --- Configuration --- +SCHEME="FlipTalk" +DEVICE_NAME="iPhone 17 Pro" +BUILD_PATH="./build" + +echo "🔍 Checking compilation for $SCHEME..." + +# Build Only (No Install/Launch) +# We use 'env -u' to hide Homebrew variables +# We use '-derivedDataPath' to keep it isolated +env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \ + -scheme "$SCHEME" \ + -destination "platform=iOS Simulator,name=$DEVICE_NAME" \ + -configuration Debug \ + -derivedDataPath "$BUILD_PATH" \ + build | xcbeautify + +# Check exit code of the pipeline +if [ $? -eq 0 ]; then + echo "✅ Build Succeeded. No errors found." +else + echo "❌ Build Failed." + exit 1 +fi diff --git a/fliptalk_orig.png b/fliptalk_orig.png new file mode 100644 index 0000000..e6d3773 Binary files /dev/null and b/fliptalk_orig.png differ diff --git a/run_ios_simulator.sh b/run_ios_simulator.sh new file mode 100755 index 0000000..d34f8c2 --- /dev/null +++ b/run_ios_simulator.sh @@ -0,0 +1,47 @@ +#!/bin/zsh +set -e # Exit immediately if any command fails + +# --- Configuration --- +SCHEME="FlipTalk" +BUNDLE_ID="com.jaredlog.Flip-Talk" +DEVICE_NAME="iPhone 17 Pro" +BUILD_PATH="./build" # This ensures Predictable Paths + +echo "🚀 Starting Build for $DEVICE_NAME..." + +# 1. Boot the simulator if it isn't already running +# We use 'grep' to check status so we don't try to boot an active device +if ! 
xcrun simctl list devices | grep "$DEVICE_NAME" | grep -q "(Booted)"; then + echo "⚙️ Booting Simulator..." + xcrun simctl boot "$DEVICE_NAME" +fi +open -a Simulator + +# 2. Build the App +# We use 'env -u' to hide your Homebrew variables (CC, CXX) from Xcode +# We use '-derivedDataPath' to force the build into the local ./build folder +echo "🔨 Compiling..." +env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \ + -scheme "$SCHEME" \ + -destination "platform=iOS Simulator,name=$DEVICE_NAME" \ + -configuration Debug \ + -derivedDataPath "$BUILD_PATH" \ + clean build | xcbeautify + +# 3. Locate the .app bundle +# Since we used -derivedDataPath, we know EXACTLY where this is. +APP_PATH="$BUILD_PATH/Build/Products/Debug-iphonesimulator/$SCHEME.app" + +if [ ! -d "$APP_PATH" ]; then + echo "❌ Error: App bundle not found at $APP_PATH" + exit 1 +fi + +# 4. Install and Launch +echo "📲 Installing..." +xcrun simctl install "$DEVICE_NAME" "$APP_PATH" + +echo "▶️ Launching $BUNDLE_ID..." +xcrun simctl launch "$DEVICE_NAME" "$BUNDLE_ID" + +echo "✅ Done!"