Initial commit: FlipTalk iOS app

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-19 21:58:44 -05:00
commit 57f6c18550
54 changed files with 4130 additions and 0 deletions

67
.gitignore vendored Normal file
View File

@@ -0,0 +1,67 @@
# Xcode
build/
DerivedData/
*.xcodeproj/xcuserdata/
*.xcworkspace/xcuserdata/
*.xcodeproj/project.xcworkspace/xcuserdata/
# Xcode build state
*.moved-aside
*.xcuserstate
*.xccheckout
*.xcscmblueprint
# Swift Package Manager
.build/
.swiftpm/
Package.resolved
# CocoaPods
Pods/
Podfile.lock
# Carthage
Carthage/Build/
Carthage/Checkouts/
# Node (if any JS tooling)
node_modules/
dist/
.npm/
# macOS
.DS_Store
.AppleDouble
.LSOverride
._*
.Spotlight-V100
.Trashes
# IDEs
*.swp
*.swo
*~
.idea/
.vscode/
# Archives
*.ipa
*.dSYM.zip
*.dSYM
# Playgrounds
timeline.xctimeline
playground.xcworkspace
# Fastlane
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output/
# Environment and secrets
.env
.env.*
*.pem
*.p12
*.mobileprovision

View File

@@ -0,0 +1,361 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXFileReference section */
7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FlipTalk.app; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = FlipTalk;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
7F95F89C2EDF7D3B00ABB7F4 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7F95F8962EDF7D3B00ABB7F4 = {
isa = PBXGroup;
children = (
7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */,
7F95F8A02EDF7D3B00ABB7F4 /* Products */,
);
sourceTree = "<group>";
};
7F95F8A02EDF7D3B00ABB7F4 /* Products */ = {
isa = PBXGroup;
children = (
7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7F95F89E2EDF7D3B00ABB7F4 /* FlipTalk */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7F95F8AA2EDF7D3C00ABB7F4 /* Build configuration list for PBXNativeTarget "FlipTalk" */;
buildPhases = (
7F95F89B2EDF7D3B00ABB7F4 /* Sources */,
7F95F89C2EDF7D3B00ABB7F4 /* Frameworks */,
7F95F89D2EDF7D3B00ABB7F4 /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
7F95F8A12EDF7D3B00ABB7F4 /* FlipTalk */,
);
name = FlipTalk;
packageProductDependencies = (
);
productName = FlipTalk;
productReference = 7F95F89F2EDF7D3B00ABB7F4 /* FlipTalk.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7F95F8972EDF7D3B00ABB7F4 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2610;
LastUpgradeCheck = 2620;
TargetAttributes = {
7F95F89E2EDF7D3B00ABB7F4 = {
CreatedOnToolsVersion = 26.1.1;
};
};
};
buildConfigurationList = 7F95F89A2EDF7D3B00ABB7F4 /* Build configuration list for PBXProject "FlipTalk" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7F95F8962EDF7D3B00ABB7F4;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = 7F95F8A02EDF7D3B00ABB7F4 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7F95F89E2EDF7D3B00ABB7F4 /* FlipTalk */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7F95F89D2EDF7D3B00ABB7F4 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7F95F89B2EDF7D3B00ABB7F4 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
7F95F8A82EDF7D3C00ABB7F4 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.1;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
7F95F8A92EDF7D3C00ABB7F4 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.1;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7F95F8AB2EDF7D3C00ABB7F4 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 2;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_CFBundleDisplayName = "Flip-Talk";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "Needed for recording the person speaking";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "Needed to transcribe.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 2.3;
ONLY_ACTIVE_ARCH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.jaredlog.Flip-Talk";
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7F95F8AC2EDF7D3C00ABB7F4 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 2;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_CFBundleDisplayName = "Flip-Talk";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "Needed for recording the person speaking";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "Needed to transcribe.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 2.3;
ONLY_ACTIVE_ARCH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.jaredlog.Flip-Talk";
PRODUCT_NAME = "$(TARGET_NAME)";
PROVISIONING_PROFILE_SPECIFIER = "";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7F95F89A2EDF7D3B00ABB7F4 /* Build configuration list for PBXProject "FlipTalk" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F95F8A82EDF7D3C00ABB7F4 /* Debug */,
7F95F8A92EDF7D3C00ABB7F4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7F95F8AA2EDF7D3C00ABB7F4 /* Build configuration list for PBXNativeTarget "FlipTalk" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F95F8AB2EDF7D3C00ABB7F4 /* Debug */,
7F95F8AC2EDF7D3C00ABB7F4 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7F95F8972EDF7D3B00ABB7F4 /* Project object */;
}

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,78 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "2620"
version = "1.7">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES"
buildArchitectures = "Automatic">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F95F89E2EDF7D3B00ABB7F4"
BuildableName = "FlipTalk.app"
BlueprintName = "FlipTalk"
ReferencedContainer = "container:FlipTalk.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
shouldAutocreateTestPlan = "YES">
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F95F89E2EDF7D3B00ABB7F4"
BuildableName = "FlipTalk.app"
BlueprintName = "FlipTalk"
ReferencedContainer = "container:FlipTalk.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F95F89E2EDF7D3B00ABB7F4"
BuildableName = "FlipTalk.app"
BlueprintName = "FlipTalk"
ReferencedContainer = "container:FlipTalk.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 977 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.4 KiB

View File

@@ -0,0 +1 @@
{"images":[{"size":"60x60","expected-size":"180","filename":"180.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"40x40","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"60x60","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"57x57","expected-size":"57","filename":"57.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"87","filename":"87.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"57x57","expected-size":"114","filename":"114.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"60","filename":"60.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"1024x1024","filename":"1024.png","expected-size":"1024","idiom":"ios-marketing","folder":"Assets.xcassets/AppIcon.appiconset/","scale":"1x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"72x72","expected-size":"72","filename":"72.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"76x76","expected-size":"152","filename":"152.png","folder":"Assets.xcas
sets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"50x50","expected-size":"100","filename":"100.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"76x76","expected-size":"76","filename":"76.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"50x50","expected-size":"50","filename":"50.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"72x72","expected-size":"144","filename":"144.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"40x40","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"83.5x83.5","expected-size":"167","filename":"167.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"20x20","expected-size":"20","filename":"20.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"}]}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

1998
FlipTalk/ContentView.swift Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,20 @@
//
// FlipTalkApp.swift
// FlipTalk
//
// Created by Jared Evans on 12/2/25.
//
import SwiftUI
@main
struct FlipTalkApp: App {
    /// Shared theme state, created once for the app's lifetime and
    /// injected into the SwiftUI environment for every view.
    @StateObject private var themeManager = ThemeManager()

    /// Root scene: a single window group hosting ContentView.
    var body: some Scene {
        WindowGroup {
            ContentView().environmentObject(themeManager)
        }
    }
}

View File

@@ -0,0 +1,171 @@
import Foundation
import Combine
import SwiftUI
#if canImport(Translation)
import Translation
#endif
/// Central registry of the languages the app can translate into, plus
/// helpers for localized UI prompts and offline-pack availability.
class LanguageManager: ObservableObject {
    static let shared = LanguageManager()

    /// Languages currently offered in the picker, sorted by English name.
    @Published var supportedLanguages: [LanguageStatus] = []

    /// Locale identifiers with no downloadable language pack; translating
    /// these always requires an internet connection. Single source of truth
    /// shared by `LanguageStatus.isDownloadAvailable` and
    /// `isInternetOnlyLanguage(_:)` (previously duplicated literals).
    private static let internetOnlyIdentifiers: Set<String> = ["en-UK", "es-MX"]

    init() {
        // Seed every candidate as online-required (fail-safe default) until
        // checkAvailability() can verify the real pack status on-device.
        self.supportedLanguages = candidateLocales.map { id in
            LanguageStatus(id: id, locale: Locale(identifier: id), isOnlineRequired: true)
        }.sorted { $0.name < $1.name }
    }

    /// One language the app can translate into, with its verified
    /// online/offline status.
    struct LanguageStatus: Identifiable, Equatable {
        let id: String // Locale identifier
        let locale: Locale
        let isOnlineRequired: Bool

        // English display names for all supported languages.
        private static let englishNames: [String: String] = [
            "ar-SA": "Arabic",
            "zh-CN": "Chinese (Simplified)",
            "zh-TW": "Chinese (Traditional)",
            "nl-NL": "Dutch",
            "en-US": "English (United States)",
            "en-UK": "English (United Kingdom)",
            "fr-FR": "French (France)",
            "de-DE": "German",
            "id-ID": "Indonesian",
            "it-IT": "Italian",
            "ja-JP": "Japanese",
            "ko-KR": "Korean",
            "pl-PL": "Polish",
            "pt-BR": "Portuguese (Brazil)",
            "ru-RU": "Russian",
            "es-ES": "Spanish (Spain)",
            "es-MX": "Spanish (Mexico)",
            "th-TH": "Thai",
            "tr-TR": "Turkish",
            "uk-UA": "Ukrainian",
            "vi-VN": "Vietnamese"
        ]

        /// English display name, falling back to the raw locale identifier.
        var name: String {
            Self.englishNames[id] ?? locale.identifier
        }

        /// Flag emoji for the locale's region; white flag when unknown.
        var flag: String {
            locale.flagEmoji ?? "🏳️"
        }

        var requiresInternet: Bool {
            isOnlineRequired
        }

        /// Whether an offline language pack can be downloaded for this
        /// language (everything except the internet-only identifiers).
        var isDownloadAvailable: Bool {
            !LanguageManager.internetOnlyIdentifiers.contains(id)
        }
    }

    // A curated list of languages supported by Apple Translate.
    private let candidateLocales: [String] = [
        "ar-SA", "zh-CN", "zh-TW", "nl-NL", "en-UK", "fr-FR",
        "de-DE", "id-ID", "it-IT", "ja-JP", "ko-KR", "pl-PL", "pt-BR",
        "ru-RU", "es-ES", "es-MX", "th-TH", "tr-TR", "uk-UA", "vi-VN"
    ]

    // Localized "Start speaking..." prompts keyed by locale identifier.
    private let startSpeakingTranslations: [String: String] = [
        "ar-SA": "تحدث الآن...",
        "zh-CN": "开始说话...",
        "zh-TW": "開始說話...",
        "nl-NL": "Begin met spreken...",
        "en-US": "Start speaking...",
        "en-UK": "Start speaking...",
        "fr-FR": "Commencez à parler...",
        "de-DE": "Jetzt sprechen...",
        "id-ID": "Mulai berbicara...",
        "it-IT": "Inizia a parlare...",
        "ja-JP": "話し始めてください...",
        "ko-KR": "말씀해 주세요...",
        "pl-PL": "Zacznij mówić...",
        "pt-BR": "Comece a falar...",
        "ru-RU": "Начните говорить...",
        "es-ES": "Empieza a hablar...",
        "es-MX": "Empieza a hablar...",
        "th-TH": "เริ่มพูด...",
        "tr-TR": "Konuşmaya başla...",
        "uk-UA": "Почніть говорити...",
        "vi-VN": "Bắt đầu nói..."
    ]

    /// Returns the localized "start speaking" prompt for `identifier`,
    /// defaulting to English when no translation exists.
    func getStartSpeakingText(for identifier: String) -> String {
        return startSpeakingTranslations[identifier] ?? "Start speaking..."
    }

    /// Queries the Translation framework (iOS 18+) for the actual
    /// availability of each candidate English→target pairing and publishes
    /// the verified list on the main actor. No-op below iOS 18.
    func checkAvailability() async {
        guard #available(iOS 18.0, *) else { return }
        #if canImport(Translation)
        var results: [LanguageStatus] = []
        let availability = LanguageAvailability()
        let source = Locale.Language(identifier: "en-US")
        for identifier in candidateLocales {
            let target = Locale.Language(identifier: identifier)
            let status = await availability.status(from: source, to: target)
            // Keep only pairs the framework reports usable; .supported
            // (but not yet installed) means translation needs internet.
            if status == .supported || status == .installed {
                let locale = Locale(identifier: identifier)
                let isOnline = (status == .supported)
                results.append(LanguageStatus(id: identifier, locale: locale, isOnlineRequired: isOnline))
            }
        }
        await MainActor.run {
            // Replace the fail-safe seed list only when verification found
            // at least one usable pairing; otherwise keep all candidates
            // so the language menu is never empty.
            if !results.isEmpty {
                self.supportedLanguages = results.sorted { $0.name < $1.name }
            }
        }
        #endif
    }

    /// Check if a language identifier is internet-only (no download available)
    func isInternetOnlyLanguage(_ identifier: String) -> Bool {
        return Self.internetOnlyIdentifiers.contains(identifier)
    }
}
extension Locale {
    /// Flag emoji derived from this locale's region code, or nil when no
    /// region can be determined.
    var flagEmoji: String? {
        // Prefer the parsed region; fall back to the last hyphenated
        // component of the raw identifier (e.g. "US" in "en-US").
        guard let region = self.region?.identifier else {
            let parts = identifier.split(separator: "-")
            if parts.count > 1, let code = parts.last {
                // Uppercase defensively: the regional-indicator offset in
                // String.flagEmoji assumes A-Z, and lowercase identifier
                // components (e.g. "en-us") would otherwise map to the
                // wrong scalars. Also avoids the previous force unwrap.
                return String(code).uppercased().flagEmoji
            }
            return nil
        }
        return region.flagEmoji
    }
}
extension String {
    /// Converts an ISO region code (e.g. "US") into its flag emoji by
    /// offsetting each letter into the Regional Indicator Symbol range.
    /// Non-letter characters are skipped; lowercase input is accepted.
    var flagEmoji: String {
        // Offset from "A" (65) to REGIONAL INDICATOR SYMBOL LETTER A
        // (U+1F1E6 = 127462): 127462 - 65 = 127397.
        let base: UInt32 = 127397
        var flag = ""
        for scalar in uppercased().unicodeScalars {
            // Only A-Z map to regional indicators; skip anything else
            // instead of force-unwrapping a possibly-invalid scalar
            // (the previous `UnicodeScalar(...)!` could trap on
            // arbitrary input).
            guard scalar.value >= 65, scalar.value <= 90,
                  let indicator = UnicodeScalar(base + scalar.value) else {
                continue
            }
            flag.unicodeScalars.append(indicator)
        }
        return flag
    }
}

View File

@@ -0,0 +1,136 @@
import SwiftUI
/// Static catalog of recommended system voices, grouped by language.
/// The voice data is kept in private arrays and rendered with ForEach
/// instead of 17 hand-written row invocations, so adding or editing a
/// recommendation is a one-line data change.
struct RecommendedVoicesView: View {
    @Environment(\.dismiss) var dismiss

    /// One recommended voice: display name, quality tag, and blurb.
    private struct Recommendation {
        let name: String
        let details: String
        let description: String
    }

    /// US-English voices, in the original display order.
    private static let englishVoices: [Recommendation] = [
        Recommendation(
            name: "Alex",
            details: "Male, High Quality",
            description: "The smartest voice on iOS; breathes between sentences, sounds academic and very natural."
        ),
        Recommendation(
            name: "Samantha",
            details: "Female, Standard",
            description: "The classic \"original Siri\" voice; clear and friendly but slightly computerized."
        ),
        Recommendation(
            name: "Ava",
            details: "Female, Premium",
            description: "A modern, high-quality voice that sounds professional, warm, and very human-like."
        ),
        Recommendation(
            name: "Allison",
            details: "Female, Premium",
            description: "A lighter, breathy, and pleasant voice; sounds like a helpful assistant."
        ),
        Recommendation(
            name: "Tom",
            details: "Male, Premium",
            description: "A friendly, standard American male voice; clear and trustworthy."
        ),
        Recommendation(
            name: "Susan",
            details: "Female, Standard/Premium",
            description: "A slightly more formal and crisp voice; sounds like a teacher or automated reader."
        ),
        Recommendation(
            name: "Zoe",
            details: "Female, Premium",
            description: "A bright, cheerful, and younger-sounding voice; energetic vibe."
        ),
        Recommendation(
            name: "Evan",
            details: "Male, Enhanced",
            description: "A deep, smooth, and modern voice; very natural sounding."
        ),
        Recommendation(
            name: "Nathan",
            details: "Male, Enhanced",
            description: "A lighter, younger-sounding male voice; casual and friendly."
        ),
        Recommendation(
            name: "Noelle",
            details: "Female, Enhanced",
            description: "A soft, sweet, and modern female voice; very smooth flow."
        ),
        Recommendation(
            name: "Joelle",
            details: "Female, Enhanced",
            description: "A clear, articulate, and slightly deeper modern female voice."
        ),
        Recommendation(
            name: "Aaron (Siri Voice 2)",
            details: "Male, Neural",
            description: "The current standard American Male Siri voice; distinct, helpful, and highly polished."
        ),
        Recommendation(
            name: "Nicky (Siri Voice 1)",
            details: "Female, Neural",
            description: "The current standard American Female Siri voice; recognizable and high-fidelity."
        )
    ]

    /// Mexican-Spanish voices, in the original display order.
    private static let spanishMexicoVoices: [Recommendation] = [
        Recommendation(
            name: "Paulina",
            details: "Female, Standard/Premium",
            description: "The gold standard for Mexican Spanish; sounds like a professional news anchor or navigator."
        ),
        Recommendation(
            name: "Juan",
            details: "Male, Standard/Premium",
            description: "A clear, neutral male voice; sounds polite but slightly more robotic than Paulina."
        ),
        Recommendation(
            name: "Siri Female (Voice 1)",
            details: "Female, Neural",
            description: "(If downloaded) Very smooth, natural, and helpful; indistinguishable from a real human assistant."
        ),
        Recommendation(
            name: "Siri Male (Voice 2)",
            details: "Male, Neural",
            description: "(If downloaded) A professional, modern male assistant voice with a Mexican accent."
        )
    ]

    var body: some View {
        NavigationView {
            List {
                voiceSection(titled: "English (United States)", voices: Self.englishVoices)
                voiceSection(titled: "Spanish (Mexico)", voices: Self.spanishMexicoVoices)
            }
            .navigationTitle("Recommended Voices")
            .toolbar {
                ToolbarItem(placement: .navigationBarTrailing) {
                    Button("Done") {
                        dismiss()
                    }
                }
            }
        }
    }

    /// Renders one titled list section with a row per recommendation.
    /// Voice names are unique within each group, so they serve as the
    /// ForEach identity.
    private func voiceSection(titled title: String, voices: [Recommendation]) -> some View {
        Section(header: Text(title)) {
            ForEach(voices, id: \.name) { voice in
                VoiceRecommendationRow(
                    name: voice.name,
                    details: voice.details,
                    description: voice.description
                )
            }
        }
    }
}
/// A single list row describing a recommended voice: name on the left,
/// a small quality badge on the right, and a secondary description below.
struct VoiceRecommendationRow: View {
    let name: String
    let details: String
    let description: String

    /// Capsule-style badge showing the voice's gender/quality tag.
    /// Modifier order matters: padding, then background, then corner
    /// rounding, so the tint extends around the text.
    private var detailsBadge: some View {
        Text(details)
            .font(.caption)
            .padding(4)
            .background(Color.blue.opacity(0.1))
            .cornerRadius(4)
            .foregroundColor(.blue)
    }

    var body: some View {
        VStack(alignment: .leading, spacing: 4) {
            HStack {
                Text(name).font(.headline)
                Spacer()
                detailsBadge
            }
            Text(description)
                .font(.subheadline)
                .foregroundColor(.secondary)
        }
        .padding(.vertical, 4)
    }
}

253
FlipTalk/SettingsView.swift Normal file
View File

@@ -0,0 +1,253 @@
import SwiftUI
import AVFoundation
import AVKit
#if canImport(Translation)
import Translation
#endif
struct SettingsView: View {
@EnvironmentObject var themeManager: ThemeManager
@ObservedObject var voiceManager = VoiceManager.shared
@State private var showRecommendedVoices = false
@Environment(\.dismiss) var dismiss
// Easter Egg State
@State private var lightbulbTapCount = 0
@State private var showEasterEgg = false
// Persist selected language
@AppStorage("targetLanguageIdentifier") private var targetLanguageIdentifier: String = ""
// Download confirmation state
@State private var showDownloadAlert = false
@State private var languageToDownload: LanguageManager.LanguageStatus?
@State private var previousLanguageIdentifier: String = ""
// Translation configuration for triggering download
#if canImport(Translation)
@State private var downloadConfig: TranslationSession.Configuration?
#endif
/// Settings form: theme selection (tapping the "Lightbulb" theme five times
/// unlocks an easter-egg video), translation target language, and
/// text-to-speech voice pickers for English and the target language.
var body: some View {
NavigationStack {
Form {
Section(header: Text("Theme Colors")
.font(.title2)
.bold()
.foregroundColor(.primary)
.textCase(nil)) {
Picker("Theme Colors", selection: $themeManager.currentTheme) {
ForEach(AppTheme.allCases) { theme in
Text(theme.displayName)
.tag(theme)
// Easter egg: five consecutive taps on the "Lightbulb"
// row triggers the hidden video player.
.onTapGesture {
themeManager.currentTheme = theme
if theme == .lightbulb {
lightbulbTapCount += 1
if lightbulbTapCount >= 5 {
showEasterEgg = true
lightbulbTapCount = 0
}
} else {
lightbulbTapCount = 0
}
}
}
}
.pickerStyle(.inline)
.labelsHidden()
}
Section(header: Text("Translation Language")
.font(.title2)
.bold()
.foregroundColor(.primary)
.textCase(nil)) {
// Each row indicates whether the language pack can be downloaded
// for offline use or requires an internet connection.
Picker("Target Language", selection: $targetLanguageIdentifier) {
Text("Select a language").tag("")
ForEach(LanguageManager.shared.supportedLanguages) { lang in
if lang.isDownloadAvailable {
Text("\(lang.flag) \(lang.name) (Download available)").tag(lang.id)
} else {
Text("\(lang.flag) \(lang.name) (Internet required)").tag(lang.id)
}
}
}
.pickerStyle(.navigationLink)
}
Section(header: Text("Voice Settings")
.font(.title2)
.bold()
.foregroundColor(.primary)
.textCase(nil),
footer: Text("Tip: For the most natural sound, download \"Enhanced\" or \"Premium\" voices in your iPhone Settings:\nSettings > Accessibility > Read & Speak > Voices.")
.font(.caption)
.foregroundColor(.secondary)
.padding(.top, 8)) {
VStack(alignment: .leading) {
Text("English Voice")
.font(.headline)
Picker("English (US)", selection: $voiceManager.selectedEnglishVoiceIdentifier) {
ForEach(voiceManager.availableEnglishVoices, id: \.identifier) { voice in
Text(voiceManager.description(for: voice))
.tag(voice.identifier)
}
}
.pickerStyle(.menu)
}
VStack(alignment: .leading) {
// Dynamic Header for Target Voice
if let lang = LanguageManager.shared.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) {
Text("\(lang.name) Voice")
.font(.headline)
} else {
Text("Target Language Voice")
.font(.headline)
}
Picker("Voice", selection: $voiceManager.selectedTargetVoiceIdentifier) {
ForEach(voiceManager.availableTargetVoices, id: \.identifier) { voice in
Text(voiceManager.description(for: voice))
.tag(voice.identifier)
}
}
.pickerStyle(.menu)
.disabled(voiceManager.availableTargetVoices.isEmpty)
// Empty list means VoiceManager found no installed voices for
// this language; playback falls back to the system default.
if voiceManager.availableTargetVoices.isEmpty {
Text("No specific voices found for this language. System default will be used.")
.font(.caption)
.foregroundColor(.secondary)
}
}
Button(action: {
showRecommendedVoices = true
}) {
Text("See recommended voices.")
.font(.subheadline)
.foregroundColor(.blue)
}
}
}
.navigationTitle("Settings")
.toolbar {
ToolbarItem(placement: .navigationBarTrailing) {
Button("Close") {
dismiss()
}
}
}
.onAppear {
// Ensure VoiceManager has the correct target language loaded on appear
if !targetLanguageIdentifier.isEmpty {
voiceManager.updateTargetLanguage(to: targetLanguageIdentifier)
}
}
.onChange(of: targetLanguageIdentifier) { oldValue, newValue in
// Update voice manager when user picks a new language
voiceManager.updateTargetLanguage(to: newValue)
// Check if this language is downloadable and prompt for download
if !newValue.isEmpty,
let lang = LanguageManager.shared.supportedLanguages.first(where: { $0.id == newValue }),
lang.isDownloadAvailable {
previousLanguageIdentifier = oldValue
languageToDownload = lang
showDownloadAlert = true
}
}
.onDisappear {
// Settings closed
}
.alert("Download Language", isPresented: $showDownloadAlert) {
Button("Download") {
triggerDownload()
}
Button("Not Now", role: .cancel) {
// Keep the selection but don't download
languageToDownload = nil
}
} message: {
if let lang = languageToDownload {
Text("Would you like to download \(lang.name) for offline translation? This provides faster translations and works without internet.")
} else {
Text("Would you like to download this language for offline translation?")
}
}
#if canImport(Translation)
// Setting `downloadConfig` (in triggerDownload) re-runs this task, which
// drives Apple's language-pack download via prepareTranslation().
.translationTask(downloadConfig) { session in
// This will trigger Apple's download UI automatically
do {
try await session.prepareTranslation()
} catch {
print("Download preparation failed: \(error)")
}
await MainActor.run {
downloadConfig = nil
languageToDownload = nil
}
}
#endif
.sheet(isPresented: $showRecommendedVoices) {
RecommendedVoicesView()
}
.fullScreenCover(isPresented: $showEasterEgg) {
EasterEggPlayerView()
}
}
}
/// Kicks off Apple's language-pack download by publishing a translation
/// configuration; the `.translationTask(downloadConfig)` modifier on the
/// form observes it and presents the system download flow.
private func triggerDownload() {
    guard let language = languageToDownload else { return }
    #if canImport(Translation)
    guard #available(iOS 18.0, *) else { return }
    // English -> target pairing; assigning this state re-runs the task.
    downloadConfig = TranslationSession.Configuration(
        source: Locale.Language(identifier: "en-US"),
        target: Locale.Language(identifier: language.id)
    )
    #endif
}
}
/// Plays the hidden easter-egg video full screen, dismissing automatically
/// when playback finishes or when the bundled asset is missing.
struct EasterEggPlayerView: View {
    @Environment(\.dismiss) var dismiss
    @State private var player: AVPlayer?
    // Token from the block-based NotificationCenter API. It must be removed
    // explicitly: `removeObserver(self)` does NOT unregister block-based
    // observers (and `self` here is a value-type View, never registered as
    // an observer), so the previous code leaked the observer and it could
    // fire after the view was gone.
    @State private var endObserver: NSObjectProtocol?
    var body: some View {
        ZStack {
            Color.black.edgesIgnoringSafeArea(.all)
            if let player = player {
                VideoPlayer(player: player)
                    .edgesIgnoringSafeArea(.all)
                    .onAppear {
                        player.play()
                        // Dismiss as soon as the video reaches its end.
                        endObserver = NotificationCenter.default.addObserver(
                            forName: .AVPlayerItemDidPlayToEndTime,
                            object: player.currentItem,
                            queue: .main
                        ) { _ in
                            dismiss()
                        }
                    }
            } else {
                ProgressView()
            }
        }
        .onAppear {
            if let url = Bundle.main.url(forResource: "easteregg", withExtension: "mp4") {
                self.player = AVPlayer(url: url)
            } else {
                print("Easter egg video not found")
                dismiss()
            }
        }
        .onDisappear {
            player?.pause()
            // Remove the end-of-playback observer so it cannot fire or leak
            // after dismissal.
            if let endObserver = endObserver {
                NotificationCenter.default.removeObserver(endObserver)
            }
        }
    }
}

View File

@@ -0,0 +1,79 @@
import SwiftUI
import Combine
/// The visual themes the user can pick in Settings.
/// Raw values are persisted (JSON-encoded via Codable) in UserDefaults.
enum AppTheme: String, CaseIterable, Identifiable, Codable {
    case dark, light, lightbulb

    /// Stable identity for ForEach — the raw value itself.
    var id: String { rawValue }

    /// Human-readable name shown in the theme picker.
    var displayName: String {
        switch self {
        case .dark:
            return "Default (Dark)"
        case .light:
            return "Light"
        case .lightbulb:
            return "Lightbulb"
        }
    }
}
/// Publishes the active AppTheme and derives the colors the main and voice
/// screens use. The selection is persisted to UserDefaults as JSON data
/// under the "selectedTheme" key.
class ThemeManager: ObservableObject {
    @Published var currentTheme: AppTheme {
        didSet { saveTheme() }
    }

    init() {
        // Restore the previously chosen theme; fall back to dark.
        guard
            let data = UserDefaults.standard.data(forKey: "selectedTheme"),
            let theme = try? JSONDecoder().decode(AppTheme.self, from: data)
        else {
            self.currentTheme = .dark
            return
        }
        self.currentTheme = theme
    }

    /// Writes the current theme to UserDefaults as JSON-encoded data.
    private func saveTheme() {
        guard let data = try? JSONEncoder().encode(currentTheme) else { return }
        UserDefaults.standard.set(data, forKey: "selectedTheme")
    }

    // MARK: - Color Accessors

    /// Screen background: white only for the light theme, black otherwise.
    var backgroundColor: Color {
        switch currentTheme {
        case .light:
            return .white
        case .dark, .lightbulb:
            return .black
        }
    }

    /// Primary text color for the current theme.
    var textColor: Color {
        switch currentTheme {
        case .dark:
            return .white
        case .light:
            return .black
        case .lightbulb:
            return .yellow
        }
    }

    /// Dimmed text color (placeholders, hints).
    var secondaryTextColor: Color {
        switch currentTheme {
        case .dark:
            return .white.opacity(0.25)
        case .light:
            return .black.opacity(0.25)
        case .lightbulb:
            return .yellow.opacity(0.5)
        }
    }

    /// Side-menu background. Intentionally kept dark for every theme so the
    /// menu (with its white text) stays readable and visually distinct from
    /// the themed main/voice screens.
    var menuBackgroundColor: Color {
        .black
    }
}

185
FlipTalk/VoiceManager.swift Normal file
View File

@@ -0,0 +1,185 @@
import Foundation
import AVFoundation
import Combine
/// Loads and exposes the AVSpeechSynthesis voices available for English and
/// for the currently selected target language, filters out novelty voices,
/// sorts by quality, and persists the user's choices in UserDefaults
/// (target-language choices are keyed per language so a French voice id is
/// never applied to Spanish).
class VoiceManager: ObservableObject {
    static let shared = VoiceManager()
    @Published var availableEnglishVoices: [AVSpeechSynthesisVoice] = []
    @Published var availableTargetVoices: [AVSpeechSynthesisVoice] = []
    // UserDefaults Keys
    private let kSelectedEnglishVoice = "selectedEnglishVoiceIdentifier"
    // NOTE: retained for compatibility; target voices are actually stored
    // under the per-language key "selectedVoice_<localeID>" (see below).
    private let kSelectedTargetVoice = "selectedTargetVoiceIdentifier"
    // Current target language (BCP-47 locale id). Defaults to Spanish (MX).
    private var currentTargetLocaleID: String = "es-MX"
    // Token returned by the block-based NotificationCenter API. It must be
    // removed explicitly in deinit: `removeObserver(self)` does NOT remove
    // block-based observers, so the previous implementation leaked it.
    private var voicesChangedObserver: NSObjectProtocol?

    init() {
        loadVoices()
        setupNotifications()
    }

    deinit {
        // Unregister the block-based observer via its token.
        if let observer = voicesChangedObserver {
            NotificationCenter.default.removeObserver(observer)
        }
    }

    /// Reloads the voice lists whenever the installed system voices change.
    private func setupNotifications() {
        voicesChangedObserver = NotificationCenter.default.addObserver(
            forName: Notification.Name("AVSpeechSynthesisVoiceIdentifierDidChangeNotification"),
            object: nil,
            queue: .main
        ) { [weak self] _ in
            self?.loadVoices()
        }
    }

    /// Points the manager at a new target language and refreshes the lists.
    /// Empty identifiers are ignored so the previous language stays active.
    func updateTargetLanguage(to localeID: String) {
        guard !localeID.isEmpty else { return }
        self.currentTargetLocaleID = localeID
        loadVoices()
    }

    /// Rebuilds `availableEnglishVoices` and `availableTargetVoices`,
    /// excluding novelty voices and sorting Premium > Enhanced > Default,
    /// then alphabetically by name.
    func loadVoices() {
        let allVoices = AVSpeechSynthesisVoice.speechVoices()
        // Novelty/joke voices that should never be offered to the user.
        let noveltyVoices = Set([
            "Albert", "Bad News", "Bahh", "Bells", "Boing", "Bubbles", "Cellos",
            "Deranged", "Good News", "Hysterical", "Junior", "Kathy", "Organ",
            "Princess", "Ralph", "Trinoids", "Whisper", "Zarvox",
            "Jester", "Superstar", "Wobble", "Fred",
            "Eddy", "Flo", "Grandma", "Grandpa", "Reed", "Rocko", "Sandy", "Shelley"
        ])
        // Quality-first comparator (Premium > Enhanced > Default), name second.
        let sortComparator: (AVSpeechSynthesisVoice, AVSpeechSynthesisVoice) -> Bool = { v1, v2 in
            if v1.quality != v2.quality {
                let q1 = v1.quality == .premium ? 3 : (v1.quality == .enhanced ? 2 : 1)
                let q2 = v2.quality == .premium ? 3 : (v2.quality == .enhanced ? 2 : 1)
                return q1 > q2
            }
            return v1.name < v2.name
        }
        // English (US) voices.
        availableEnglishVoices = allVoices.filter {
            $0.language == "en-US" && !noveltyVoices.contains($0.name)
        }.sorted(by: sortComparator)
        // Target-language voices. The prefix match also covers simplified
        // codes (e.g. "fr" matches "fr-FR").
        let targetVoices = allVoices.filter {
            ($0.language == self.currentTargetLocaleID || $0.language.starts(with: self.currentTargetLocaleID)) && !noveltyVoices.contains($0.name)
        }
        // An empty list means "no specific voices"; the UI shows a hint and
        // speech falls back to the system default.
        availableTargetVoices = targetVoices.sorted(by: sortComparator)
    }

    // MARK: - Selection Handling

    /// Persisted English voice identifier; defaults to "Samantha" when
    /// available, then the first installed English voice.
    var selectedEnglishVoiceIdentifier: String {
        get {
            if let saved = UserDefaults.standard.string(forKey: kSelectedEnglishVoice) {
                return saved
            }
            if let samantha = availableEnglishVoices.first(where: { $0.name == "Samantha" }) {
                return samantha.identifier
            }
            return availableEnglishVoices.first?.identifier ?? AVSpeechSynthesisVoice(language: "en-US")?.identifier ?? ""
        }
        set {
            UserDefaults.standard.set(newValue, forKey: kSelectedEnglishVoice)
            // Computed property: notify observers manually.
            objectWillChange.send()
        }
    }

    /// Persisted voice identifier for the CURRENT target language. Stored
    /// under a per-language key ("selectedVoice_<localeID>") because a voice
    /// identifier is only valid for its own language.
    var selectedTargetVoiceIdentifier: String {
        get {
            let key = "selectedVoice_\(currentTargetLocaleID)"
            if let saved = UserDefaults.standard.string(forKey: key) {
                return saved
            }
            // Default: best available (lists are sorted quality-first).
            return availableTargetVoices.first?.identifier ?? ""
        }
        set {
            let key = "selectedVoice_\(currentTargetLocaleID)"
            UserDefaults.standard.set(newValue, forKey: key)
            objectWillChange.send()
        }
    }

    /// Resolves the selected English voice, falling back to any installed
    /// English voice, then the system en-US voice.
    func getSelectedEnglishVoice() -> AVSpeechSynthesisVoice? {
        if let voice = AVSpeechSynthesisVoice(identifier: selectedEnglishVoiceIdentifier) {
            return voice
        }
        return availableEnglishVoices.first ?? AVSpeechSynthesisVoice(language: "en-US")
    }

    /// Resolves the selected target-language voice, falling back to any
    /// voice for that language, then the system voice for the locale.
    func getSelectedTargetVoice() -> AVSpeechSynthesisVoice? {
        if let voice = AVSpeechSynthesisVoice(identifier: selectedTargetVoiceIdentifier) {
            return voice
        }
        return availableTargetVoices.first ?? AVSpeechSynthesisVoice(language: currentTargetLocaleID)
    }

    // MARK: - Descriptions

    /// Human-readable picker label for a voice: curated description when the
    /// name is known, plus an "(Enhanced)"/"(Premium)" quality suffix.
    func description(for voice: AVSpeechSynthesisVoice) -> String {
        var traits: [String] = []
        if voice.quality == .enhanced { traits.append("Enhanced") }
        if voice.quality == .premium { traits.append("Premium") }
        let qualitySuffix = traits.isEmpty ? "" : " (\(traits.joined(separator: ", ")))"
        // Curated, user-facing descriptions for well-known system voices.
        let knownDescriptions: [String: String] = [
            "Alex": "Male - Top Tier. The smartest voice; sounds very natural.",
            "Samantha": "Female - Standard. The classic 'original Siri' voice.",
            "Ava": "Female - Premium. Professional, warm, and very human-like.",
            "Allison": "Female - Premium. Lighter, breathy, and pleasant.",
            "Tom": "Male - Premium. Friendly, standard American male.",
            "Susan": "Female - Standard/Premium. Slightly formal and crisp.",
            "Zoe": "Female - Premium. Bright, cheerful, and younger-sounding.",
            "Evan": "Male - Enhanced. Deep, smooth, and modern.",
            "Nathan": "Male - Enhanced. Lighter, younger-sounding male.",
            "Noelle": "Female - Enhanced. Soft, sweet, and modern flow.",
            "Joelle": "Female - Enhanced. Clear, articulate, modern.",
            "Aaron": "Male - Neural. Standard American Male Siri voice.",
            "Nicky": "Female - Neural. Standard American Female Siri voice.",
            // Spanish
            "Paulina": "Female (MX) - Standard/Premium. Gold standard; professional news anchor style.",
            "Juan": "Male (MX) - Standard/Premium. Clear, neutral, polite.",
            "Siri Female": "Female (MX) - Neural. Very smooth, natural assistant.",
            "Siri Male": "Male (MX) - Neural. Professional modern assistant.",
            "Monica": "Female (ES) - Clear Spanish (Spain).",
            "Jorge": "Male (ES) - Clear Spanish (Spain)."
        ]
        // Check exact name match first
        if let specificDesc = knownDescriptions[voice.name] {
            return "\(voice.name) - \(specificDesc)\(qualitySuffix)"
        }
        return "\(voice.name)\(qualitySuffix)"
    }
}

View File

@@ -0,0 +1,660 @@
import SwiftUI
import Speech
import AVFoundation
import Combine
#if canImport(Translation)
import Translation
#endif
/// Singleton wrapper around SFSpeechRecognizer + AVAudioEngine that keeps a
/// running transcript, persists it to UserDefaults ("voiceNoteTranscript"),
/// and appends each new recording session to the text already on screen.
class SpeechRecognizer: ObservableObject {
static let shared = SpeechRecognizer()
@Published var transcript = ""
@Published var isRecording = false
@Published var error: String?
private var audioEngine = AVAudioEngine()
private var request: SFSpeechAudioBufferRecognitionRequest?
private var task: SFSpeechRecognitionTask?
private var recognizer: SFSpeechRecognizer?
private var currentLocale = Locale(identifier: "en-US")
// To handle appending new sessions to existing text
private var sessionStartTranscript = ""
// Private: clients must use the shared singleton.
private init() {
// Load saved transcript
if let saved = UserDefaults.standard.string(forKey: "voiceNoteTranscript") {
self.transcript = saved
}
// Initialize default recognizer
self.recognizer = SFSpeechRecognizer(locale: currentLocale)
requestPermissions()
}
// Requests speech-recognition authorization and surfaces denial reasons
// through the published `error` property.
private func requestPermissions() {
SFSpeechRecognizer.requestAuthorization { authStatus in
DispatchQueue.main.async {
switch authStatus {
case .authorized:
break
case .denied:
self.error = "Speech recognition authorization denied"
case .restricted:
self.error = "Speech recognition restricted on this device"
case .notDetermined:
self.error = "Speech recognition not yet authorized"
@unknown default:
self.error = "Unknown authorization status"
}
}
}
}
/// Sets the locale used by the NEXT recording session; an in-progress
/// session keeps its current recognizer.
func setLanguage(locale: Locale) {
currentLocale = locale
// If we are already recording, we'd need to stop and restart,
// but for now we just update the recognizer for the next session
if recognizer?.locale != locale {
recognizer = SFSpeechRecognizer(locale: locale)
}
}
/// Starts a recording session (no-op if one is already running). With
/// `allowOnline` true, a recognition error triggers one automatic retry
/// in on-device-only mode after a short cleanup delay.
func startTranscribing(allowOnline: Bool = true, locale: Locale? = nil) {
// Determine locale
if let locale = locale {
self.currentLocale = locale
}
let localeToUse = self.currentLocale
// Safety net: Reload from UserDefaults if empty
if transcript.isEmpty {
if let saved = UserDefaults.standard.string(forKey: "voiceNoteTranscript") {
self.transcript = saved
}
}
guard !isRecording else { return }
// Update recognizer if needed
if recognizer?.locale != localeToUse {
recognizer = SFSpeechRecognizer(locale: localeToUse)
}
guard let recognizer = recognizer, recognizer.isAvailable else {
self.error = "Speech recognizer is not available for \(localeToUse.identifier)"
return
}
// Save current transcript as the starting point for this session
sessionStartTranscript = transcript
do {
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(.playAndRecord, mode: .measurement, options: [.duckOthers, .defaultToSpeaker, .allowBluetoothHFP])
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
request = SFSpeechAudioBufferRecognitionRequest()
guard let request = request else { return }
request.shouldReportPartialResults = true
request.requiresOnDeviceRecognition = !allowOnline
// Enable automatic punctuation (iOS 16+)
if #available(iOS 16, *) {
request.addsPunctuation = true
}
let inputNode = audioEngine.inputNode
task = recognizer.recognitionTask(with: request) { [weak self] result, error in
guard let self = self else { return }
if let error = error {
// Check if we should fallback (if we were trying online and it failed)
// NOTE(review): this retries on ANY error, including task
// cancellation — confirm cancel paths cannot restart a session.
if allowOnline {
print("Online transcription failed: \(error.localizedDescription). Retrying offline.")
// We must stop the current engine/request before retrying
self.stopTranscribing()
// Retry with on-device only
// Add a small delay to ensure cleanup completes
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.startTranscribing(allowOnline: false, locale: localeToUse)
}
return
} else {
// We were already offline (or forced offline), just report error
DispatchQueue.main.async {
self.error = "Transcription error: \(error.localizedDescription)"
}
}
}
if let result = result {
DispatchQueue.main.async {
// Only update if we're still recording (prevents race condition with clear)
guard self.isRecording else { return }
// Append new text to the session start text
let newText = result.bestTranscription.formattedString
// Ignore empty results to prevent overwriting persistence
guard !newText.isEmpty else { return }
if self.sessionStartTranscript.isEmpty {
self.transcript = newText
} else {
self.transcript = self.sessionStartTranscript + " " + newText
}
// Save to UserDefaults
UserDefaults.standard.set(self.transcript, forKey: "voiceNoteTranscript")
}
}
if error != nil || (result?.isFinal ?? false) {
self.stopTranscribing()
}
}
let recordingFormat = inputNode.outputFormat(forBus: 0)
inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
request.append(buffer)
}
try audioEngine.start()
isRecording = true
error = nil
} catch {
self.error = "Error starting recording: \(error.localizedDescription)"
stopTranscribing()
}
}
/// Tears down the audio engine, tap, request and task. Safe to call when
/// not recording (returns immediately).
func stopTranscribing() {
// Ensure we only stop if we are actually recording/have resources allocated
guard isRecording else { return }
audioEngine.stop()
audioEngine.inputNode.removeTap(onBus: 0)
request?.endAudio()
task?.cancel()
task = nil
request = nil
isRecording = false
}
/// Clears the transcript (including the persisted copy) and automatically
/// restarts transcription in the current locale after a short delay.
func clear() {
// Stop recording first to prevent it from saving again
stopTranscribing()
// Then clear everything
transcript = ""
sessionStartTranscript = ""
UserDefaults.standard.removeObject(forKey: "voiceNoteTranscript")
// Restart transcribing after a brief delay to allow audio engine to fully stop
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
self.startTranscribing()
}
}
}
/// Full-screen transcription view shown to the hearing person: displays the
/// live transcript (English or target language) and can batch-translate the
/// target-language transcript back to English.
struct VoiceNoteView: View {
@EnvironmentObject var themeManager: ThemeManager
@ObservedObject var speechRecognizer = SpeechRecognizer.shared
// Invoked when the user flips the screen back to the typing side.
var onFlipBack: () -> Void
// Current display size; recalculated so the text always fits on screen.
@State private var fontSize: CGFloat = 40
@AppStorage("isSpanishMode") private var isSpanishMode = false
@State private var translatedText = ""
@State private var triggerTranslation = false
@State private var translationError: String?
// NOTE(review): unlike `translationConfig` below, this property is not
// wrapped in #if canImport(Translation) — confirm it compiles on targets
// where the Translation framework is unavailable.
@State private var manualTranslationConfig: TranslationSession.Configuration?
@State private var showTranslation = false
// Changing this id forces .translationTask to be recreated per request.
@State private var translationTaskID = UUID()
@State private var longPressTriggered = false
@State private var showingSettingsAlert = false
// New Dynamic Language State
@ObservedObject private var languageManager = LanguageManager.shared
@AppStorage("targetLanguageIdentifier") private var targetLanguageIdentifier: String = ""
@State private var showSettings = false
// Configuration for iOS 18+ Translation
#if canImport(Translation)
@State private var translationConfig: TranslationSession.Configuration?
#endif
/// Layers, back to front: the transcript/translation text, a top bar
/// (language toggle + "Clear all"), and a bottom bar (status, translate
/// and flip buttons). Horizontal swipes flip back (right) or clear (left).
var body: some View {
GeometryReader { geometry in
ZStack {
// Main Text Area
if isSpanishMode {
// Target Language Mode (Single View)
ScrollView {
VStack(alignment: .leading) {
if showTranslation {
// Show English Translation
if let error = translationError {
Text("Error: \(error)")
.foregroundColor(.red)
.font(.caption)
.padding(.bottom)
}
Text(translatedText.isEmpty ? "Translation will appear here..." : translatedText)
.font(.system(size: fontSize, weight: .bold)) // Use dynamic fontSize
.foregroundColor(translatedText.isEmpty ? .gray : themeManager.textColor)
.multilineTextAlignment(.leading)
} else {
// Show Target Transcript
Text(speechRecognizer.transcript.isEmpty ? languageManager.getStartSpeakingText(for: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) : speechRecognizer.transcript)
.font(.system(size: fontSize, weight: .bold)) // Use dynamic fontSize
.foregroundColor(themeManager.textColor)
.multilineTextAlignment(.leading)
}
}
.padding(.horizontal)
.padding(.top, 80) // Clear top bar
.padding(.bottom, 120) // Clear bottom controls
.frame(maxWidth: .infinity, alignment: .leading)
// Refit the font whenever any displayed text changes.
.onChange(of: speechRecognizer.transcript) {
adjustFontSize(containerSize: geometry.size)
}
.onChange(of: translatedText) {
adjustFontSize(containerSize: geometry.size)
}
.onChange(of: showTranslation) {
adjustFontSize(containerSize: geometry.size)
}
.onChange(of: targetLanguageIdentifier) {
print("VoiceNoteView: Language changed to \(targetLanguageIdentifier)")
// Force state update if needed, though AppStorage should trigger redraw.
// Might need to update speechRecognizer language if in Spanish mode?
if isSpanishMode {
let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier)
speechRecognizer.setLanguage(locale: locale)
speechRecognizer.clear()
}
}
}
.scrollDisabled(true)
.frame(maxWidth: .infinity, maxHeight: .infinity)
.background(themeManager.backgroundColor)
} else {
// Normal English Full Screen
ScrollView {
Text(speechRecognizer.transcript.isEmpty ? "Start speaking..." : speechRecognizer.transcript)
.font(.system(size: fontSize, weight: .bold))
.foregroundColor(themeManager.textColor)
.multilineTextAlignment(.leading)
.padding(.horizontal)
.padding(.top, 80) // Clear top bar
.padding(.bottom, 120) // Clear bottom controls
.frame(maxWidth: .infinity, alignment: .leading)
.textSelection(.enabled)
.onChange(of: speechRecognizer.transcript) {
adjustFontSize(containerSize: geometry.size)
}
}
.scrollDisabled(true)
.frame(maxWidth: .infinity, maxHeight: .infinity)
}
// Top Bar with Clear All Button
VStack {
HStack {
// Language Toggle
Button(action: {
toggleLanguage()
}) {
HStack(spacing: 6) {
// Dynamic Flag and Name
if isSpanishMode {
if let lang = languageManager.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) {
Text(lang.flag)
.font(.title2)
Text(lang.name)
.fontWeight(.semibold)
.font(.system(size: 14))
} else {
// Fallback if not found or empty (e.g. legacy state)
Text("🏳️")
.font(.title2)
Text("Select Language")
.fontWeight(.semibold)
.font(.system(size: 14))
}
} else {
Text("🇺🇸")
.font(.title2)
Text("English")
.fontWeight(.semibold)
.font(.system(size: 14))
}
}
.padding(.horizontal, 12)
.padding(.vertical, 8)
.background(Color.black.opacity(0.1))
.cornerRadius(20)
}
.padding(.leading)
Spacer()
// "Clear all": wipes transcript and translation state, then
// SpeechRecognizer.clear() auto-restarts listening.
Button(action: {
// Ensure the recognizer stays in the correct mode
let localeID = isSpanishMode ? (targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) : "en-US"
speechRecognizer.setLanguage(locale: Locale(identifier: localeID))
speechRecognizer.clear()
translatedText = ""
translationError = nil
showTranslation = false
// Reset translation config so it can be re-triggered
manualTranslationConfig?.invalidate()
manualTranslationConfig = nil
}) {
Text("Clear all")
.foregroundColor(.red)
.padding()
}
}
Spacer()
}
// Bottom Area (Status & Flip Button)
VStack {
Spacer()
ZStack {
// Left & Right Controls
HStack {
// Left: Status
VStack(alignment: .leading, spacing: 4) {
if speechRecognizer.isRecording {
Text("Listening...")
.font(.caption)
.foregroundColor(.red)
}
if let error = speechRecognizer.error {
Text(error)
.font(.caption)
.foregroundColor(.red)
}
}
.padding(.leading, 20)
Spacer()
// Right: Flip Button
Button(action: {
onFlipBack()
}) {
Text("Flip")
.font(.system(size: 16, weight: .medium))
.foregroundColor(.white)
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(Color.blue)
.cornerRadius(25)
}
.padding(.trailing, 20)
}
// Simple Translate Button
if isSpanishMode {
Button(action: {
if targetLanguageIdentifier.isEmpty {
// No language selected. Prompt user to open settings.
showingSettingsAlert = true
} else {
// Toggles between transcript and its English translation.
if showTranslation {
showTranslation = false
} else {
performTranslation()
}
}
}) {
Group {
if targetLanguageIdentifier.isEmpty {
Text("Translate")
} else {
if let lang = languageManager.supportedLanguages.first(where: { $0.id == targetLanguageIdentifier }) {
if showTranslation {
Text("Original \(lang.flag)")
} else {
Text("Translate to 🇺🇸")
}
} else {
Text("Translate")
}
}
}
.fontWeight(.semibold)
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(Color.blue)
.foregroundColor(.white)
.cornerRadius(25)
.shadow(radius: 3)
}
.alert("Select Language", isPresented: $showingSettingsAlert) {
Button("Open Settings") {
showSettings = true
}
Button("Cancel", role: .cancel) { }
} message: {
Text("Please select a translation language in Settings.")
}
}
}
.padding(.bottom, 20)
.background(
LinearGradient(
gradient: Gradient(colors: [themeManager.backgroundColor.opacity(0), themeManager.backgroundColor]),
startPoint: .top,
endPoint: .bottom
)
.frame(height: 100)
)
}
}
.background(themeManager.backgroundColor)
.onAppear {
if #available(iOS 18.0, *) {
Task {
await languageManager.checkAvailability()
}
}
// Recalculate font size immediately in case there is existing text
adjustFontSize(containerSize: geometry.size)
}
.onChange(of: targetLanguageIdentifier) {
if isSpanishMode {
let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier)
speechRecognizer.setLanguage(locale: locale)
speechRecognizer.clear()
}
}
.sheet(isPresented: $showSettings, onDismiss: {
if isSpanishMode {
let locale = Locale(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier)
speechRecognizer.setLanguage(locale: locale)
speechRecognizer.clear()
}
}) {
SettingsView()
.environmentObject(themeManager)
}
.onDisappear {
// speechRecognizer.stopTranscribing() - Removed to keep recording in background
}
// Batch Translation Task
#if canImport(Translation)
.translationTask(manualTranslationConfig) { session in
do {
// Perform single batch translation
let response = try await session.translate(speechRecognizer.transcript)
translatedText = response.targetText
translationError = nil
// showTranslation is already true
} catch {
// Suppress specific "empty" errors or generic noise
let errorMsg = error.localizedDescription
if !errorMsg.localizedCaseInsensitiveContains("empty") {
print("Translation error: \(error)")
translationError = errorMsg
} else {
translationError = nil
// If silently failed, revert view so user can try again or see transcript
showTranslation = false
}
}
}
#else
// Fallback or empty logic for older iOS versions handled by UI checks
#endif
.id(translationTaskID) // Force recreation of the task on every request
.gesture(
DragGesture()
.onEnded { value in
if value.translation.width > 50 {
// Swipe Right -> Flip Back
onFlipBack()
} else if value.translation.width < -50 {
// Swipe Left -> Clear & Reset
if showTranslation {
showTranslation = false
}
speechRecognizer.clear()
}
}
)
}
}
// Toggle Logic
/// Switches between English mode and target-language mode: stops the current
/// recording, points the recognizer at the new locale, clears the transcript
/// (which auto-restarts transcription), and rebuilds or tears down the
/// translation configuration accordingly (iOS 18+ only).
private func toggleLanguage() {
isSpanishMode.toggle()
showTranslation = false
// Stop current recording
speechRecognizer.stopTranscribing()
// Update the locale in SpeechRecognizer immediately
let localeID = isSpanishMode ? (targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier) : "en-US"
speechRecognizer.setLanguage(locale: Locale(identifier: localeID))
// Clear old text and restart (clear() automatically restarts transcription using the current locale)
speechRecognizer.clear()
// Configure translation session if needed
if #available(iOS 18.0, *), isSpanishMode {
#if canImport(Translation)
// Invalidating and recreating config triggers the task
manualTranslationConfig = TranslationSession.Configuration(
source: Locale.Language(identifier: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier),
target: Locale.Language(identifier: "en-US")
)
#endif
} else {
#if canImport(Translation)
manualTranslationConfig?.invalidate()
manualTranslationConfig = nil
#endif
}
}
// Trigger translation
/// Requests a batch translation of the current transcript into English.
/// Resets the task id and configuration so the .translationTask modifier
/// re-runs even for repeated requests with the same language pair.
private func performTranslation() {
guard !speechRecognizer.transcript.isEmpty else { return }
translatedText = "Translating..."
translationError = nil
showTranslation = true // Show loading state immediately
if #available(iOS 18.0, *) {
#if canImport(Translation)
// Force a reset of the configuration
if manualTranslationConfig != nil {
manualTranslationConfig?.invalidate()
manualTranslationConfig = nil
}
let langId = targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier
// All languages use .translationTask modifier
// The short delay lets SwiftUI observe the nil config before the
// fresh configuration (and new task id) land.
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
translationTaskID = UUID()
manualTranslationConfig = TranslationSession.Configuration(
source: Locale.Language(identifier: langId),
target: Locale.Language(identifier: "en-US")
)
}
#endif
} else {
translationError = "Translation requires iOS 18"
}
}
/// Starts (or restarts) transcription in whichever language the view is
/// currently showing: the chosen target language (defaulting to "es-MX"
/// when none is selected) in target mode, otherwise US English.
private func startWithCurrentLanguage() {
    let localeID: String
    if isSpanishMode {
        localeID = targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier
    } else {
        localeID = "en-US"
    }
    speechRecognizer.startTranscribing(locale: Locale(identifier: localeID))
}
/// Shrinks the display font (40pt down to 12pt, in 2pt steps) until the
/// currently rendered text fits the available area, then animates the change.
private func adjustFontSize(containerSize: CGSize) {
    let maxFontSize: CGFloat = 40
    let minFontSize: CGFloat = 12

    // Mirror exactly what the view is rendering, placeholders included,
    // so the measurement matches the visible text.
    let displayedText: String
    if isSpanishMode {
        if showTranslation {
            displayedText = translatedText.isEmpty ? "Translation will appear here..." : translatedText
        } else {
            displayedText = speechRecognizer.transcript.isEmpty
                ? languageManager.getStartSpeakingText(for: targetLanguageIdentifier.isEmpty ? "es-MX" : targetLanguageIdentifier)
                : speechRecognizer.transcript
        }
    } else {
        displayedText = speechRecognizer.transcript.isEmpty ? "Start speaking..." : speechRecognizer.transcript
    }

    // Usable area after horizontal padding (16pt per side) and the
    // top-bar / bottom-controls buffer.
    let availableWidth = containerSize.width - 32
    let availableHeight = containerSize.height - 180

    // Largest candidate whose rendered height fits; otherwise the minimum.
    let fitted = stride(from: maxFontSize, through: minFontSize, by: -2).first { size in
        let attributes = [NSAttributedString.Key.font: UIFont.systemFont(ofSize: size, weight: .bold)]
        let rect = NSString(string: displayedText).boundingRect(
            with: CGSize(width: availableWidth, height: .greatestFiniteMagnitude),
            options: [.usesLineFragmentOrigin, .usesFontLeading],
            attributes: attributes,
            context: nil
        )
        return rect.height <= availableHeight
    } ?? minFontSize

    if fontSize != fitted {
        withAnimation {
            fontSize = fitted
        }
    }
}
}

BIN
FlipTalk/appstore.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

BIN
FlipTalk/easteregg.mp4 Normal file

Binary file not shown.

BIN
FlipTalk/playstore.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 244 KiB

23
README.md Normal file
View File

@@ -0,0 +1,23 @@
# FlipTalk: The Bridge for Instant In-Person Communication
Effortless 1:1 conversations for the Deaf and Hard of Hearing. Type your message, flip the screen, and instantly see spoken words transcribed. Private, multilingual, and always ready.
## Full Description
**Bridging the Conversation Gap**
FlipTalk is designed for immediate, barrier-free communication between Deaf and hearing individuals. Whether you are ordering coffee or having a deep conversation, FlipTalk empowers you to communicate in over 20 languages without missing a beat.
**Key Features**
* **⚡️ Instant Transcription:** No "Start" button needed. Transcription is active the moment you open the app or flip the screen, ensuring you never miss the first word.
* **🔒 Privacy First:** Your security matters. All processing happens 100% on-device. Your conversations are never stored on a server, guaranteeing complete privacy.
* **🔄 The "Flip" Experience:** Seamlessly switch between your typed message and the hearing person's transcribed speech. Use the Split-View for a unified real-time conversation log.
* **🗣️ Continuous Speaking Mode:** Keep the conversation natural. Enable "Speak for me" to automatically vocalize your sentences as you finish typing them (triggered by punctuation), eliminating the need for constant tapping.
* **🌍 Multilingual Support:** Fluent in 20+ languages including English, Spanish, French, German, and Chinese. Ideal for travel and multilingual households.
* **🧠 Smart History & Favorites:** Swipe left to review past chats or swipe right to access your custom Quick Phrases (e.g., "Coffee please," "Emergency").
* **👁️ Designed for Readability:** Built with high-contrast optics, Dark Mode, and auto-sizing text that dynamically fills the screen for maximum clarity.
## How It Works
1. **Type:** You type your message.
2. **Flip:** Swipe right or tap to flip the screen towards the hearing person.
3. **Read:** They speak, and their words appear instantly on your screen.

BIN
Screenshots/iPad/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 465 KiB

BIN
Screenshots/iPad/2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 503 KiB

BIN
Screenshots/iPad/3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 198 KiB

BIN
Screenshots/iPad/4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 222 KiB

BIN
Screenshots/iPad/5.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 276 KiB

BIN
Screenshots/iPad/6.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 241 KiB

BIN
Screenshots/iPhone/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 418 KiB

BIN
Screenshots/iPhone/2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 552 KiB

BIN
Screenshots/iPhone/3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 283 KiB

BIN
Screenshots/iPhone/4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 255 KiB

BIN
Screenshots/iPhone/5.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 212 KiB

BIN
Screenshots/iPhone/6.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 234 KiB

BIN
Screenshots/iPhone/7.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 261 KiB

27
check_build.sh Executable file
View File

@@ -0,0 +1,27 @@
#!/bin/zsh
set -o pipefail # Fail if xcodebuild fails, even with xcbeautify

# --- Configuration ---
SCHEME="FlipTalk"
DEVICE_NAME="iPhone 17 Pro"
BUILD_PATH="./build"

echo "🔍 Checking compilation for $SCHEME..."

# Build Only (No Install/Launch)
# We use 'env -u' to hide Homebrew variables
# We use '-derivedDataPath' to keep it isolated
env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \
    -scheme "$SCHEME" \
    -destination "platform=iOS Simulator,name=$DEVICE_NAME" \
    -configuration Debug \
    -derivedDataPath "$BUILD_PATH" \
    build | xcbeautify

# Capture the pipeline's exit status immediately: checking $? later is
# brittle, since any command inserted above the check would clobber it.
build_status=$?
if [ "$build_status" -eq 0 ]; then
    echo "✅ Build Succeeded. No errors found."
else
    echo "❌ Build Failed."
    exit "$build_status"
fi

BIN
fliptalk_orig.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 943 KiB

47
run_ios_simulator.sh Executable file
View File

@@ -0,0 +1,47 @@
#!/bin/zsh
set -e           # Exit immediately if any command fails
set -o pipefail  # Without this, a failed xcodebuild piped into a succeeding
                 # xcbeautify reports success, and the script would go on to
                 # install a stale (or missing) .app bundle.

# --- Configuration ---
SCHEME="FlipTalk"
BUNDLE_ID="com.jaredlog.Flip-Talk"
DEVICE_NAME="iPhone 17 Pro"
BUILD_PATH="./build" # This ensures Predictable Paths

echo "🚀 Starting Build for $DEVICE_NAME..."

# 1. Boot the simulator if it isn't already running
# We use 'grep' to check status so we don't try to boot an active device
if ! xcrun simctl list devices | grep "$DEVICE_NAME" | grep -q "(Booted)"; then
    echo "⚙️ Booting Simulator..."
    xcrun simctl boot "$DEVICE_NAME"
fi
open -a Simulator

# 2. Build the App
# We use 'env -u' to hide your Homebrew variables (CC, CXX) from Xcode
# We use '-derivedDataPath' to force the build into the local ./build folder
echo "🔨 Compiling..."
env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \
    -scheme "$SCHEME" \
    -destination "platform=iOS Simulator,name=$DEVICE_NAME" \
    -configuration Debug \
    -derivedDataPath "$BUILD_PATH" \
    clean build | xcbeautify

# 3. Locate the .app bundle
# Since we used -derivedDataPath, we know EXACTLY where this is.
APP_PATH="$BUILD_PATH/Build/Products/Debug-iphonesimulator/$SCHEME.app"
if [ ! -d "$APP_PATH" ]; then
    echo "❌ Error: App bundle not found at $APP_PATH"
    exit 1
fi

# 4. Install and Launch
echo "📲 Installing..."
xcrun simctl install "$DEVICE_NAME" "$APP_PATH"

echo "▶️ Launching $BUNDLE_ID..."
xcrun simctl launch "$DEVICE_NAME" "$BUNDLE_ID"

echo "✅ Done!"