Initial commit

Add BeamScribe iOS app for real-time transcription with multipeer connectivity.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-19 22:03:21 -05:00
commit ce40831933
62 changed files with 7871 additions and 0 deletions

80
.gitignore vendored Normal file
View File

@@ -0,0 +1,80 @@
# Build
build/
DerivedData/
*.ipa
*.dSYM.zip
*.dSYM
# CocoaPods
Pods/
*.xcworkspace
!*.xcodeproj/project.xcworkspace
# Carthage
Carthage/Build/
Carthage/Checkouts/
# Swift Package Manager
.build/
.swiftpm/
Package.resolved
# Xcode user-specific files
*.xcuserstate
*.xcuserdatad/
xcuserdata/
*.xccheckout
*.moved-aside
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
# macOS
.DS_Store
.AppleDouble
.LSOverride
._*
# Node.js
node_modules/
dist/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# IDE
.idea/
*.swp
*.swo
*~
# Fastlane
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output
# Code coverage
*.gcno
*.gcda
*.gcov
# Playgrounds
timeline.xctimeline
playground.xcworkspace
# Archives
*.xcarchive
# Secrets (never commit these)
*.pem
*.p12
*.mobileprovision
Secrets/
.env
.env.*

View File

@@ -0,0 +1,372 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXFileReference section */
7F7AC8092EE887A80068BD36 /* BeamScribe.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = BeamScribe.app; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */
7F7AC8432EE8894A0068BD36 /* Exceptions for "BeamScribe" folder in "BeamScribe" target */ = {
isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
membershipExceptions = (
Info.plist,
);
target = 7F7AC8082EE887A80068BD36 /* BeamScribe */;
};
/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
7F7AC80B2EE887A80068BD36 /* BeamScribe */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
7F7AC8432EE8894A0068BD36 /* Exceptions for "BeamScribe" folder in "BeamScribe" target */,
);
path = BeamScribe;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
7F7AC8062EE887A80068BD36 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7F7AC8002EE887A80068BD36 = {
isa = PBXGroup;
children = (
7F7AC80B2EE887A80068BD36 /* BeamScribe */,
7F7AC80A2EE887A80068BD36 /* Products */,
);
sourceTree = "<group>";
};
7F7AC80A2EE887A80068BD36 /* Products */ = {
isa = PBXGroup;
children = (
7F7AC8092EE887A80068BD36 /* BeamScribe.app */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7F7AC8082EE887A80068BD36 /* BeamScribe */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7F7AC8142EE887A90068BD36 /* Build configuration list for PBXNativeTarget "BeamScribe" */;
buildPhases = (
7F7AC8052EE887A80068BD36 /* Sources */,
7F7AC8062EE887A80068BD36 /* Frameworks */,
7F7AC8072EE887A80068BD36 /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
7F7AC80B2EE887A80068BD36 /* BeamScribe */,
);
name = BeamScribe;
packageProductDependencies = (
);
productName = BeamScribe;
productReference = 7F7AC8092EE887A80068BD36 /* BeamScribe.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7F7AC8012EE887A80068BD36 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2610;
LastUpgradeCheck = 2620;
TargetAttributes = {
7F7AC8082EE887A80068BD36 = {
CreatedOnToolsVersion = 26.1.1;
};
};
};
buildConfigurationList = 7F7AC8042EE887A80068BD36 /* Build configuration list for PBXProject "BeamScribe" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7F7AC8002EE887A80068BD36;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = 7F7AC80A2EE887A80068BD36 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7F7AC8082EE887A80068BD36 /* BeamScribe */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7F7AC8072EE887A80068BD36 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7F7AC8052EE887A80068BD36 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
7F7AC8122EE887A90068BD36 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.1;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
7F7AC8132EE887A90068BD36 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.1;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7F7AC8152EE887A90068BD36 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 2;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = BeamScribe/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSLocalNetworkUsageDescription = "BeamScribe uses the local network to share transcripts with nearby devices.";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "BeamScribe needs microphone access to transcribe speech.";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "BeamScribe uses speech recognition to convert voice to text.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.4;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.BeamScribe;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7F7AC8162EE887A90068BD36 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 2;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = BeamScribe/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSLocalNetworkUsageDescription = "BeamScribe uses the local network to share transcripts with nearby devices.";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "BeamScribe needs microphone access to transcribe speech.";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "BeamScribe uses speech recognition to convert voice to text.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.4;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.BeamScribe;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = YES;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7F7AC8042EE887A80068BD36 /* Build configuration list for PBXProject "BeamScribe" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F7AC8122EE887A90068BD36 /* Debug */,
7F7AC8132EE887A90068BD36 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7F7AC8142EE887A90068BD36 /* Build configuration list for PBXNativeTarget "BeamScribe" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F7AC8152EE887A90068BD36 /* Debug */,
7F7AC8162EE887A90068BD36 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7F7AC8012EE887A80068BD36 /* Project object */;
}

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "2620"
version = "1.7">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES"
buildArchitectures = "Automatic">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F7AC8082EE887A80068BD36"
BuildableName = "BeamScribe.app"
BlueprintName = "BeamScribe"
ReferencedContainer = "container:BeamScribe.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES"
shouldAutocreateTestPlan = "YES">
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F7AC8082EE887A80068BD36"
BuildableName = "BeamScribe.app"
BlueprintName = "BeamScribe"
ReferencedContainer = "container:BeamScribe.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<StoreKitConfigurationFileReference
identifier = "../../BeamScribe/BeamScribe.storekit">
</StoreKitConfigurationFileReference>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "7F7AC8082EE887A80068BD36"
BuildableName = "BeamScribe.app"
BlueprintName = "BeamScribe"
ReferencedContainer = "container:BeamScribe.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 37 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 52 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -0,0 +1 @@
{"images":[{"size":"60x60","expected-size":"180","filename":"180.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"40x40","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"60x60","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"57x57","expected-size":"57","filename":"57.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"87","filename":"87.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"57x57","expected-size":"114","filename":"114.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"60","filename":"60.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"1024x1024","filename":"1024.png","expected-size":"1024","idiom":"ios-marketing","folder":"Assets.xcassets/AppIcon.appiconset/","scale":"1x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"72x72","expected-size":"72","filename":"72.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"76x76","expected-size":"152","filename":"152.png","folder":"Assets.xcas
sets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"50x50","expected-size":"100","filename":"100.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"76x76","expected-size":"76","filename":"76.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"50x50","expected-size":"50","filename":"50.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"72x72","expected-size":"144","filename":"144.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"40x40","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"83.5x83.5","expected-size":"167","filename":"167.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"20x20","expected-size":"20","filename":"20.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"}]}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,56 @@
{
"identifier" : "6FAB981B",
"nonRenewingSubscriptions" : [
],
"products" : [
],
"settings" : {
},
"subscriptionGroups" : [
{
"id" : "21430985",
"localizations" : [
{
"description" : "Unlock unlimited transcription time",
"displayName" : "Unlimited Time",
"locale" : "en_US"
}
],
"name" : "unlock_unlimited_time",
"subscriptions" : [
{
"adHocOffers" : [
],
"codeOffers" : [
],
"displayPrice" : "1.99",
"familyShareable" : false,
"groupNumber" : 1,
"internalID" : "6756460743",
"introductoryOffer" : null,
"localizations" : [
{
"description" : "Unlimited transcription time",
"displayName" : "Unlock Unlimited Time",
"locale" : "en_US"
}
],
"productID" : "unlock_unlimited_time",
"recurringSubscriptionPeriod" : "P1Y",
"referenceName" : "Unlock Unlimited Time",
"subscriptionGroupID" : "21430985",
"type" : "RecurringSubscription"
}
]
}
],
"version" : {
"major" : 3,
"minor" : 0
}
}

View File

@@ -0,0 +1,56 @@
//
// BeamScribeApp.swift
// BeamScribe
//
// Created by Jared on 12/9/25.
//
import SwiftUI
import SwiftData
@main
struct BeamScribeApp: App {
    // App-lifetime observable state; each object is created exactly once here.
    // NOTE(review): ContentView appears to create its own @StateObject instances
    // of several of these same manager types, so the environment objects injected
    // below may be shadowed/unused downstream — confirm which set is intended.
    @StateObject private var sessionState = SessionState()
    @StateObject private var transcriptionManager = TranscriptionManager()
    @StateObject private var multipeerManager = MultipeerManager()
    @StateObject private var fileManager = FileStorageManager()
    @StateObject private var subscriptionManager = SubscriptionManager()

    @Environment(\.scenePhase) private var scenePhase

    var body: some Scene {
        WindowGroup {
            ContentView()
                .environmentObject(sessionState)
                .environmentObject(transcriptionManager)
                .environmentObject(multipeerManager)
                .environmentObject(fileManager)
                .environmentObject(subscriptionManager)
                // Enforce light mode for now to ensure consistency
                .preferredColorScheme(.light)
                .onChange(of: scenePhase) { previousPhase, currentPhase in
                    respondToScenePhase(previous: previousPhase, current: currentPhase)
                }
        }
    }

    /// Reacts to scene-phase transitions. Every case is currently a deliberate
    /// no-op: keep-awake is handled in SettingsView, and audio-session
    /// interruption handling lives in TranscriptionManager.
    private func respondToScenePhase(previous: ScenePhase, current: ScenePhase) {
        switch current {
        case .active, .inactive, .background:
            // Nothing to do in any known phase (see notes above).
            break
        @unknown default:
            break
        }
    }
}

View File

@@ -0,0 +1,231 @@
//
// ContentView.swift
// BeamScribe
//
// Main navigation container that switches between app phases.
//
import SwiftUI
import Combine
import MultipeerConnectivity
import AVFoundation
/// Root navigation container: switches the visible screen based on
/// `sessionState.appPhase` and wires up all cross-manager callbacks
/// (transcription -> multipeer broadcast, multipeer packets -> session state).
struct ContentView: View {
@EnvironmentObject var sessionState: SessionState
// NOTE(review): these managers are created fresh here, while BeamScribeApp also
// injects instances of the same types via environmentObject — the injected
// instances appear unused by this view. Confirm which set is intended.
@StateObject private var transcriptionManager = TranscriptionManager()
@StateObject private var multipeerManager = MultipeerManager()
@StateObject private var fileManager = FileStorageManager()
@StateObject private var audioManager = AudioSessionManager()
@StateObject private var settings = SettingsModel()
var body: some View {
Group {
// One screen per app phase; transitions animate via the .animation below.
switch sessionState.appPhase {
case .roleSelection:
RoleSelectionView()
.transition(.move(edge: .leading))
case .hostSetup:
HostSetupView(
transcriptionManager: transcriptionManager,
multipeerManager: multipeerManager,
fileManager: fileManager
)
.transition(.move(edge: .trailing))
case .soloSetup:
SoloSetupView(
transcriptionManager: transcriptionManager,
fileManager: fileManager
)
.transition(.move(edge: .trailing))
case .guestBrowsing:
GuestBrowserView(
multipeerManager: multipeerManager,
fileManager: fileManager
)
.transition(.move(edge: .trailing))
case .activeSession:
TranscriptView(
transcriptionManager: transcriptionManager,
multipeerManager: multipeerManager,
fileManager: fileManager,
audioManager: audioManager
)
.transition(.opacity)
case .transcriptionHistory:
TranscriptionHistoryView(fileManager: fileManager)
.transition(.move(edge: .trailing))
}
}
.animation(.easeInOut(duration: 0.3), value: sessionState.appPhase)
.environmentObject(settings)
.onAppear {
// Wire callbacks once the view appears; also begins battery monitoring
// so the host-battery-low alert path has data.
setupCallbacks()
audioManager.startBatteryMonitoring()
}
}
// MARK: - Setup
/// Installs all closures connecting the managers. Host-side transcription
/// callbacks broadcast over multipeer (unless solo mode); guest-side
/// multipeer callbacks update session state and the transcript file.
/// sessionState/transcriptionManager are captured weakly so stored closures
/// do not keep them alive past the view's lifetime.
private func setupCallbacks() {
// Handle transcription results (Host)
transcriptionManager.onPartialResult = { [weak sessionState] text in
guard let sessionState = sessionState else { return }
// Broadcast partial result to guests (skip if solo mode)
if !sessionState.isSoloMode {
multipeerManager.sendLiveChunk(
text: text,
eventName: sessionState.eventName,
isFinal: false
)
}
}
transcriptionManager.onFinalResult = { [weak sessionState] text in
guard let sessionState = sessionState else { return }
// Add to local state
Task { @MainActor in
sessionState.addSegment(TranscriptSegment(text: text, isFinal: true))
}
// Save to file
fileManager.appendText(text)
// Broadcast to guests (skip if solo mode)
if !sessionState.isSoloMode {
multipeerManager.sendLiveChunk(
text: text,
eventName: sessionState.eventName,
isFinal: true
)
}
}
transcriptionManager.onSessionResumed = { [weak sessionState] in
guard let sessionState = sessionState else { return }
Task { @MainActor in
sessionState.insertResumedMarker()
}
// Broadcast resume marker to guests (skip if solo mode)
if !sessionState.isSoloMode {
multipeerManager.sendAlert(
type: .sessionResumed,
eventName: sessionState.eventName
)
}
// Also append to file
let formatter = DateFormatter()
formatter.timeStyle = .short
let marker = "[Session Resumed at \(formatter.string(from: Date()))]"
fileManager.appendMarker(marker)
}
// Handle received packets (Guest)
multipeerManager.onPacketReceived = { [weak sessionState, weak transcriptionManager] packet in
guard let sessionState = sessionState, let transcriptionManager = transcriptionManager else { return }
Task { @MainActor in
switch packet.type {
case .fullHistory:
if let text = packet.text {
sessionState.loadFullHistory(text)
// Sync start time from host (using relative duration to avoid clock skew)
if let duration = packet.currentSessionDuration {
let calculatedStartTime = Date().addingTimeInterval(-duration)
sessionState.startTime = calculatedStartTime
}
// Cancel pending appends to avoid race conditions? No, just overwrite.
// Overwrite file to avoid duplication if full history is received multiple times
fileManager.overwriteCurrentFile(with: text, eventName: sessionState.eventName)
}
case .liveChunk:
if let text = packet.text {
// Chunks with no isFinal flag are treated as final.
let isFinal = packet.isFinal ?? true
if isFinal {
// Use updateLastPartialSegment even for final to ensure we replace any existing partial
// This prevents "Ghost" partials from staying on screen alongside the final result
sessionState.updateLastPartialSegment(text, isFinal: true)
fileManager.appendText(text)
} else {
sessionState.updateLastPartialSegment(text, isFinal: false)
}
}
case .alert:
switch packet.alertType {
case .hostDisconnected:
sessionState.showHostLostBanner = true
transcriptionManager.sessionEndTime = Date() // Freeze timer
case .hostBatteryLow:
sessionState.showBatteryWarning = true
case .sessionResumed:
sessionState.insertResumedMarker()
case .none:
break
}
}
}
}
// Handle host lost (Guest)
multipeerManager.onHostLost = { [weak sessionState, weak transcriptionManager] in
guard let sessionState = sessionState, let transcriptionManager = transcriptionManager else { return }
Task { @MainActor in
sessionState.showHostLostBanner = true
sessionState.isConnectedToHost = false
// Freeze the timer
transcriptionManager.sessionEndTime = Date()
}
}
// Handle peer count changes (UI update only)
multipeerManager.onPeerCountChanged = { [weak sessionState] count in
guard let sessionState = sessionState else { return }
Task { @MainActor in
sessionState.connectedPeerCount = count
}
}
// Handle specific peer join (Send History)
multipeerManager.onPeerJoined = { [weak sessionState] peerID in
guard let sessionState = sessionState else { return }
Task { @MainActor in
// If host, send full history to the new peer
if sessionState.userRole == .host {
print("New peer joined: \(peerID.displayName). Sending full history.")
multipeerManager.sendFullHistory(
to: peerID,
text: sessionState.fullTranscriptText,
eventName: sessionState.eventName,
startTime: sessionState.startTime
)
}
}
}
}
}
// Xcode canvas preview with a fresh SessionState (starts at role selection).
#Preview {
ContentView()
.environmentObject(SessionState())
}

17
BeamScribe/Info.plist Normal file
View File

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>com.jaredlog.BeamScribe</string>
<key>NSBonjourServices</key>
<array>
<string>_beamscribe._tcp</string>
<string>_beamscribe._udp</string>
</array>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,141 @@
//
// AudioSessionManager.swift
// BeamScribe
//
// Manages audio session and Bluetooth microphone detection.
//
import Foundation
import AVFoundation
import Combine
import UIKit
/// Manages the shared AVAudioSession: tracks the active input route, offers to
/// switch to an available Bluetooth microphone, and exposes device battery info.
@MainActor
class AudioSessionManager: ObservableObject {
    // MARK: - Published Properties
    // True when a Bluetooth mic is available but not the active input.
    @Published var bluetoothMicAvailable: Bool = false
    // Port name of the current input route (defaults to the built-in mic).
    @Published var currentInputName: String = "Built-in Microphone"
    // Drives the "switch to Bluetooth?" prompt in the UI.
    @Published var showBluetoothPrompt: Bool = false

    // MARK: - Private Properties
    private let audioSession = AVAudioSession.sharedInstance()
    // Token from the block-based observer API. It MUST be retained and passed to
    // removeObserver(_:) in deinit — `removeObserver(self)` does NOT unregister
    // block-based observers, so the original code leaked the observation.
    private var routeChangeObserver: NSObjectProtocol?

    // MARK: - Initialization
    init() {
        setupRouteChangeNotification()
        checkCurrentInput()
    }

    // MARK: - Setup
    /// Registers for audio-route-change notifications on the main queue.
    private func setupRouteChangeNotification() {
        routeChangeObserver = NotificationCenter.default.addObserver(
            forName: AVAudioSession.routeChangeNotification,
            object: nil,
            queue: .main
        ) { [weak self] notification in
            // Extract Sendable values before async to satisfy Swift 6 requirements
            let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt
            // Bind weak self to local constant for Swift 6 concurrency
            guard let weakSelf = self else { return }
            Task { @MainActor in
                weakSelf.handleRouteChange(reasonValue: reasonValue)
            }
        }
    }

    // MARK: - Input Detection
    /// Refreshes `currentInputName` from the active route, then re-evaluates
    /// Bluetooth-mic availability.
    func checkCurrentInput() {
        let currentRoute = audioSession.currentRoute
        if let currentInput = currentRoute.inputs.first {
            currentInputName = currentInput.portName
        }
        // Check if Bluetooth mic is available but not active
        checkForAvailableBluetoothMic()
    }

    /// Sets `bluetoothMicAvailable`/`showBluetoothPrompt` when a Bluetooth input
    /// exists in `availableInputs` but is not part of the current route.
    private func checkForAvailableBluetoothMic() {
        guard let availableInputs = audioSession.availableInputs else { return }
        let bluetoothInputs = availableInputs.filter { port in
            port.portType == .bluetoothHFP || port.portType == .bluetoothA2DP
        }
        let currentRoute = audioSession.currentRoute
        let isBluetoothActive = currentRoute.inputs.contains { port in
            port.portType == .bluetoothHFP || port.portType == .bluetoothA2DP
        }
        if !bluetoothInputs.isEmpty && !isBluetoothActive {
            bluetoothMicAvailable = true
            showBluetoothPrompt = true
        } else {
            bluetoothMicAvailable = false
            showBluetoothPrompt = false
        }
    }

    // MARK: - Input Switching
    /// Makes the first available Bluetooth input the preferred input and clears
    /// the prompt state on success; logs and leaves state untouched on failure.
    func switchToBluetoothMic() {
        guard let availableInputs = audioSession.availableInputs else { return }
        if let bluetoothInput = availableInputs.first(where: { $0.portType == .bluetoothHFP || $0.portType == .bluetoothA2DP }) {
            do {
                try audioSession.setPreferredInput(bluetoothInput)
                currentInputName = bluetoothInput.portName
                showBluetoothPrompt = false
                bluetoothMicAvailable = false
            } catch {
                print("Failed to switch to Bluetooth: \(error)")
            }
        }
    }

    /// Hides the Bluetooth prompt without switching inputs.
    func dismissBluetoothPrompt() {
        showBluetoothPrompt = false
    }

    // MARK: - Route Change Handling
    /// Responds to a route change. New devices trigger a Bluetooth re-check;
    /// everything else refreshes the current-input state.
    private func handleRouteChange(reasonValue: UInt?) {
        guard let reasonValue = reasonValue,
              let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
            return
        }
        switch reason {
        case .newDeviceAvailable:
            checkForAvailableBluetoothMic()
        case .oldDeviceUnavailable:
            checkCurrentInput()
        default:
            checkCurrentInput()
        }
    }

    // MARK: - Battery Monitoring
    /// Enables UIDevice battery monitoring (required before reading batteryLevel).
    func startBatteryMonitoring() {
        UIDevice.current.isBatteryMonitoringEnabled = true
    }

    // Current battery level in [0, 1], or -1 when monitoring is disabled/unknown.
    var batteryLevel: Float {
        UIDevice.current.batteryLevel
    }

    // True when below 10%; the >= 0 check excludes the -1 "unknown" sentinel.
    var isBatteryLow: Bool {
        UIDevice.current.batteryLevel < 0.10 && UIDevice.current.batteryLevel >= 0
    }

    deinit {
        // Remove the block-based observer via its token (removeObserver(self)
        // would be a no-op for observers registered with addObserver(forName:)).
        if let routeChangeObserver {
            NotificationCenter.default.removeObserver(routeChangeObserver)
        }
    }
}

View File

@@ -0,0 +1,298 @@
//
// FileStorageManager.swift
// BeamScribe
//
// Handles file persistence and PDF export.
//
import Foundation
import PDFKit
import UIKit
import Combine
@MainActor
class FileStorageManager: ObservableObject {
    // MARK: - Published Properties

    /// Name of the transcript file currently being written, if any.
    @Published var currentFileName: String?

    // MARK: - Private Properties

    private var currentFileURL: URL?
    private let fileManager = FileManager.default

    // MARK: - Date Formatting

    /// Formats the current date with the given format string.
    /// Centralizes the DateFormatter setup previously duplicated per call site.
    private func currentDateString(format: String) -> String {
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = format
        return dateFormatter.string(from: Date())
    }

    // MARK: - File Creation

    /// Creates a transcript file named "<event> - <date>.txt" in Documents,
    /// writing a header only if the file does not already exist. Records it as
    /// the current file and returns its URL.
    func createTranscriptFile(eventName: String) throws -> URL {
        let documentsDirectory = try fileManager.url(
            for: .documentDirectory,
            in: .userDomainMask,
            appropriateFor: nil,
            create: true
        )
        let dateString = currentDateString(format: "MMM d yyyy")
        let fileName = "\(eventName) - \(dateString).txt"
        let fileURL = documentsDirectory.appendingPathComponent(fileName)
        // Create empty file if it doesn't exist
        if !fileManager.fileExists(atPath: fileURL.path) {
            let header = "Transcript: \(eventName)\nDate: \(dateString)\n\n"
            try header.write(to: fileURL, atomically: true, encoding: .utf8)
        }
        currentFileURL = fileURL
        currentFileName = fileName
        return fileURL
    }

    // MARK: - Overwriting

    /// Overwrites the current file with new text (used for full history sync).
    /// No-op when no file is active; write failures are logged, not thrown.
    func overwriteCurrentFile(with text: String, eventName: String) {
        guard let fileURL = currentFileURL else { return }
        do {
            let dateString = currentDateString(format: "MMM d yyyy")
            let header = "Transcript: \(eventName)\nDate: \(dateString)\n\n"
            let content = header + text
            try content.write(to: fileURL, atomically: true, encoding: .utf8)
        } catch {
            print("Failed to overwrite file: \(error)")
        }
    }

    // MARK: - Appending Text

    /// Appends a raw string to the current file. The handle is closed via
    /// `defer` so it cannot leak if a later call throws. No-op when no file
    /// is active.
    private func appendToCurrentFile(_ string: String) throws {
        guard let fileURL = currentFileURL else { return }
        let fileHandle = try FileHandle(forWritingTo: fileURL)
        defer { try? fileHandle.close() }
        fileHandle.seekToEndOfFile()
        if let data = string.data(using: .utf8) {
            fileHandle.write(data)
        }
    }

    /// Appends text (plus a trailing space) to the current transcript file.
    func appendText(_ text: String) {
        do {
            try appendToCurrentFile(text + " ")
        } catch {
            // Silently fail - file operations are best-effort
        }
    }

    /// Appends a marker (like session resumed) to the file on its own line.
    func appendMarker(_ marker: String) {
        do {
            try appendToCurrentFile("\n\(marker)\n")
        } catch {
            print("Failed to append marker: \(error)")
        }
    }

    // MARK: - Reading

    /// Reads the full content of the current transcript file.
    func readCurrentTranscript() -> String? {
        guard let fileURL = currentFileURL else { return nil }
        do {
            return try String(contentsOf: fileURL, encoding: .utf8)
        } catch {
            print("Failed to read transcript: \(error)")
            return nil
        }
    }

    // MARK: - PDF Export

    /// Renders the transcript into a single-page US Letter PDF in Documents
    /// and returns its URL.
    /// NOTE(review): content longer than one page is clipped by `draw(in:)` —
    /// confirm whether multi-page output is required.
    func exportToPDF(eventName: String, content: String) throws -> URL {
        let documentsDirectory = try fileManager.url(
            for: .documentDirectory,
            in: .userDomainMask,
            appropriateFor: nil,
            create: true
        )
        // Note: this format includes a comma, unlike the ".txt" filenames above.
        let dateString = currentDateString(format: "MMM d, yyyy")
        let pdfFileName = "Transcript - \(eventName) - \(dateString).pdf"
        let pdfURL = documentsDirectory.appendingPathComponent(pdfFileName)
        // Create PDF
        let pdfMetaData = [
            kCGPDFContextCreator: "BeamScribe",
            kCGPDFContextTitle: "Transcript - \(eventName)"
        ]
        let format = UIGraphicsPDFRendererFormat()
        format.documentInfo = pdfMetaData as [String: Any]
        let pageWidth: CGFloat = 612 // US Letter
        let pageHeight: CGFloat = 792
        let pageRect = CGRect(x: 0, y: 0, width: pageWidth, height: pageHeight)
        let margin: CGFloat = 50
        let renderer = UIGraphicsPDFRenderer(bounds: pageRect, format: format)
        let data = renderer.pdfData { context in
            context.beginPage()
            // Title
            let titleFont = UIFont.boldSystemFont(ofSize: 18)
            let titleAttributes: [NSAttributedString.Key: Any] = [
                .font: titleFont,
                .foregroundColor: UIColor.black
            ]
            let title = "Transcript: \(eventName)"
            let subtitle = "Date: \(dateString)"
            title.draw(at: CGPoint(x: margin, y: margin), withAttributes: titleAttributes)
            let subtitleFont = UIFont.systemFont(ofSize: 12)
            let subtitleAttributes: [NSAttributedString.Key: Any] = [
                .font: subtitleFont,
                .foregroundColor: UIColor.gray
            ]
            subtitle.draw(at: CGPoint(x: margin, y: margin + 25), withAttributes: subtitleAttributes)
            // Content
            let contentFont = UIFont.systemFont(ofSize: 12)
            let paragraphStyle = NSMutableParagraphStyle()
            paragraphStyle.lineSpacing = 4
            let contentAttributes: [NSAttributedString.Key: Any] = [
                .font: contentFont,
                .foregroundColor: UIColor.black,
                .paragraphStyle: paragraphStyle
            ]
            let textRect = CGRect(
                x: margin,
                y: margin + 60,
                width: pageWidth - (margin * 2),
                height: pageHeight - (margin * 2) - 60
            )
            content.draw(in: textRect, withAttributes: contentAttributes)
        }
        try data.write(to: pdfURL)
        return pdfURL
    }

    // MARK: - Session Continuity

    /// Saves current session info for restoration.
    /// NOTE(review): persisting an absolute path is fragile on iOS — the app
    /// container path can change across app updates. Consider storing only the
    /// file name and rebuilding the Documents URL on restore.
    func saveSessionInfo(eventName: String, fileURL: URL) {
        UserDefaults.standard.set(eventName, forKey: "lastEventName")
        UserDefaults.standard.set(fileURL.path, forKey: "lastFilePath")
    }

    /// Restores the previous session if its file still exists; nil otherwise.
    func restoreSession() -> (eventName: String, fileURL: URL)? {
        guard let eventName = UserDefaults.standard.string(forKey: "lastEventName"),
              let filePath = UserDefaults.standard.string(forKey: "lastFilePath") else {
            return nil
        }
        let fileURL = URL(fileURLWithPath: filePath)
        if fileManager.fileExists(atPath: filePath) {
            currentFileURL = fileURL
            currentFileName = fileURL.lastPathComponent
            return (eventName, fileURL)
        }
        return nil
    }

    /// Clears saved session info (when starting new).
    func clearSessionInfo() {
        UserDefaults.standard.removeObject(forKey: "lastEventName")
        UserDefaults.standard.removeObject(forKey: "lastFilePath")
        currentFileURL = nil
        currentFileName = nil
    }

    // MARK: - Guest Mode

    /// Creates a file for a guest receiving a transcript. The received history
    /// replaces the header that `createTranscriptFile` may have written.
    func createGuestTranscriptFile(eventName: String, fullHistory: String) throws -> URL {
        let fileURL = try createTranscriptFile(eventName: eventName)
        // Write the full history
        try fullHistory.write(to: fileURL, atomically: true, encoding: .utf8)
        return fileURL
    }

    // MARK: - Transcription History

    /// Lists all saved ".txt" transcripts in Documents, newest first.
    func listSavedTranscriptions() -> [URL] {
        do {
            let documentsDirectory = try fileManager.url(
                for: .documentDirectory,
                in: .userDomainMask,
                appropriateFor: nil,
                create: false
            )
            let files = try fileManager.contentsOfDirectory(
                at: documentsDirectory,
                includingPropertiesForKeys: [.contentModificationDateKey, .fileSizeKey],
                options: .skipsHiddenFiles
            )
            // Filter for .txt files and sort by modification date (newest first)
            return files
                .filter { $0.pathExtension == "txt" }
                .sorted { url1, url2 in
                    let date1 = (try? url1.resourceValues(forKeys: [.contentModificationDateKey]))?.contentModificationDate ?? Date.distantPast
                    let date2 = (try? url2.resourceValues(forKeys: [.contentModificationDateKey]))?.contentModificationDate ?? Date.distantPast
                    return date1 > date2
                }
        } catch {
            print("Failed to list transcriptions: \(error)")
            return []
        }
    }

    /// Reads the content of a transcription file at the given URL.
    func readTranscription(at url: URL) -> String? {
        do {
            return try String(contentsOf: url, encoding: .utf8)
        } catch {
            return nil
        }
    }

    /// Deletes a transcription file at the given URL.
    func deleteTranscription(at url: URL) throws {
        try fileManager.removeItem(at: url)
    }
}

View File

@@ -0,0 +1,492 @@
//
// MultipeerManager.swift
// BeamScribe
//
// Handles Multipeer Connectivity for Host/Guest networking.
//
import Foundation
import MultipeerConnectivity
import Combine
import UIKit
/// Host/Guest networking over Multipeer Connectivity. A host advertises its
/// event name; guests browse, invite themselves, and retry with exponential
/// backoff when the handshake stalls or drops early. All state is main-actor
/// isolated; delegate callbacks hop onto the main actor (see extensions below).
@MainActor
class MultipeerManager: NSObject, ObservableObject {
    // MARK: - Published Properties
    @Published var availableHosts: [MCPeerID: String] = [:] // PeerID -> Event Name
    @Published var connectedPeers: [MCPeerID] = []
    @Published var isHosting: Bool = false
    @Published var isBrowsing: Bool = false
    @Published var isConnectedToHost: Bool = false
    @Published var connectionStatus: String = "" // Visible status for UI debugging

    // Internal set of peers that have been connected long enough to be considered stable
    private var stablePeers: Set<MCPeerID> = []

    // MARK: - Callbacks
    var onPacketReceived: ((TranscriptPacket) -> Void)?
    var onPeerCountChanged: ((Int) -> Void)?
    var onPeerJoined: ((MCPeerID) -> Void)?
    var onHostLost: (() -> Void)?
    var onConnectionFailed: ((MCPeerID) -> Void)? // Called when all retry attempts exhausted

    // MARK: - Private Properties
    private let serviceType = "beamscribe"
    private var peerID: MCPeerID!
    private var session: MCSession!
    private var advertiser: MCNearbyServiceAdvertiser?
    private var browser: MCNearbyServiceBrowser?
    private var eventName: String = ""
    private var hostPeerID: MCPeerID?

    // MARK: - Connection Stability Configuration
    /// Timeout for peer invitation (increased for mixed network scenarios)
    private let invitationTimeout: TimeInterval = 30 // Was 15s, increased for 5G+WiFi
    /// Delay before considering a peer connection stable
    private let stabilizationDelay: UInt64 = 4_000_000_000 // 4 seconds (increased for mixed 5G+WiFi networks)
    /// Delay before stopping browser after connection stabilizes
    /// NOTE(review): appears unused in this file — the session delegate now
    /// deliberately keeps the browser running. Confirm and remove if dead.
    private let browserStopDelay: UInt64 = 2_000_000_000 // 2 seconds
    /// Maximum retry attempts for failed connections
    private let maxRetryAttempts = 3
    /// Time threshold to consider a connection "unstable" if it drops
    private let unstableConnectionThreshold: TimeInterval = 5.0

    // MARK: - Connection Retry State
    // Keyed by displayName rather than MCPeerID because setupPeerID() mints a
    // fresh MCPeerID on each start, so the ID is not stable across attempts.
    private var connectionAttempts: [String: Int] = [:] // displayName -> attempt count
    private var connectionStartTimes: [String: Date] = [:] // displayName -> when .connecting started
    private var connectedAtTimes: [String: Date] = [:] // displayName -> when .connected happened
    private var pendingRetry: [String: Bool] = [:] // displayName -> is retry pending
    private var connectionTimeoutTask: Task<Void, Never>? // Watchdog for stuck connections

    // MARK: - Initialization
    override init() {
        super.init()
        setupPeerID()
    }

    /// Creates a fresh peer identity and session with it as the local peer.
    /// Re-invoked on every host/browse start so stale session state cannot leak
    /// into a new connection attempt.
    private func setupPeerID() {
        let deviceName = UIDevice.current.name
        peerID = MCPeerID(displayName: deviceName)
        // Keep .none encryption as requested
        session = MCSession(peer: peerID, securityIdentity: nil, encryptionPreference: .none)
        session.delegate = self
    }

    // MARK: - Host Mode

    /// Starts advertising this device as a host, embedding the event name in
    /// the discovery info so browsers can display it before connecting.
    func startHosting(eventName: String) {
        // Stop any existing advertising first
        stopHosting()
        self.eventName = eventName
        print("Starting hosting for event: \(eventName)")
        // Recreate identity and session to ensure fresh state (Fixes Re-Join issue)
        // This generates a new underlying MCPeerID so the host treats us as a new connection
        setupPeerID()
        // Include event name in discovery info so guests see it
        let discoveryInfo = ["eventName": eventName]
        advertiser = MCNearbyServiceAdvertiser(
            peer: peerID,
            discoveryInfo: discoveryInfo,
            serviceType: serviceType
        )
        advertiser?.delegate = self
        advertiser?.startAdvertisingPeer()
        isHosting = true
    }

    /// Stops advertising and tears down the session (disconnects all peers).
    func stopHosting() {
        print("Stopping hosting")
        advertiser?.stopAdvertisingPeer()
        advertiser = nil
        session.disconnect()
        isHosting = false
    }

    // MARK: - Guest Mode

    /// Starts browsing for nearby hosts with a fresh identity and empty host list.
    func startBrowsing() {
        // Stop any existing browser first
        stopBrowsing()
        print("Starting browsing")
        // Clear any stale data
        availableHosts.removeAll()
        // Recreate identity and session to ensure fresh state (Fixes Re-Join issue)
        setupPeerID()
        browser = MCNearbyServiceBrowser(peer: peerID, serviceType: serviceType)
        browser?.delegate = self
        browser?.startBrowsingForPeers()
        isBrowsing = true
    }

    /// Stops browsing WITHOUT disconnecting the session.
    func stopBrowsing() {
        browser?.stopBrowsingForPeers()
        browser = nil
        // NOTE: Do NOT call session.disconnect() here - it kills active connections!
        // Session disconnect is only done in the full disconnect() method
        isBrowsing = false
    }

    /// Invites the given host into our session and arms the watchdog that
    /// detects a handshake that never produces any session callback.
    func joinHost(_ hostPeer: MCPeerID) {
        let peerName = hostPeer.displayName
        let attempt = (connectionAttempts[peerName] ?? 0) + 1
        connectionAttempts[peerName] = attempt
        connectionStatus = "Joining \(peerName) (attempt \(attempt)/\(maxRetryAttempts))..."
        self.hostPeerID = hostPeer
        connectionStartTimes[peerName] = Date()
        // Include device name and attempt count in context for host-side logging
        let context = try? JSONEncoder().encode(["name": UIDevice.current.name, "attempt": String(attempt)])
        browser?.invitePeer(hostPeer, to: session, withContext: context, timeout: invitationTimeout)
        // Start watchdog timer - if no session callback fires within timeout, trigger retry
        startConnectionWatchdog(for: hostPeer, attempt: attempt)
    }

    /// Watchdog timer that triggers retry if connection stalls (no session callbacks).
    /// Updates `connectionStatus` every 5s while waiting; bails out early if the
    /// target host changed or the connection succeeded in the meantime.
    private func startConnectionWatchdog(for peer: MCPeerID, attempt: Int) {
        // Cancel any existing watchdog
        connectionTimeoutTask?.cancel()
        let peerName = peer.displayName
        let watchdogTimeout = invitationTimeout + 5 // Give 5s buffer beyond invitation timeout
        connectionTimeoutTask = Task { @MainActor in
            // Update status periodically while waiting
            for elapsed in stride(from: 5, through: Int(watchdogTimeout), by: 5) {
                try? await Task.sleep(nanoseconds: 5_000_000_000) // 5 seconds
                guard !Task.isCancelled else { return }
                guard self.hostPeerID?.displayName == peerName else { return }
                guard !self.isConnectedToHost else {
                    self.connectionStatus = "Connected!"
                    return
                }
                self.connectionStatus = "Waiting for response... (\(elapsed)s)"
            }
            // Timeout reached - check if still trying to connect
            guard !Task.isCancelled else { return }
            guard self.hostPeerID?.displayName == peerName else { return }
            guard !self.isConnectedToHost else { return }
            // Connection timed out without any session callback
            self.connectionStatus = "Connection timed out, retrying..."
            // Trigger retry
            self.retryJoinHost(peer)
        }
    }

    /// Cancel the connection watchdog (called when connection succeeds or user cancels)
    private func cancelConnectionWatchdog() {
        connectionTimeoutTask?.cancel()
        connectionTimeoutTask = nil
    }

    /// Retry connecting to host after a delay with exponential backoff.
    /// Gives up (and fires `onConnectionFailed`) once `maxRetryAttempts` is hit
    /// or the host is no longer discoverable.
    private func retryJoinHost(_ hostPeer: MCPeerID) {
        let peerName = hostPeer.displayName
        let attempts = connectionAttempts[peerName] ?? 0
        guard attempts < maxRetryAttempts else {
            pendingRetry[peerName] = false
            onConnectionFailed?(hostPeer)
            return
        }
        // Exponential backoff: 1s, 2s, 4s
        let backoffSeconds = pow(2.0, Double(attempts - 1))
        pendingRetry[peerName] = true
        Task { @MainActor in
            try? await Task.sleep(nanoseconds: UInt64(backoffSeconds * 1_000_000_000))
            // Verify we still want to retry (user might have cancelled)
            guard self.pendingRetry[peerName] == true,
                  self.hostPeerID?.displayName == peerName else {
                return
            }
            // Find the current MCPeerID for this host (might have changed)
            if let currentPeer = self.availableHosts.keys.first(where: { $0.displayName == peerName }) {
                self.joinHost(currentPeer)
            } else {
                self.pendingRetry[peerName] = false
                self.onConnectionFailed?(hostPeer)
            }
        }
    }

    /// Cancel any pending retry for a peer
    func cancelRetry(for peerName: String) {
        pendingRetry[peerName] = false
        connectionAttempts[peerName] = 0
    }

    // MARK: - Data Transmission

    /// Sends a packet reliably to every peer that has passed the stabilization
    /// window ("Traffic Gate"); silently drops the packet when no peer is stable.
    func broadcastPacket(_ packet: TranscriptPacket) {
        // Traffic Gate: Only send to peers that have stabilized
        let targets = connectedPeers.filter { stablePeers.contains($0) }
        guard !targets.isEmpty else { return }
        do {
            let data = try packet.encode()
            try session.send(data, toPeers: targets, with: .reliable)
        } catch {
            print("Failed to broadcast packet: \(error)")
        }
    }

    /// Sends the entire transcript history to one newly joined (stable) peer,
    /// including the elapsed session duration when a start time is known.
    func sendFullHistory(to peer: MCPeerID, text: String, eventName: String, startTime: Date?) {
        var duration: TimeInterval? = nil
        if let start = startTime {
            duration = Date().timeIntervalSince(start)
        }
        let packet = TranscriptPacket(
            type: .fullHistory,
            text: text,
            eventName: eventName,
            currentSessionDuration: duration
        )
        do {
            // Traffic Gate Check
            guard session.connectedPeers.contains(peer), stablePeers.contains(peer) else {
                return
            }
            let data = try packet.encode()
            try session.send(data, toPeers: [peer], with: .reliable)
        } catch {
            print("Failed to send full history: \(error)")
        }
    }

    /// Broadcasts an incremental transcript chunk to all stable peers.
    func sendLiveChunk(text: String, eventName: String, isFinal: Bool) {
        let packet = TranscriptPacket(
            type: .liveChunk,
            text: text,
            eventName: eventName,
            isFinal: isFinal
        )
        broadcastPacket(packet)
    }

    /// Broadcasts an alert (no text payload) to all stable peers.
    func sendAlert(type: AlertType, eventName: String) {
        let packet = TranscriptPacket(
            type: .alert,
            text: nil,
            eventName: eventName,
            alertType: type
        )
        broadcastPacket(packet)
    }

    // MARK: - Disconnect

    /// Full teardown: stops advertising, browsing, the watchdog, and the
    /// session, then resets all published connection state.
    func disconnect() {
        print("Disconnecting all peers")
        cancelConnectionWatchdog()
        stopHosting()
        stopBrowsing()
        session.disconnect()
        connectedPeers.removeAll()
        availableHosts.removeAll()
        isConnectedToHost = false
        hostPeerID = nil
        connectionStatus = ""
    }
}
// MARK: - MCSessionDelegate
// Session callbacks arrive off the main thread; each handler hops onto the
// main actor before touching any published/actor-isolated state.
extension MultipeerManager: MCSessionDelegate {
    /// Tracks the peer lifecycle: arms the stabilization window on connect,
    /// and on disconnect decides between retrying the host (early/unstable
    /// drop) and declaring the host lost.
    nonisolated func session(_ session: MCSession, peer peerID: MCPeerID, didChange state: MCSessionState) {
        Task { @MainActor in
            let peerName = peerID.displayName
            switch state {
            case .connected:
                // Cancel watchdog - we got a callback
                self.cancelConnectionWatchdog()
                self.connectionStatus = "Connected to \(peerName)!"
                // Record when connection was established
                self.connectedAtTimes[peerName] = Date()
                // Clear retry state on successful connection
                self.pendingRetry[peerName] = false
                if !self.connectedPeers.contains(peerID) {
                    self.connectedPeers.append(peerID)
                    // Stabilization task with increased delays
                    Task { @MainActor in
                        // Wait for stabilization (4s) before considering the connection stable
                        try? await Task.sleep(nanoseconds: self.stabilizationDelay)
                        guard self.connectedPeers.contains(peerID) else {
                            return
                        }
                        // IMPORTANT: Do NOT stop the browser - keeping it active maintains the AWDL radio
                        // which is critical for connection stability on mixed 5G+WiFi networks
                        if self.connectedPeers.contains(peerID) {
                            self.stablePeers.insert(peerID)
                            self.onPeerJoined?(peerID)
                        }
                    }
                }
                if peerID == self.hostPeerID {
                    self.isConnectedToHost = true
                }
                self.onPeerCountChanged?(self.connectedPeers.count)
            case .notConnected:
                // NOTE(review): wasConnected/wasStable are computed but never
                // read below — candidates for removal in a follow-up.
                let wasConnected = self.connectedPeers.contains(peerID)
                let wasStable = self.stablePeers.contains(peerID)
                // Check if this is an early disconnect (within unstable threshold)
                var shouldRetry = false
                if let connectedAt = self.connectedAtTimes[peerName] {
                    let connectionDuration = Date().timeIntervalSince(connectedAt)
                    // If disconnected within 5 seconds of connecting, consider it unstable and retry
                    if connectionDuration < self.unstableConnectionThreshold && peerID == self.hostPeerID {
                        shouldRetry = true
                    }
                } else if peerID == self.hostPeerID {
                    // Never connected successfully, should retry
                    shouldRetry = true
                }
                // Clean up state
                self.connectedPeers.removeAll { $0 == peerID }
                self.stablePeers.remove(peerID)
                self.connectedAtTimes.removeValue(forKey: peerName)
                if peerID == self.hostPeerID {
                    self.isConnectedToHost = false
                    // Attempt retry if applicable
                    if shouldRetry && self.pendingRetry[peerName] != true {
                        self.retryJoinHost(peerID)
                    } else if !shouldRetry {
                        self.onHostLost?()
                    }
                }
                self.onPeerCountChanged?(self.connectedPeers.count)
            case .connecting:
                // Record when connecting started (for first attempt)
                if self.connectionStartTimes[peerName] == nil {
                    self.connectionStartTimes[peerName] = Date()
                }
            @unknown default:
                break
            }
        }
    }

    /// Decodes an incoming TranscriptPacket and forwards it to the app layer.
    nonisolated func session(_ session: MCSession, didReceive data: Data, fromPeer peerID: MCPeerID) {
        Task { @MainActor in
            do {
                let packet = try TranscriptPacket.decode(from: data)
                self.onPacketReceived?(packet)
            } catch {
                print("Failed to decode packet from \(peerID.displayName): \(error)")
            }
        }
    }

    // Streams and resources are not used by this app; log-only implementations.
    nonisolated func session(_ session: MCSession, didReceive stream: InputStream, withName streamName: String, fromPeer peerID: MCPeerID) {
        print("Received stream from \(peerID.displayName): \(streamName)")
    }

    nonisolated func session(_ session: MCSession, didStartReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, with progress: Progress) {
        print("Started receiving resource from \(peerID.displayName): \(resourceName)")
    }

    nonisolated func session(_ session: MCSession, didFinishReceivingResourceWithName resourceName: String, fromPeer peerID: MCPeerID, at localURL: URL?, withError error: Error?) {
        if let error = error {
            print("Error receiving resource from \(peerID.displayName): \(error)")
        } else {
            print("Finished receiving resource from \(peerID.displayName): \(resourceName)")
        }
    }
}
// MARK: - MCNearbyServiceAdvertiserDelegate
extension MultipeerManager: MCNearbyServiceAdvertiserDelegate {
    /// Auto-accepts every incoming invitation (open-join model, per spec).
    /// Accepting must happen promptly — any artificial delay risks the guest's
    /// invitation timing out before the handshake completes.
    nonisolated func advertiser(_ advertiser: MCNearbyServiceAdvertiser, didReceiveInvitationFromPeer peerID: MCPeerID, withContext context: Data?, invitationHandler: @escaping (Bool, MCSession?) -> Void) {
        // Auto-accept all invitations (as per spec)
        Task { @MainActor in
            print("Received invitation from peer: \(peerID.displayName). Accepting immediately (Removing delay to prevent timeout).")
            // No Delay: Accept immediately to catch the handshake request before it times out.
            // Stability is handled post-connection by the Traffic Gate (4s silence) and Delayed Browser Stop.
            invitationHandler(true, self.session)
        }
    }

    /// Log-only: advertising could not start (e.g. missing Info.plist keys).
    nonisolated func advertiser(_ advertiser: MCNearbyServiceAdvertiser, didNotStartAdvertisingPeer error: Error) {
        print("Failed to start advertising: \(error)")
    }
}
// MARK: - MCNearbyServiceBrowserDelegate
extension MultipeerManager: MCNearbyServiceBrowserDelegate {
    /// Records a discovered host, keyed by its peer ID and labeled with the
    /// event name from the discovery info (falling back to the device name).
    nonisolated func browser(_ browser: MCNearbyServiceBrowser, foundPeer peerID: MCPeerID, withDiscoveryInfo info: [String : String]?) {
        Task { @MainActor in
            let eventName = info?["eventName"] ?? peerID.displayName
            print("Found peer: \(peerID.displayName) with event: \(eventName)")
            // A restarted host shows up with a brand-new MCPeerID (and possibly
            // a new event name), so purge any prior entries that share this
            // display name before inserting the fresh one.
            let staleKeys = self.availableHosts.keys.filter { $0.displayName == peerID.displayName }
            for staleKey in staleKeys {
                self.availableHosts.removeValue(forKey: staleKey)
            }
            self.availableHosts[peerID] = eventName
        }
    }

    /// Drops a host from the discovery list when it stops advertising or
    /// moves out of range.
    nonisolated func browser(_ browser: MCNearbyServiceBrowser, lostPeer peerID: MCPeerID) {
        Task { @MainActor in
            print("Lost peer: \(peerID.displayName)")
            self.availableHosts.removeValue(forKey: peerID)
        }
    }

    /// Log-only: browsing could not start (e.g. missing Info.plist keys).
    nonisolated func browser(_ browser: MCNearbyServiceBrowser, didNotStartBrowsingForPeers error: Error) {
        print("Failed to start browsing: \(error)")
    }
}

View File

@@ -0,0 +1,149 @@
//
// SubscriptionManager.swift
// BeamScribe
//
// Handles In-App Purchases and Subscription status.
//
import Foundation
import StoreKit
import Combine
/// Owns the StoreKit 2 purchase flow and publishes the user's premium status.
@MainActor
class SubscriptionManager: ObservableObject {
    /// True when the user holds an active, non-revoked entitlement.
    @Published var isPremium: Bool = false
    /// Products fetched from the App Store for display in the paywall.
    @Published var products: [Product] = []
    @Published var isLoading: Bool = false
    /// Human-readable fetch failure, surfaced to the UI.
    @Published var fetchError: String?

    private let productIds: [String] = ["unlock_unlimited_time"]
    /// Long-lived transaction-update listener; cancelled in deinit.
    private var updates: Task<Void, Never>? = nil

    init() {
        updates = newTransactionListenerTask()
        Task {
            await updatePurchasedProducts()
            await fetchProducts()
        }
    }

    deinit {
        updates?.cancel()
    }

    // MARK: - Fetch Products

    /// Loads the configured products from the App Store, publishing any failure.
    func fetchProducts() async {
        isLoading = true
        fetchError = nil
        do {
            let products = try await Product.products(for: productIds)
            self.products = products
            if products.isEmpty {
                fetchError = "No products found. Check StoreKit configuration."
            }
        } catch {
            print("Failed to fetch products: \(error)")
            fetchError = error.localizedDescription
        }
        isLoading = false
    }

    // MARK: - Purchase

    /// Starts a purchase; on verified success, refreshes entitlements and
    /// finishes the transaction. Cancelled/pending results are ignored.
    func purchase(_ product: Product) async throws {
        let result = try await product.purchase()
        switch result {
        case .success(let verification):
            // Check whether the transaction is verified. If it isn't,
            // this function rethrows the verification error.
            let transaction = try checkVerified(verification)
            // The transaction is verified. Deliver content to the user.
            await updatePurchasedProducts()
            // Always finish a transaction.
            await transaction.finish()
        case .userCancelled, .pending:
            break
        @unknown default:
            break
        }
    }

    /// Forces an App Store sync and re-evaluates entitlements.
    func restorePurchases() async {
        // In StoreKit 2, you generally don't need a dedicated restore button for
        // validating current entitlements, as `currentEntitlements` is always up to date.
        // However, `AppStore.sync()` can be used to force a sync if needed.
        try? await AppStore.sync()
        await updatePurchasedProducts()
    }

    // MARK: - Listeners

    /// Builds the background listener for out-of-band transaction updates.
    /// FIX: capture `self` weakly. `Transaction.updates` never finishes, so the
    /// previous strong capture retained this manager forever — `deinit` (and
    /// its `updates?.cancel()`) could never run, leaking the manager.
    private func newTransactionListenerTask() -> Task<Void, Never> {
        Task.detached { [weak self] in
            // Iterate through any transactions that don't come from a direct call to `purchase()`.
            for await result in Transaction.updates {
                // Stop listening once the manager has been deallocated.
                guard let self else { return }
                do {
                    let transaction = try self.checkVerified(result)
                    // Deliver content to the user.
                    await self.updatePurchasedProducts()
                    // Always finish a transaction.
                    await transaction.finish()
                } catch {
                    // StoreKit has a transaction that fails verification. Don't deliver content to the user.
                    print("Transaction failed verification")
                }
            }
        }
    }

    /// Recomputes `isPremium` from the user's current entitlements.
    func updatePurchasedProducts() async {
        var hasActiveSubscription = false
        // Iterate through all of the user's purchased products.
        for await result in Transaction.currentEntitlements {
            do {
                // Check whether the transaction is verified. If it isn't,
                // this function rethrows the verification error.
                let transaction = try checkVerified(result)
                // Check if this is the subscription we care about
                if productIds.contains(transaction.productID) {
                    // Is it still valid? (StoreKit 2 usually only returns valid entitlements here,
                    // but checking revocation date is good practice)
                    if transaction.revocationDate == nil {
                        hasActiveSubscription = true
                    }
                }
            } catch {
                print("Failed to verify transaction in currentEntitlements")
            }
        }
        self.isPremium = hasActiveSubscription
    }

    /// Unwraps a verified StoreKit result or throws `StoreError.failedVerification`.
    nonisolated private func checkVerified<T>(_ result: VerificationResult<T>) throws -> T {
        // Check whether the JWS passes StoreKit verification.
        switch result {
        case .unverified:
            // StoreKit parsing failed verification.
            throw StoreError.failedVerification
        case .verified(let safe):
            // The result is verified. Return the unwrapped value.
            return safe
        }
    }
}
/// Errors surfaced by `SubscriptionManager`.
enum StoreError: Error {
    /// A StoreKit JWS payload failed local verification.
    case failedVerification
}

View File

@@ -0,0 +1,431 @@
//
// TranscriptionManager.swift
// BeamScribe
//
// Handles real-time speech-to-text transcription using SFSpeechRecognizer.
//
import Foundation
import Speech
import AVFoundation
import Combine
@MainActor
class TranscriptionManager: ObservableObject {
// MARK: - Published Properties
@Published var isAuthorized: Bool = false
@Published var isTranscribing: Bool = false
@Published var isOfflineMode: Bool = false // Tracks current mode for UI display if needed
@Published var partialResult: String = ""
@Published var errorMessage: String?
@Published var sessionEndTime: Date?
// MARK: - Callbacks
var onPartialResult: ((String) -> Void)?
var onFinalResult: ((String) -> Void)?
var onSessionResumed: (() -> Void)?
// MARK: - Private Properties
private let speechRecognizer: SFSpeechRecognizer?
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?
private var audioEngine = AVAudioEngine()
private var wasInterrupted = false
private var shouldContinueTranscribing = false // Controls auto-restart
private var lastPartialLength: Int = 0 // Track length to detect silent resets
private var lastSavedText: String = "" // Track last saved text to prevent duplication
// MARK: - Initialization
/// Creates the recognizer pinned to US English and registers for audio-session
/// interruption notifications (handler defined elsewhere in this file).
/// Note: `speechRecognizer` is nil if the locale is unsupported on this device.
init() {
    self.speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
    setupInterruptionHandling()
}
// MARK: - Authorization
/// Requests speech-recognition and microphone permissions in sequence.
/// Publishes an error message and returns false as soon as either is denied;
/// sets `isAuthorized` and returns true when both are granted.
func requestAuthorization() async -> Bool {
    // Speech-recognition permission first.
    let authStatus: SFSpeechRecognizerAuthorizationStatus = await withCheckedContinuation { cont in
        SFSpeechRecognizer.requestAuthorization { cont.resume(returning: $0) }
    }
    if authStatus != .authorized {
        errorMessage = "Speech recognition not authorized"
        return false
    }
    // Then microphone permission (the API moved to AVAudioApplication in iOS 17).
    let micGranted: Bool
    if #available(iOS 17.0, *) {
        micGranted = await AVAudioApplication.requestRecordPermission()
    } else {
        micGranted = await withCheckedContinuation { cont in
            AVAudioSession.sharedInstance().requestRecordPermission { cont.resume(returning: $0) }
        }
    }
    if !micGranted {
        errorMessage = "Microphone access not authorized"
        return false
    }
    isAuthorized = true
    return true
}
// MARK: - Session Duration Properties
private var sessionDurationTimer: Timer?
private var sessionStartTime: Date?
private let timeLimit: TimeInterval = 300 // 5 minutes
// ...
// MARK: - Transcription Control
/// Begins a transcription session.
/// - Parameters:
///   - isPremium: free users are capped at `timeLimit` unless overridden.
///   - timeLimitOverride: optional explicit cap in seconds; takes precedence
///     over the premium/free default.
/// - Throws: `TranscriptionError.recognizerUnavailable` when the recognizer
///   is missing or currently unavailable.
func startTranscribing(isPremium: Bool, timeLimitOverride: TimeInterval? = nil) throws {
    guard let speechRecognizer = speechRecognizer, speechRecognizer.isAvailable else {
        throw TranscriptionError.recognizerUnavailable
    }
    // Reset limit check
    self.sessionStartTime = Date()
    self.sessionEndTime = nil
    self.shouldContinueTranscribing = true
    // Start Timer
    startDurationTimer(isPremium: isPremium, overrideLimit: timeLimitOverride)
    // Start the actual recognition (Default to Online)
    try startRecognitionSession(allowOnline: true)
}
/// Arms a 1-second repeating timer that ends the session once the allowed
/// duration elapses. Premium users with no override get no timer at all.
/// When the limit is hit, stops transcription (method defined elsewhere in
/// this file), publishes the limit message, and invalidates the timer.
private func startDurationTimer(isPremium: Bool, overrideLimit: TimeInterval?) {
    sessionDurationTimer?.invalidate()
    // Use override if present, otherwise default to 5 minutes only if not premium
    let actualLimit: TimeInterval
    if let override = overrideLimit {
        actualLimit = override
    } else if !isPremium {
        actualLimit = self.timeLimit
    } else {
        // Premium user with no override -> No timer
        return
    }
    sessionDurationTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
        // Timer fires on the run loop, not the main actor — hop before
        // touching actor-isolated state.
        Task { @MainActor [weak self] in
            guard let self = self, let startTime = self.sessionStartTime else { return }
            let elapsed = Date().timeIntervalSince(startTime)
            if elapsed >= actualLimit {
                self.stopTranscribing()
                self.errorMessage = "Time limit reached. Subscribe to continue."
                self.sessionDurationTimer?.invalidate()
            }
        }
    }
}
    /// Internal method to start/restart a recognition session.
    ///
    /// Tears down any existing session, rebuilds the audio engine and audio session,
    /// installs a mic tap that feeds an `SFSpeechAudioBufferRecognitionRequest`, and
    /// handles three tricky lifecycles inside the result callback:
    ///   1. Online → offline fallback when a server-backed attempt errors out.
    ///   2. "Silent Reset Detection": Apple's recognizer can silently reset after
    ///      ~60 seconds, which shows up as a large drop in the partial text length.
    ///   3. Auto-restart of a fresh session while `shouldContinueTranscribing` is true.
    /// - Parameter allowOnline: If true, attempts to use Apple's servers. If false, forces on-device.
    /// - Throws: `TranscriptionError.recognizerUnavailable` or `.requestCreationFailed`,
    ///   or any error from audio-session configuration / engine start.
    private func startRecognitionSession(allowOnline: Bool = true) throws {
        guard let speechRecognizer = speechRecognizer, speechRecognizer.isAvailable else {
            throw TranscriptionError.recognizerUnavailable
        }
        // Stop any existing session without clearing the continue flag
        stopRecognitionSession()
        // Create a fresh audio engine to avoid stale formats (fixes Simulator crash)
        audioEngine = AVAudioEngine()
        // Reset per-session tracking used by Silent Reset Detection below
        lastPartialLength = 0
        lastSavedText = ""
        // Update published mode so the UI can reflect online/offline status
        if allowOnline {
            self.isOfflineMode = false
        } else {
            self.isOfflineMode = true
        }
        // Configure audio session for background audio support
        let audioSession = AVAudioSession.sharedInstance()
        // CarPlay-friendly: .mixWithOthers allows transcription without lowering FM radio/NPR volume
        try audioSession.setCategory(
            .playAndRecord,
            mode: .measurement,
            options: [.mixWithOthers, .allowBluetoothHFP, .allowBluetoothA2DP, .defaultToSpeaker]
        )
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        // Create recognition request
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else {
            throw TranscriptionError.requestCreationFailed
        }
        // Configure recognition mode logic
        if allowOnline {
            // Try Online Mode
            recognitionRequest.requiresOnDeviceRecognition = false
        } else {
            // Force Offline Mode
            // Simulator and some devices don't support on-device recognition
            if speechRecognizer.supportsOnDeviceRecognition {
                recognitionRequest.requiresOnDeviceRecognition = true
            } else {
                // If requested offline but not supported, we must fallback to online implicitly
                // or throw an error. In this case, let's fallback to false which might be redundant but safe.
                recognitionRequest.requiresOnDeviceRecognition = false
                print("Warning: On-Device recognition requested but not supported on this device. Fallback to online.")
            }
        }
        recognitionRequest.shouldReportPartialResults = true
        recognitionRequest.addsPunctuation = true
        // Get input node and validate format
        let inputNode = audioEngine.inputNode
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        // Install tap on input node: every mic buffer is appended to the request
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, _ in
            self?.recognitionRequest?.append(buffer)
        }
        // Start recognition task. All result handling hops to the main actor.
        recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { [weak self] result, error in
            Task { @MainActor in
                guard let self = self else { return }
                var sessionEnded = false
                // Handle Errors & Retries
                if let error = error {
                    // Check if we should fallback (if we were trying online and it failed)
                    if allowOnline {
                        print("Online transcription failed: \(error.localizedDescription). Retrying offline.")
                        // We must stop the current engine/request before retrying
                        self.stopRecognitionSession()
                        // Retry with on-device only
                        // Add a small delay to ensure cleanup completes
                        Task {
                            try? await Task.sleep(nanoseconds: 500_000_000) // 0.5 sec
                            if self.shouldContinueTranscribing {
                                try? self.startRecognitionSession(allowOnline: false)
                            }
                        }
                        return
                    }
                    // Check if it's just a cancellation to ignore
                    // (kAFAssistantErrorDomain 216 is the recognizer's cancellation code)
                    let nsError = error as NSError
                    if nsError.domain == "kAFAssistantErrorDomain" && nsError.code == 216 {
                        // Recognition was cancelled by user, not an error
                        return
                    }
                    // Save partial results before error exit so text isn't lost
                    if !self.partialResult.isEmpty {
                        let textToSave = self.partialResult
                        self.onFinalResult?(textToSave)
                        self.partialResult = ""
                    }
                    sessionEnded = true
                }
                // Handle Success Results
                if let result = result {
                    let text = result.bestTranscription.formattedString
                    if result.isFinal {
                        self.partialResult = ""
                        sessionEnded = true
                        let trimmedText = text.trimmingCharacters(in: .whitespacesAndNewlines)
                        // Only save if it's different from what we just saved via Silent Reset Detection
                        // This prevents duplication when Apple's ~60-second session ends
                        if !trimmedText.isEmpty && trimmedText != self.lastSavedText {
                            // Add newline for paragraph break when session restarts
                            self.onFinalResult?("\n" + text)
                            self.lastSavedText = trimmedText
                        }
                        // Reset for next session
                        self.lastSavedText = ""
                    } else {
                        // Silent Reset Detection
                        // Apple's speech recognizer can silently reset after ~60 seconds.
                        // When this happens, the accumulated text drops dramatically.
                        // We detect this by watching for a 50%+ drop in text length.
                        // Note: We avoid triggering on small corrections - only on significant drops.
                        let previousLength = self.lastPartialLength
                        let newLength = text.count
                        let previousText = self.partialResult
                        // Only trigger if previous text was substantial (>50 chars) AND new text is less than half
                        // This prevents false triggers during Apple's word correction phase
                        let isSignificantDrop = previousLength > 50 && newLength < previousLength / 2
                        if isSignificantDrop {
                            let trimmedPrevious = previousText.trimmingCharacters(in: .whitespacesAndNewlines)
                            if !trimmedPrevious.isEmpty {
                                // Add a newline to create paragraph breaks between speech segments
                                self.onFinalResult?("\n" + previousText)
                                self.lastSavedText = trimmedPrevious
                            }
                        }
                        self.partialResult = text
                        self.lastPartialLength = newLength
                        self.onPartialResult?(text)
                    }
                }
                // Auto-restart logic (retains current mode)
                if sessionEnded && self.shouldContinueTranscribing {
                    Task {
                        try? await Task.sleep(nanoseconds: 100_000_000)
                        if self.shouldContinueTranscribing {
                            try? self.startRecognitionSession(allowOnline: allowOnline)
                        }
                    }
                }
            }
        }
        // Start audio engine
        try audioEngine.start()
        isTranscribing = true
    }
/// Stops the current recognition session without affecting auto-restart
private func stopRecognitionSession() {
audioEngine.stop()
audioEngine.inputNode.removeTap(onBus: 0)
recognitionRequest?.endAudio()
recognitionRequest = nil
recognitionTask?.cancel()
recognitionTask = nil
}
/// Completely stops transcription (user action)
func stopTranscribing() {
self.sessionEndTime = Date()
shouldContinueTranscribing = false // Prevent auto-restart
sessionDurationTimer?.invalidate() // Stop timer
// Save any remaining partial result before stopping
if !partialResult.isEmpty {
let textToSave = partialResult.trimmingCharacters(in: .whitespacesAndNewlines)
if !textToSave.isEmpty {
onFinalResult?(partialResult)
}
partialResult = ""
}
stopRecognitionSession()
isTranscribing = false
}
// MARK: - Interruption Handling
private func setupInterruptionHandling() {
NotificationCenter.default.addObserver(
forName: AVAudioSession.interruptionNotification,
object: nil,
queue: .main
) { [weak self] notification in
// Extract Sendable values before async to satisfy Swift 6 requirements
let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt
let optionsValue = notification.userInfo?[AVAudioSessionInterruptionOptionKey] as? UInt
// Bind weak self to local constant for Swift 6 concurrency
guard let weakSelf = self else { return }
Task { @MainActor in
weakSelf.handleInterruption(typeValue: typeValue, optionsValue: optionsValue)
}
}
}
private func handleInterruption(typeValue: UInt?, optionsValue: UInt?) {
guard let typeValue = typeValue,
let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
return
}
switch type {
case .began:
// Interruption began (phone call, etc.)
wasInterrupted = true
// Stop session but preserve shouldContinueTranscribing for later resume
stopRecognitionSession()
isTranscribing = false
case .ended:
// Interruption ended
guard wasInterrupted else { return }
wasInterrupted = false
// Check if we should resume
if let optionsValue = optionsValue {
let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
if options.contains(.shouldResume) && shouldContinueTranscribing {
// Resume transcription
Task {
try? await Task.sleep(nanoseconds: 500_000_000) // 0.5 second delay
try? self.startRecognitionSession()
self.isTranscribing = true
self.onSessionResumed?()
}
}
}
@unknown default:
break
}
}
    deinit {
        // Unregister the audio-session interruption observer added in
        // setupInterruptionHandling().
        NotificationCenter.default.removeObserver(self)
    }
}
// MARK: - Errors

/// Failures that can occur while configuring or running speech transcription.
enum TranscriptionError: Error, LocalizedError {
    /// The speech recognizer is missing or currently unavailable.
    case recognizerUnavailable
    /// `SFSpeechAudioBufferRecognitionRequest` could not be created.
    case requestCreationFailed
    /// The user has not granted speech-recognition permission.
    case notAuthorized
    /// The audio engine could not be configured or started.
    case audioEngineError

    /// Human-readable description surfaced to the UI.
    var errorDescription: String? {
        switch self {
        case .recognizerUnavailable: return "Speech recognizer is not available"
        case .requestCreationFailed: return "Failed to create recognition request"
        case .notAuthorized:         return "Speech recognition not authorized"
        case .audioEngineError:      return "Audio engine configuration failed"
        }
    }
}

View File

@@ -0,0 +1,150 @@
//
// SessionState.swift
// BeamScribe
//
// Central app state management for the transcription session.
//
import Foundation
import SwiftUI
import Combine
import MultipeerConnectivity
/// User's role in the session.
/// `String` raw values keep the value readable when encoded over the wire.
enum UserRole: String, Codable {
    case host   // Device that transcribes (and usually broadcasts)
    case guest  // Device that receives the transcript
}
/// Current phase of the app flow; drives which top-level screen is shown.
enum AppPhase {
    case roleSelection          // Initial host/guest/solo/history chooser
    case hostSetup              // Host names the event before broadcasting
    case soloSetup // For transcribe-only mode
    case guestBrowsing          // Guest scans for nearby hosts
    case activeSession          // Live transcription (any role)
    case transcriptionHistory   // Browse past saved transcripts
}
/// Observable state container for the entire app.
///
/// Fix: the session-start and reset methods contained copy-pasted duplicate
/// assignments (`isSoloMode`, `transcriptSegments`, `isTranscribing` each set
/// twice in a row), causing redundant `@Published` change notifications. The
/// duplicates are removed; observable behavior after each call is unchanged.
class SessionState: ObservableObject {
    // MARK: - Session Info
    /// Current role; nil until the user picks one on the role-selection screen.
    @Published var userRole: UserRole?
    /// User-visible name of the session/event.
    @Published var eventName: String = ""
    /// Which top-level screen is active.
    @Published var appPhase: AppPhase = .roleSelection

    // MARK: - Transcript Data
    /// Ordered transcript segments (partials and finals) for display.
    @Published var transcriptSegments: [TranscriptSegment] = []
    /// Accumulated final transcript text (used for export and late joiners).
    @Published var fullTranscriptText: String = ""

    // MARK: - Connection Status
    @Published var connectedPeerCount: Int = 0
    @Published var isConnectedToHost: Bool = false

    // MARK: - Alerts
    @Published var showHostLostBanner: Bool = false
    @Published var showBatteryWarning: Bool = false
    @Published var lastResumeTimestamp: Date?
    @Published var startTime: Date?

    // MARK: - Settings
    @Published var keepAwakeEnabled: Bool = true
    @Published var showSettings: Bool = false

    // MARK: - Session Control
    @Published var isTranscribing: Bool = false
    @Published var isSoloMode: Bool = false // Transcribe only without broadcasting

    // MARK: - Methods

    /// Start a new host session with the given event name.
    func startHostSession(eventName: String) {
        self.eventName = eventName
        self.userRole = .host
        self.appPhase = .activeSession
        self.isTranscribing = true
        self.isSoloMode = false
        self.transcriptSegments = []
        self.fullTranscriptText = ""
        self.startTime = Date()
    }

    /// Start a solo transcription session (no broadcasting).
    func startSoloSession(eventName: String) {
        self.eventName = eventName
        self.userRole = .host // Solo uses host role but doesn't broadcast
        self.appPhase = .activeSession
        self.isTranscribing = true
        self.isSoloMode = true
        self.transcriptSegments = []
        self.fullTranscriptText = ""
        self.startTime = Date()
    }

    /// Join a guest session with the given event name.
    func joinGuestSession(eventName: String) {
        self.eventName = eventName
        self.userRole = .guest
        self.appPhase = .activeSession
        self.isConnectedToHost = true
        // Clear any previous transcript data (e.g., from a previous host session)
        self.transcriptSegments = []
        self.fullTranscriptText = ""
        self.startTime = Date()
    }

    /// Add a new transcript segment (for live chunks).
    /// Only final segments are folded into `fullTranscriptText`.
    func addSegment(_ segment: TranscriptSegment) {
        transcriptSegments.append(segment)
        if segment.isFinal {
            fullTranscriptText += segment.text + " "
        }
    }

    /// Replace the last partial segment with a new one.
    func updateLastPartialSegment(_ text: String, isFinal: Bool) {
        // Remove the last partial segment if one exists
        if let lastIndex = transcriptSegments.lastIndex(where: { !$0.isFinal }) {
            transcriptSegments.remove(at: lastIndex)
        }
        addSegment(TranscriptSegment(text: text, isFinal: isFinal))
    }

    /// Load full history from host (for late joiners); replaces local segments.
    func loadFullHistory(_ text: String) {
        fullTranscriptText = text
        transcriptSegments = [TranscriptSegment(text: text, isFinal: true)]
    }

    /// Insert a "[Session Resumed at …]" marker into the transcript.
    func insertResumedMarker() {
        let formatter = DateFormatter()
        formatter.timeStyle = .short
        let timeString = formatter.string(from: Date())
        let marker = "\n[Session Resumed at \(timeString)]\n"
        addSegment(TranscriptSegment(text: marker, isFinal: true))
        lastResumeTimestamp = Date()
    }

    /// Reset to initial state (back to the role-selection screen).
    func reset() {
        userRole = nil
        eventName = ""
        appPhase = .roleSelection
        transcriptSegments = []
        fullTranscriptText = ""
        connectedPeerCount = 0
        isConnectedToHost = false
        showHostLostBanner = false
        showBatteryWarning = false
        isTranscribing = false
        isSoloMode = false
        startTime = nil
    }
}

View File

@@ -0,0 +1,102 @@
//
// SettingsModel.swift
// BeamScribe
//
// Manages user preferences for transcription appearance.
//
import SwiftUI
import Combine
/// Visual themes for the transcript display.
/// Raw values are the user-visible names shown in the theme picker.
enum AppTheme: String, CaseIterable, Identifiable {
    case standard = "Default"
    case dark = "Dark"
    case lightbulb = "Lightbulb"

    var id: String { rawValue }

    /// Background color behind the transcript text.
    var backgroundColor: Color {
        // Only the standard theme uses a light background.
        self == .standard ? .white : .black
    }

    /// Foreground color of the transcript text.
    var textColor: Color {
        switch self {
        case .standard:  return .black
        case .dark:      return .white
        case .lightbulb: return .yellow
        }
    }
}
/// Font families the user can pick for transcript text.
/// Raw values are the user-visible names shown in the font picker.
enum AppFont: String, CaseIterable, Identifiable {
    case system = "System"
    case rounded = "Rounded"
    case serif = "Serif"
    case monospaced = "Monospaced"
    case markerFelt = "Marker Felt"

    var id: String { rawValue }

    /// Builds a SwiftUI `Font` of the given point size in this family.
    func font(size: CGFloat) -> Font {
        // Marker Felt is a named custom font; all others map to system designs.
        if self == .markerFelt {
            return .custom("MarkerFelt-Thin", size: size)
        }
        let design: Font.Design
        switch self {
        case .rounded:    design = .rounded
        case .serif:      design = .serif
        case .monospaced: design = .monospaced
        default:          design = .default
        }
        return .system(size: size, design: design)
    }
}
/// Persists the user's appearance preferences (theme, font, size) in UserDefaults
/// and republishes them to SwiftUI views.
class SettingsModel: ObservableObject {
    // UserDefaults keys — must stay stable across releases.
    private static let themeKey = "selectedTheme"
    private static let fontKey = "selectedFont"
    private static let sizeKey = "fontSize"

    /// Selected color theme; written through to UserDefaults on change.
    @Published var selectedTheme: AppTheme {
        didSet { UserDefaults.standard.set(selectedTheme.rawValue, forKey: Self.themeKey) }
    }

    /// Selected font family; written through to UserDefaults on change.
    @Published var selectedFont: AppFont {
        didSet { UserDefaults.standard.set(selectedFont.rawValue, forKey: Self.fontKey) }
    }

    /// Transcript font size in points; written through to UserDefaults on change.
    @Published var fontSize: Double {
        didSet { UserDefaults.standard.set(fontSize, forKey: Self.sizeKey) }
    }

    init() {
        let themeRaw = UserDefaults.standard.string(forKey: Self.themeKey) ?? ""
        selectedTheme = AppTheme(rawValue: themeRaw) ?? .standard

        let fontRaw = UserDefaults.standard.string(forKey: Self.fontKey) ?? ""
        selectedFont = AppFont(rawValue: fontRaw) ?? .system

        // double(forKey:) returns 0 when the key is unset; treat that as "use the default".
        let storedSize = UserDefaults.standard.double(forKey: Self.sizeKey)
        fontSize = storedSize > 0 ? storedSize : 18.0
    }

    /// Theme background color, exposed for convenient view access.
    var backgroundColor: Color { selectedTheme.backgroundColor }

    /// Theme text color, exposed for convenient view access.
    var textColor: Color { selectedTheme.textColor }

    /// Returns the selected font at `size`, defaulting to the stored `fontSize`.
    func font(size: CGFloat? = nil) -> Font {
        selectedFont.font(size: size ?? fontSize)
    }
}

View File

@@ -0,0 +1,71 @@
//
// TranscriptPacket.swift
// BeamScribe
//
// Network packet model for Multipeer Connectivity data transfer.
//
import Foundation
/// Packet types for network communication.
/// Backed by `String` so the discriminator stays human-readable in the JSON wire format.
enum PacketType: String, Codable {
    /// Full transcript history sent to late joiners
    case fullHistory
    /// Real-time incremental text chunk
    case liveChunk
    /// System alerts (host disconnected, battery low, etc.)
    case alert
}
/// Alert types for system notifications carried in `.alert` packets.
enum AlertType: String, Codable {
    case hostDisconnected  // The host left or dropped off the network
    case hostBatteryLow    // The host's battery is running low
    case sessionResumed    // Transcription resumed after an interruption
}
/// Main data structure exchanged over Multipeer Connectivity.
/// Encoded as JSON; `timestamp` is stamped at construction time.
struct TranscriptPacket: Codable {
    let type: PacketType
    let text: String?                         // Payload text (nil for pure alerts)
    let eventName: String                     // Session this packet belongs to
    let timestamp: Date                       // Set when the packet is created
    let isFinal: Bool?                        // liveChunk only: true = final, false = partial
    let alertType: AlertType?                 // alert packets only
    let currentSessionDuration: TimeInterval? // Relative duration so guests can sync their timer

    init(type: PacketType,
         text: String?,
         eventName: String,
         isFinal: Bool? = nil,
         alertType: AlertType? = nil,
         currentSessionDuration: TimeInterval? = nil) {
        self.type = type
        self.text = text
        self.eventName = eventName
        self.timestamp = Date()
        self.isFinal = isFinal
        self.alertType = alertType
        self.currentSessionDuration = currentSessionDuration
    }

    /// Serializes the packet as JSON for network transmission.
    func encode() throws -> Data {
        return try JSONEncoder().encode(self)
    }

    /// Deserializes a packet received from the network.
    static func decode(from data: Data) throws -> TranscriptPacket {
        return try JSONDecoder().decode(TranscriptPacket.self, from: data)
    }
}
/// Represents a segment of transcript text with styling info.
/// `isFinal` distinguishes settled text from in-flight partial recognition.
struct TranscriptSegment: Identifiable, Codable {
    let id: UUID
    let text: String
    let isFinal: Bool
    let timestamp: Date

    /// Creates a segment stamped with a fresh identity and the current time.
    init(text: String, isFinal: Bool) {
        id = UUID()
        self.text = text
        self.isFinal = isFinal
        timestamp = Date()
    }
}

View File

@@ -0,0 +1,234 @@
//
// GuestBrowserView.swift
// BeamScribe
//
// Displays list of nearby hosts for guests to join.
//
import SwiftUI
import MultipeerConnectivity
/// Guest-side discovery screen: browses for nearby hosts, shows them as
/// tappable rows, and transitions into the active session once connected.
struct GuestBrowserView: View {
    @EnvironmentObject var sessionState: SessionState
    @ObservedObject var multipeerManager: MultipeerManager
    @ObservedObject var fileManager: FileStorageManager
    @State private var isConnecting: Bool = false        // A join attempt is in flight
    @State private var selectedHost: MCPeerID?           // Host the user tapped
    @State private var connectionFailed: Bool = false    // Shows the retry banner
    var body: some View {
        ZStack {
            // Background
            LinearGradient(
                colors: [Color.purple.opacity(0.05), Color.blue.opacity(0.05)],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
            .ignoresSafeArea()
            VStack(spacing: 24) {
                // Header
                VStack(spacing: 8) {
                    Image(systemName: "antenna.radiowaves.left.and.right")
                        .font(.system(size: 50))
                        .foregroundColor(.purple)
                        .symbolEffect(.variableColor.iterative.reversing, options: .repeating)
                    Text("Finding Sessions...")
                        .font(.title)
                        .fontWeight(.bold)
                    Text("Looking for nearby hosts")
                        .font(.subheadline)
                        .foregroundColor(.secondary)
                }
                .padding(.top, 40)
                // Connection Failed Alert
                if connectionFailed {
                    HStack {
                        Image(systemName: "exclamationmark.triangle.fill")
                            .foregroundColor(.orange)
                        Text("Connection failed. Tap to try again.")
                            .font(.callout)
                            .foregroundColor(.primary)
                    }
                    .padding()
                    .background(Color.orange.opacity(0.15))
                    .cornerRadius(12)
                    .padding(.horizontal, 20)
                }
                // Host List — spinner while empty, otherwise one HostRow per discovered peer.
                // NOTE(review): availableHosts appears to map MCPeerID -> advertised event name.
                if multipeerManager.availableHosts.isEmpty {
                    Spacer()
                    VStack(spacing: 16) {
                        ProgressView()
                            .scaleEffect(1.5)
                        Text("Searching for active sessions")
                            .font(.callout)
                            .foregroundColor(.secondary)
                        Text("Make sure the host has started transcribing")
                            .font(.caption)
                            .foregroundColor(.secondary)
                            .multilineTextAlignment(.center)
                    }
                    Spacer()
                } else {
                    ScrollView {
                        LazyVStack(spacing: 12) {
                            ForEach(Array(multipeerManager.availableHosts.keys), id: \.self) { peerID in
                                HostRow(
                                    eventName: multipeerManager.availableHosts[peerID] ?? "Unknown",
                                    deviceName: peerID.displayName,
                                    isConnecting: isConnecting && selectedHost == peerID,
                                    statusMessage: selectedHost == peerID ? multipeerManager.connectionStatus : ""
                                ) {
                                    joinHost(peerID)
                                }
                            }
                        }
                        .padding(.horizontal, 20)
                    }
                }
                // Cancel Button — abandons browsing and returns to role selection.
                Button(action: {
                    // Cancel any pending retries
                    if let host = selectedHost {
                        multipeerManager.cancelRetry(for: host.displayName)
                    }
                    multipeerManager.stopBrowsing()
                    withAnimation {
                        sessionState.appPhase = .roleSelection
                    }
                }) {
                    Text("Cancel")
                        .foregroundColor(.secondary)
                        .padding(.vertical, 16)
                }
                .padding(.bottom, 20)
            }
        }
        .onAppear {
            multipeerManager.startBrowsing()
            setupConnectionFailedHandler()
        }
        .onDisappear {
            // Keep browsing alive when transitioning into the active session.
            if sessionState.appPhase != .activeSession {
                multipeerManager.stopBrowsing()
            }
        }
        .onChange(of: multipeerManager.isConnectedToHost) { oldValue, newValue in
            if newValue, let host = selectedHost,
               let eventName = multipeerManager.availableHosts[host] {
                // Connected successfully
                // Note: We do NOT call stopBrowsing() here anymore because it kills the session.
                // The MultipeerManager handles the radio shutdown safely after stabilization.
                // Create a transcript file for the guest immediately
                _ = try? fileManager.createTranscriptFile(eventName: eventName)
                sessionState.joinGuestSession(eventName: eventName)
                withAnimation {
                    sessionState.appPhase = .activeSession
                }
            }
        }
    }
    /// Records the tapped host and starts the multipeer join handshake.
    private func joinHost(_ peerID: MCPeerID) {
        selectedHost = peerID
        isConnecting = true
        connectionFailed = false
        multipeerManager.joinHost(peerID)
    }
    /// Wires the manager's failure callback back into this view's @State
    /// (only for failures of the host the user actually selected).
    private func setupConnectionFailedHandler() {
        multipeerManager.onConnectionFailed = { [self] failedPeer in
            Task { @MainActor in
                if failedPeer == selectedHost {
                    isConnecting = false
                    connectionFailed = true
                }
            }
        }
    }
}
// MARK: - Host Row

/// One tappable row in the guest browser representing a nearby host session.
struct HostRow: View {
    let eventName: String       // Session name advertised by the host
    let deviceName: String      // Host device's display name
    let isConnecting: Bool      // True while a join attempt to this host is in flight
    let statusMessage: String   // Live connection status shown while connecting
    let action: () -> Void      // Invoked when the row is tapped
    var body: some View {
        Button(action: action) {
            HStack(spacing: 16) {
                // Icon
                ZStack {
                    Circle()
                        .fill(Color.purple.opacity(0.15))
                        .frame(width: 50, height: 50)
                    Image(systemName: "waveform")
                        .font(.title2)
                        .foregroundColor(.purple)
                }
                // Event Info
                VStack(alignment: .leading, spacing: 4) {
                    Text(eventName)
                        .font(.headline)
                        .foregroundColor(.primary)
                    Text(deviceName)
                        .font(.caption)
                        .foregroundColor(.secondary)
                    // Show connection status when connecting
                    if isConnecting && !statusMessage.isEmpty {
                        Text(statusMessage)
                            .font(.caption2)
                            .foregroundColor(.orange)
                            .transition(.opacity)
                    }
                }
                Spacer()
                // Join Indicator: spinner while connecting, arrow otherwise
                if isConnecting {
                    ProgressView()
                } else {
                    Image(systemName: "arrow.right.circle.fill")
                        .font(.title2)
                        .foregroundColor(.purple)
                }
            }
            .padding(16)
            .background(Color(.systemBackground))
            .cornerRadius(16)
            .shadow(color: .black.opacity(0.05), radius: 5, x: 0, y: 2)
        }
        .disabled(isConnecting) // Prevent double-taps while a join is pending
    }
}
// Xcode canvas preview with throwaway manager instances.
#Preview {
    GuestBrowserView(
        multipeerManager: MultipeerManager(),
        fileManager: FileStorageManager()
    )
    .environmentObject(SessionState())
}

View File

@@ -0,0 +1,214 @@
//
// HostSetupView.swift
// BeamScribe
//
// Host enters the event name before starting transcription.
//
import SwiftUI
/// Host-side setup screen: the user names the event; on start, permissions are
/// requested, a transcript file is created, advertising begins, and live
/// transcription is started — in that order, aborting on the first failure.
struct HostSetupView: View {
    @EnvironmentObject var sessionState: SessionState
    @ObservedObject var transcriptionManager: TranscriptionManager
    @ObservedObject var multipeerManager: MultipeerManager
    @ObservedObject var fileManager: FileStorageManager
    @State private var eventName: String = ""
    @State private var isLoading: Bool = false          // True while startSession() is running
    @State private var errorMessage: String?            // Shown below the text field on failure
    @State private var showSubscription: Bool = false   // Presents the subscription sheet
    @FocusState private var isTextFieldFocused: Bool
    @EnvironmentObject var subscriptionManager: SubscriptionManager
    var body: some View {
        ZStack {
            // Background
            LinearGradient(
                colors: [Color.blue.opacity(0.05), Color.purple.opacity(0.05)],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
            .ignoresSafeArea()
            VStack(spacing: 10) {
                // Header
                VStack(spacing: 8) {
                    Image(systemName: "mic.circle.fill")
                        .font(.system(size: 60))
                        .foregroundColor(.blue)
                    Text("New Transcript")
                        .font(.largeTitle)
                        .fontWeight(.bold)
                    Text("Name your session for others to find")
                        .font(.subheadline)
                        .foregroundColor(.secondary)
                        .multilineTextAlignment(.center)
                }
                .padding(.top, 10)
                // Event Name Input — Return key also starts the session.
                VStack(alignment: .leading, spacing: 12) {
                    Text("Event Name")
                        .font(.headline)
                        .foregroundColor(.secondary)
                    TextField("e.g., Biology 101, Museum Tour", text: $eventName)
                        .font(.title3)
                        .padding()
                        .background(Color(.systemBackground))
                        .cornerRadius(12)
                        .overlay(
                            RoundedRectangle(cornerRadius: 12)
                                .stroke(Color.blue.opacity(0.3), lineWidth: 2)
                        )
                        .focused($isTextFieldFocused)
                        .submitLabel(.go)
                        .onSubmit {
                            if !eventName.isEmpty {
                                startSession()
                            }
                        }
                }
                .padding(.horizontal, 24)
                // Error Message
                if let error = errorMessage {
                    Text(error)
                        .font(.caption)
                        .foregroundColor(.red)
                        .padding(.horizontal, 24)
                }
                // Start Button — disabled until a name is entered; shows a spinner while starting.
                VStack(spacing: 16) {
                    Button(action: startSession) {
                        HStack {
                            if isLoading {
                                ProgressView()
                                    .progressViewStyle(CircularProgressViewStyle(tint: .white))
                            } else {
                                Image(systemName: "play.fill")
                                Text("Start Transcribing")
                            }
                        }
                        .font(.headline)
                        .frame(maxWidth: .infinity)
                        .padding(.vertical, 16)
                        .background(
                            eventName.isEmpty ? Color.gray : Color.blue
                        )
                        .foregroundColor(.white)
                        .cornerRadius(12)
                    }
                    .disabled(eventName.isEmpty || isLoading)
                    Button(action: {
                        withAnimation {
                            sessionState.appPhase = .roleSelection
                        }
                    }) {
                        Text("Cancel")
                            .foregroundColor(.secondary)
                    }
                }
                .padding(.horizontal, 24)
                // Subscription Link — copy differs for premium vs. free users.
                Button(action: {
                    showSubscription = true
                }) {
                    if subscriptionManager.isPremium {
                        VStack(spacing: 2) {
                            Text("You have unlimited transcribing time")
                            Text("Click here to manage your subscription.")
                        }
                        .font(.caption)
                        .foregroundColor(.green)
                        .multilineTextAlignment(.center)
                    } else {
                        Text("Free version: limited to 5 minutes. Tap to unlock.")
                            .font(.caption)
                            .foregroundColor(.red)
                            .multilineTextAlignment(.center)
                    }
                }
                .padding(.top, 10)
                .padding(.horizontal, 32)
                .padding(.bottom, 20)
                .sheet(isPresented: $showSubscription) {
                    SubscriptionView()
                }
                Spacer()
            }
        }
        .onAppear {
            // Focus the name field immediately so the keyboard is ready.
            isTextFieldFocused = true
        }
    }
    /// Runs the host startup pipeline: permissions → transcript file →
    /// advertising → transcription → state transition. Each step aborts the
    /// pipeline (clearing `isLoading`, setting `errorMessage`) on failure.
    private func startSession() {
        guard !eventName.isEmpty else { return }
        isLoading = true
        errorMessage = nil
        Task {
            // Request permissions
            let authorized = await transcriptionManager.requestAuthorization()
            guard authorized else {
                errorMessage = "Microphone or speech recognition permission denied"
                isLoading = false
                return
            }
            // Create transcript file
            do {
                let fileURL = try fileManager.createTranscriptFile(eventName: eventName)
                fileManager.saveSessionInfo(eventName: eventName, fileURL: fileURL)
            } catch {
                errorMessage = "Failed to create transcript file"
                isLoading = false
                return
            }
            // Start hosting (advertising)
            multipeerManager.startHosting(eventName: eventName)
            // Start transcription
            do {
                try transcriptionManager.startTranscribing(isPremium: subscriptionManager.isPremium)
            } catch {
                errorMessage = "Failed to start transcription: \(error.localizedDescription)"
                isLoading = false
                return
            }
            // Update session state (also sets appPhase; the withAnimation below
            // re-assigns it to get the animated transition)
            sessionState.startHostSession(eventName: eventName)
            withAnimation {
                sessionState.appPhase = .activeSession
            }
            isLoading = false
        }
    }
}
// Xcode canvas preview with throwaway manager instances.
#Preview {
    HostSetupView(
        transcriptionManager: TranscriptionManager(),
        multipeerManager: MultipeerManager(),
        fileManager: FileStorageManager()
    )
    .environmentObject(SessionState())
}

View File

@@ -0,0 +1,238 @@
//
// RoleSelectionView.swift
// BeamScribe
//
// Initial screen for choosing Host (Speaker) or Guest (Listener) role.
//
import SwiftUI
/// Initial screen for choosing the app mode: host, guest, solo transcription,
/// or history browsing.
///
/// Improvement: the four role buttons shared ~35 lines of identical layout
/// each; that layout is extracted into one private `roleCard` helper so the
/// cards stay visually consistent and a style tweak happens in one place.
/// Rendered output is unchanged.
struct RoleSelectionView: View {
    @EnvironmentObject var sessionState: SessionState

    var body: some View {
        ZStack {
            // Background gradient
            LinearGradient(
                colors: [Color.blue.opacity(0.1), Color.purple.opacity(0.1)],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
            .ignoresSafeArea()

            VStack(spacing: 20) {
                // Reduced top spacer to pull content up
                Spacer().frame(height: 20)

                // App Title
                VStack(spacing: 12) {
                    Image(systemName: "waveform.circle.fill")
                        .font(.system(size: 80))
                        .foregroundStyle(
                            LinearGradient(
                                colors: [.blue, .purple],
                                startPoint: .topLeading,
                                endPoint: .bottomTrailing
                            )
                        )
                    Text("BeamScribe")
                        .font(.largeTitle)
                        .fontWeight(.bold)
                    Text("Real-time speech transcription")
                        .font(.subheadline)
                        .foregroundColor(.secondary)
                }
                .padding(.top, 20)

                Spacer()

                // Role Selection Buttons
                VStack(spacing: 20) {
                    Text("What's your role?")
                        .font(.headline)
                        .foregroundColor(.secondary)

                    roleCard(
                        icon: "antenna.radiowaves.left.and.right",
                        title: "Host Session",
                        subtitle: "Host and broadcast transcript",
                        tint: .blue,
                        destination: .hostSetup
                    )
                    roleCard(
                        icon: "eye.fill",
                        title: "Join Session",
                        subtitle: "Join and receive transcript",
                        tint: .purple,
                        destination: .guestBrowsing
                    )
                    roleCard(
                        icon: "mic.fill",
                        title: "Transcribe Only",
                        subtitle: "Local transcription, no broadcast",
                        tint: .green,
                        destination: .soloSetup
                    )
                    roleCard(
                        icon: "clock.arrow.circlepath",
                        title: "History",
                        subtitle: "Read past sessions",
                        tint: .teal,
                        destination: .transcriptionHistory
                    )
                }
                .padding(.horizontal, 24)

                Spacer()

                // Footer
                Text("up to 7 guests can join a broadcast.")
                    .font(.footnote)
                    .foregroundColor(.secondary)
                    .padding(.bottom, 20)
            }
        }
    }

    /// One large gradient "card" button that animates the app into `destination`.
    /// - Parameters:
    ///   - icon: SF Symbol name shown on the left.
    ///   - title: Primary label.
    ///   - subtitle: Secondary caption under the title.
    ///   - tint: Base color for the gradient background and shadow.
    ///   - destination: App phase to switch to when tapped.
    private func roleCard(
        icon: String,
        title: String,
        subtitle: String,
        tint: Color,
        destination: AppPhase
    ) -> some View {
        Button(action: {
            withAnimation {
                sessionState.appPhase = destination
            }
        }) {
            HStack(spacing: 16) {
                Image(systemName: icon)
                    .font(.system(size: 40))
                VStack(alignment: .leading, spacing: 4) {
                    Text(title)
                        .font(.title2)
                        .fontWeight(.semibold)
                    Text(subtitle)
                        .font(.caption)
                        .foregroundColor(.white.opacity(0.8))
                }
                Spacer()
                Image(systemName: "chevron.right")
                    .font(.title3)
                    .foregroundColor(.white.opacity(0.6))
            }
            .padding(.horizontal, 24)
            .padding(.vertical, 20)
            .frame(maxWidth: .infinity)
            .background(
                LinearGradient(
                    colors: [tint, tint.opacity(0.8)],
                    startPoint: .leading,
                    endPoint: .trailing
                )
            )
            .foregroundColor(.white)
            .cornerRadius(16)
            .shadow(color: tint.opacity(0.3), radius: 10, x: 0, y: 5)
        }
    }
}
// Xcode canvas preview with a fresh SessionState.
#Preview {
    RoleSelectionView()
        .environmentObject(SessionState())
}

View File

@@ -0,0 +1,239 @@
//
// SettingsView.swift
// BeamScribe
//
// Settings sheet with Keep Awake toggle, export, and end session.
//
import SwiftUI
import UIKit
struct SettingsView: View {
@EnvironmentObject var sessionState: SessionState
@ObservedObject var fileManager: FileStorageManager
@ObservedObject var multipeerManager: MultipeerManager
@ObservedObject var transcriptionManager: TranscriptionManager
@EnvironmentObject var settings: SettingsModel
@Environment(\.dismiss) private var dismiss
@State private var showingExportSheet = false
@State private var exportURL: URL?
@State private var showingEndConfirmation = false
@State private var exportError: String?
var body: some View {
NavigationView {
List {
// Session Info
Section {
HStack {
Label("Event", systemImage: "calendar")
Spacer()
Text(sessionState.eventName)
.foregroundColor(.secondary)
}
HStack {
Label("Role", systemImage: "person.fill")
Spacer()
if sessionState.isSoloMode {
Text("Solo (Local Only)")
.foregroundColor(.secondary)
} else {
Text(sessionState.userRole == .host ? "Host (Broadcasting)" : "Guest (Listening)")
.foregroundColor(.secondary)
}
}
if sessionState.userRole == .host && !sessionState.isSoloMode {
HStack {
Label("Listeners", systemImage: "person.2.fill")
Spacer()
Text("\(multipeerManager.connectedPeers.count)")
.foregroundColor(.secondary)
}
}
} header: {
Text("Session")
}
// Settings
if sessionState.userRole == .host {
Section {
Toggle(isOn: $sessionState.keepAwakeEnabled) {
Label("Keep Screen Awake", systemImage: "sun.max.fill")
}
.onChange(of: sessionState.keepAwakeEnabled) { oldValue, newValue in
UIApplication.shared.isIdleTimerDisabled = newValue
}
} header: {
Text("Display")
} footer: {
Text("Prevents the screen from dimming while transcribing.")
}
}
// Appearance
Section {
Picker("Theme", selection: $settings.selectedTheme) {
ForEach(AppTheme.allCases) { theme in
Text(theme.rawValue).tag(theme)
}
}
Picker("Font", selection: $settings.selectedFont) {
ForEach(AppFont.allCases) { font in
Text(font.rawValue).tag(font)
}
}
VStack(alignment: .leading) {
HStack {
Text("Font Size")
Spacer()
Text("\(Int(settings.fontSize)) pts")
.foregroundColor(.secondary)
}
Slider(value: $settings.fontSize, in: 12...48, step: 1)
Text("Preview Text")
.font(settings.font())
.frame(maxWidth: .infinity, alignment: .center)
.padding(.vertical, 4)
.id("fontPreview") // Force redraw if needed
}
} header: {
Text("Appearance")
}
// Export
Section {
Button(action: exportPDF) {
Label("Export as PDF", systemImage: "arrow.up.doc.fill")
}
if let error = exportError {
Text(error)
.font(.caption)
.foregroundColor(.red)
}
} header: {
Text("Export")
} footer: {
Text("Creates a PDF with the transcript so far.")
}
// End Session
Section {
Button(action: {
showingEndConfirmation = true
}) {
Label("End Session", systemImage: "stop.circle.fill")
.foregroundColor(.red)
}
} footer: {
Text("Returns to the home screen. Your transcript is saved locally.")
}
}
.navigationTitle("Settings")
.navigationBarTitleDisplayMode(.inline)
.toolbar {
ToolbarItem(placement: .topBarTrailing) {
Button("Done") {
dismiss()
}
}
}
}
.sheet(isPresented: $showingExportSheet) {
if let url = exportURL {
ShareSheet(activityItems: [url])
}
}
.alert("End Session?", isPresented: $showingEndConfirmation) {
Button("Cancel", role: .cancel) { }
Button("End Session", role: .destructive) {
endSession()
}
} message: {
Text("Your transcript will be saved locally. You can start a new session anytime.")
}
.onAppear {
if sessionState.keepAwakeEnabled {
UIApplication.shared.isIdleTimerDisabled = true
}
}
}
private func exportPDF() {
do {
let url = try fileManager.exportToPDF(
eventName: sessionState.eventName,
content: sessionState.fullTranscriptText
)
exportURL = url
showingExportSheet = true
exportError = nil
} catch {
exportError = "Failed to export: \(error.localizedDescription)"
}
}
private func endSession() {
// Stop transcription (Host)
if sessionState.userRole == .host {
transcriptionManager.stopTranscribing()
}
// Save pending partial text (Guest)
if sessionState.userRole == .guest {
let pendingText = sessionState.transcriptSegments
.filter { !$0.isFinal }
.map { $0.text }
.joined(separator: " ")
if !pendingText.isEmpty {
fileManager.appendText(pendingText)
}
}
// Disconnect networking
multipeerManager.disconnect()
// Clear session info
fileManager.clearSessionInfo()
// Reset state
sessionState.reset()
// Re-enable idle timer
UIApplication.shared.isIdleTimerDisabled = false
dismiss()
}
}
// MARK: - Share Sheet
struct ShareSheet: UIViewControllerRepresentable {
let activityItems: [Any]
func makeUIViewController(context: Context) -> UIActivityViewController {
UIActivityViewController(activityItems: activityItems, applicationActivities: nil)
}
func updateUIViewController(_ uiViewController: UIActivityViewController, context: Context) {}
}
#Preview {
SettingsView(
fileManager: FileStorageManager(),
multipeerManager: MultipeerManager(),
transcriptionManager: TranscriptionManager()
)
.environmentObject(SessionState())
}

View File

@@ -0,0 +1,209 @@
//
// SoloSetupView.swift
// BeamScribe
//
// Setup view for solo transcription (no broadcasting).
//
import SwiftUI
struct SoloSetupView: View {
@EnvironmentObject var sessionState: SessionState
@ObservedObject var transcriptionManager: TranscriptionManager
@ObservedObject var fileManager: FileStorageManager
@EnvironmentObject var subscriptionManager: SubscriptionManager
@State private var eventName: String = ""
@State private var isLoading: Bool = false
@State private var errorMessage: String?
@State private var showSubscription: Bool = false
@FocusState private var isTextFieldFocused: Bool
var body: some View {
ZStack {
// Background gradient
LinearGradient(
colors: [Color.green.opacity(0.1), Color.green.opacity(0.05)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
.ignoresSafeArea()
VStack(spacing: 10) {
// Header
VStack(spacing: 8) {
Image(systemName: "text.badge.plus")
.font(.system(size: 60))
.foregroundColor(.green)
Text("Transcribe Only")
.font(.largeTitle)
.fontWeight(.bold)
Text("Transcribe for yourself without broadcasting")
.font(.subheadline)
.foregroundColor(.secondary)
.multilineTextAlignment(.center)
}
.padding(.top, 10)
// Event Name Input
VStack(alignment: .leading, spacing: 12) {
Text("Session Name")
.font(.headline)
.foregroundColor(.secondary)
TextField("e.g., Meeting Notes, Lecture", text: $eventName)
.font(.title3)
.padding()
.background(Color(.systemBackground))
.cornerRadius(12)
.overlay(
RoundedRectangle(cornerRadius: 12)
.stroke(Color.green.opacity(0.3), lineWidth: 1)
)
.focused($isTextFieldFocused)
.onSubmit {
if !eventName.isEmpty {
startSession()
}
}
}
.padding(.horizontal, 24)
// Error Message
if let error = errorMessage {
Text(error)
.font(.caption)
.foregroundColor(.red)
.padding(.horizontal, 24)
}
// Start Button
VStack(spacing: 16) {
Button(action: startSession) {
HStack {
if isLoading {
ProgressView()
.progressViewStyle(CircularProgressViewStyle(tint: .white))
} else {
Image(systemName: "play.fill")
Text("Start Transcribing")
}
}
.font(.headline)
.frame(maxWidth: .infinity)
.padding()
.background(
LinearGradient(
colors: [.green, .green.opacity(0.8)],
startPoint: .leading,
endPoint: .trailing
)
)
.foregroundColor(.white)
.cornerRadius(12)
}
.disabled(eventName.isEmpty || isLoading)
Button(action: {
withAnimation {
sessionState.appPhase = .roleSelection
}
}) {
Text("Cancel")
.foregroundColor(.secondary)
}
// Subscription Status / Upsell
Button(action: {
showSubscription = true
}) {
if subscriptionManager.isPremium {
VStack(spacing: 2) {
Text("You have unlimited transcribing time")
Text("Click here to manage your subscription.")
}
.font(.caption)
.foregroundColor(.green)
.multilineTextAlignment(.center)
} else {
Text("Free version: limited to 5 minutes. Tap to unlock.")
.font(.caption)
.foregroundColor(.red)
}
}
.padding(.top, 8)
}
.padding(.horizontal, 24)
.padding(.bottom, 40)
Spacer()
}
}
.sheet(isPresented: $showSubscription) {
SubscriptionView()
}
.onAppear {
isTextFieldFocused = true
}
}
private func startSession() {
guard !eventName.isEmpty else { return }
isLoading = true
errorMessage = nil
Task {
// Request permissions
let authorized = await transcriptionManager.requestAuthorization()
guard authorized else {
errorMessage = "Microphone or speech recognition permission denied"
isLoading = false
return
}
// Create transcript file
do {
let fileURL = try fileManager.createTranscriptFile(eventName: eventName)
fileManager.saveSessionInfo(eventName: eventName, fileURL: fileURL)
} catch {
errorMessage = "Failed to create transcript file: \(error.localizedDescription)"
isLoading = false
return
}
// Start transcription (no multipeer hosting!)
do {
try transcriptionManager.startTranscribing(isPremium: subscriptionManager.isPremium)
} catch {
errorMessage = "Failed to start transcription: \(error.localizedDescription)"
isLoading = false
return
}
// Update session state - use solo session
sessionState.startSoloSession(eventName: eventName)
withAnimation {
sessionState.appPhase = .activeSession
}
isLoading = false
}
}
}
#Preview {
SoloSetupView(
transcriptionManager: TranscriptionManager(),
fileManager: FileStorageManager()
)
.environmentObject(SessionState())
}

View File

@@ -0,0 +1,199 @@
//
// SubscriptionView.swift
// BeamScribe
//
// Screen for displaying subscription options and managing status.
//
import SwiftUI
import StoreKit
struct SubscriptionView: View {
@EnvironmentObject var subscriptionManager: SubscriptionManager
@Environment(\.dismiss) var dismiss
var body: some View {
ZStack {
// Background
LinearGradient(
colors: [Color.blue.opacity(0.05), Color.purple.opacity(0.05)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
.ignoresSafeArea()
VStack(spacing: 30) {
// Header
VStack(spacing: 16) {
Image(systemName: "crown.fill")
.font(.system(size: 60))
.foregroundColor(.yellow)
.shadow(color: .orange.opacity(0.5), radius: 10, x: 0, y: 5)
Text("Unlock Unlimited Time")
.font(.largeTitle)
.fontWeight(.bold)
.multilineTextAlignment(.center)
Text("Remove the 5-minute transcription limit and record for as long as you need.")
.font(.body)
.foregroundColor(.secondary)
.multilineTextAlignment(.center)
.padding(.horizontal, 32)
}
.padding(.top, 40)
Spacer()
// Subscription Status / Purchase Options
VStack(spacing: 20) {
if subscriptionManager.isPremium {
// User is Premium
VStack(spacing: 16) {
Image(systemName: "checkmark.seal.fill")
.font(.system(size: 50))
.foregroundColor(.green)
Text("You Have Unlimited Access!")
.font(.title2)
.fontWeight(.semibold)
.foregroundColor(.primary)
Text("Thank you for supporting BeamScribe.")
.foregroundColor(.secondary)
Link("Manage Subscription", destination: URL(string: "https://apps.apple.com/account/subscriptions")!)
.font(.headline)
.foregroundColor(.blue)
.padding(.top, 8)
}
.padding()
.background(Color(.systemBackground))
.cornerRadius(20)
.shadow(color: .black.opacity(0.05), radius: 10, x: 0, y: 5)
} else if let product = subscriptionManager.products.first {
// Product is available
VStack(spacing: 24) {
VStack(spacing: 8) {
Text(product.displayName)
.font(.headline)
Text(product.displayPrice + " / year")
.font(.title)
.fontWeight(.bold)
.foregroundColor(.blue)
Text(product.description)
.font(.caption)
.foregroundColor(.secondary)
}
Button(action: {
Task {
try? await subscriptionManager.purchase(product)
// Explicitly refresh after purchase attempt returns
await subscriptionManager.updatePurchasedProducts()
}
}) {
Text("Subscribe Now")
.font(.headline)
.frame(maxWidth: .infinity)
.padding()
.background(Color.blue)
.foregroundColor(.white)
.cornerRadius(12)
}
}
.padding(24)
.background(Color(.systemBackground))
.cornerRadius(20)
.shadow(color: .black.opacity(0.05), radius: 10, x: 0, y: 5)
} else if subscriptionManager.isLoading {
// Loading products
ProgressView("Loading subscription options...")
.scaleEffect(1.2)
} else {
// Error or Empty State
VStack(spacing: 16) {
Image(systemName: "exclamationmark.triangle")
.font(.largeTitle)
.foregroundColor(.orange)
Text(subscriptionManager.fetchError ?? "Unable to load subscription options.")
.font(.subheadline)
.foregroundColor(.secondary)
.multilineTextAlignment(.center)
Button(action: {
Task {
await subscriptionManager.fetchProducts()
}
}) {
HStack {
Image(systemName: "arrow.clockwise")
Text("Retry")
}
.font(.headline)
.foregroundColor(.blue)
.padding(.vertical, 8)
.padding(.horizontal, 16)
.background(Color.blue.opacity(0.1))
.cornerRadius(8)
}
}
.padding()
}
}
.padding(.horizontal, 24)
Spacer()
// Footer
VStack(spacing: 16) {
if !subscriptionManager.isPremium {
Button("Restore Purchases") {
Task {
await subscriptionManager.restorePurchases()
}
}
.font(.subheadline)
.foregroundColor(.blue)
}
HStack(spacing: 20) {
Link("Privacy Policy", destination: URL(string: "https://docs.google.com/document/d/1daLwvHBXhh5fBZYiyL2-NEiGWQ-Snno-20TChc8LSQU/edit?usp=sharing")!)
Link("Terms of Service", destination: URL(string: "https://www.apple.com/legal/internet-services/itunes/dev/stdeula/")!)
}
.font(.caption)
.foregroundColor(.secondary)
}
.padding(.bottom, 20)
}
// Close Button
VStack {
HStack {
Spacer()
Button(action: {
dismiss()
}) {
Image(systemName: "xmark.circle.fill")
.font(.title)
.foregroundColor(.gray.opacity(0.5))
.padding()
}
}
Spacer()
}
}
}
}
#Preview {
SubscriptionView()
.environmentObject(SubscriptionManager())
}

View File

@@ -0,0 +1,420 @@
//
// TranscriptView.swift
// BeamScribe
//
// Main transcript display with smart auto-scrolling.
//
import SwiftUI
struct TranscriptView: View {
@EnvironmentObject var sessionState: SessionState
@ObservedObject var transcriptionManager: TranscriptionManager
@ObservedObject var multipeerManager: MultipeerManager
@ObservedObject var fileManager: FileStorageManager
@ObservedObject var audioManager: AudioSessionManager
@EnvironmentObject var settings: SettingsModel
@State private var isAutoScrollEnabled: Bool = true
@State private var scrollProxy: ScrollViewProxy?
@State private var lastSegmentID: UUID?
@State private var showingEndConfirmation: Bool = false
@State private var showSubscriptionSheet: Bool = false
var body: some View {
ZStack(alignment: .bottom) {
VStack(spacing: 0) {
// Top Bar
topBar
// Host Lost Banner
if sessionState.showHostLostBanner {
hostLostBanner
}
// Bluetooth Mic Prompt
if audioManager.showBluetoothPrompt && sessionState.userRole == .host {
bluetoothPrompt
}
// Transcript Content
ScrollViewReader { proxy in
ScrollView {
LazyVStack(alignment: .leading, spacing: 8) {
ForEach(sessionState.transcriptSegments) { segment in
transcriptText(for: segment)
.id(segment.id)
}
// Partial result (Host only)
if sessionState.userRole == .host && !transcriptionManager.partialResult.isEmpty {
// Add line breaks after sentences for readability
let formattedText = transcriptionManager.partialResult
.replacingOccurrences(of: ". ", with: ".\n\n")
.replacingOccurrences(of: "? ", with: "?\n\n")
.replacingOccurrences(of: "! ", with: "!\n\n")
Text(formattedText)
.foregroundColor(settings.textColor.opacity(0.7))
.font(settings.font())
.id("partial")
}
// Scroll anchor
Color.clear
.frame(height: 1)
.id("bottom")
}
.padding(.horizontal, 20)
.padding(.vertical, 16)
}
.onAppear {
scrollProxy = proxy
}
.onChange(of: sessionState.transcriptSegments.last?.id) { oldValue, newValue in
if isAutoScrollEnabled {
withAnimation(.easeOut(duration: 0.2)) {
proxy.scrollTo("bottom", anchor: .bottom)
}
}
}
.onChange(of: transcriptionManager.partialResult) { oldValue, newValue in
if isAutoScrollEnabled && !newValue.isEmpty {
withAnimation(.easeOut(duration: 0.2)) {
proxy.scrollTo("partial", anchor: .bottom)
}
}
}
.simultaneousGesture(
DragGesture().onChanged { _ in
// User started scrolling, disable auto-scroll
isAutoScrollEnabled = false
}
)
}
// Session Timer Footer (Moved into VStack to reserve space)
sessionTimerFooter
.padding(.top, 16) // Add separation from transcript
.padding(.bottom, 8)
.background(settings.backgroundColor.opacity(0.9)) // Matches theme background
}
// Jump to Live Button
if !isAutoScrollEnabled {
jumpToLiveButton
.padding(.bottom, 80) // Push up above timer
}
}
.background(settings.backgroundColor.ignoresSafeArea())
.sheet(isPresented: $sessionState.showSettings) {
SettingsView(
fileManager: fileManager,
multipeerManager: multipeerManager,
transcriptionManager: transcriptionManager
)
}
.alert("End Session?", isPresented: $showingEndConfirmation) {
Button("Cancel", role: .cancel) { }
Button("End Session", role: .destructive) {
endSession()
}
} message: {
Text("Your transcript will be saved locally. You can start a new session anytime.")
}
// Error Alert (e.g. Time Limit)
.alert("Transcription Stopped", isPresented: Binding<Bool>(
get: { transcriptionManager.errorMessage != nil },
set: { if !$0 { transcriptionManager.errorMessage = nil } }
)) {
if transcriptionManager.errorMessage?.contains("Subscribe") == true {
Button("Unlock Unlimited Time") {
showSubscriptionSheet = true
transcriptionManager.errorMessage = nil
}
Button("Cancel", role: .cancel) {
transcriptionManager.errorMessage = nil
}
} else {
Button("OK", role: .cancel) {
transcriptionManager.errorMessage = nil
}
}
} message: {
Text(transcriptionManager.errorMessage ?? "An unknown error occurred.")
}
.sheet(isPresented: $showSubscriptionSheet) {
SubscriptionView()
}
}
// MARK: - Components
private var topBar: some View {
HStack {
// Event Name
VStack(alignment: .leading, spacing: 2) {
Text(sessionState.eventName)
.font(.headline)
.fontWeight(.semibold)
// Connection Status
if sessionState.userRole == .host {
if sessionState.isSoloMode {
Text("Local only")
.font(.caption)
.foregroundColor(.secondary)
} else {
Text("\(multipeerManager.connectedPeers.count) listener\(multipeerManager.connectedPeers.count == 1 ? "" : "s") connected")
.font(.caption)
.foregroundColor(.secondary)
}
} else {
HStack(spacing: 4) {
Circle()
.fill(multipeerManager.isConnectedToHost ? Color.green : Color.red)
.frame(width: 8, height: 8)
Text(multipeerManager.isConnectedToHost ? "Connected" : "Disconnected")
.font(.caption)
.foregroundColor(.secondary)
}
}
}
Spacer()
// Recording Indicator (Host only)
if sessionState.userRole == .host && transcriptionManager.isTranscribing {
HStack(spacing: 6) {
Circle()
.fill(Color.red)
.frame(width: 10, height: 10)
.opacity(pulsingOpacity)
Text("Live")
.font(.caption)
.foregroundColor(.red)
.fontWeight(.medium)
}
}
// End Session Button (Host only)
if sessionState.userRole == .host {
Button(action: {
showingEndConfirmation = true
}) {
Text("End")
.font(.custom("System", size: 14)) // Slightly smaller to fit
.fontWeight(.bold)
.foregroundColor(.white)
.padding(.horizontal, 12)
.padding(.vertical, 6)
.background(Color.red)
.cornerRadius(8)
}
}
// Settings Button
Button(action: {
sessionState.showSettings = true
}) {
Image(systemName: "gearshape.fill")
.font(.title3)
.foregroundColor(.secondary)
}
}
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(Color(.systemBackground))
.shadow(color: .black.opacity(0.05), radius: 2, x: 0, y: 1)
}
@State private var pulsingOpacity: Double = 1.0
private var hostLostBanner: some View {
HStack(spacing: 12) {
Image(systemName: "exclamationmark.triangle.fill")
.foregroundColor(.white)
Text("Host signal lost. Transcript saved locally.")
.font(.callout)
.foregroundColor(.white)
Spacer()
}
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(Color.red)
.onAppear {
startPulsingAnimation()
}
}
private var bluetoothPrompt: some View {
HStack(spacing: 12) {
Image(systemName: "headphones")
.foregroundColor(.blue)
Text("Bluetooth mic detected")
.font(.callout)
Spacer()
Button("Switch") {
audioManager.switchToBluetoothMic()
}
.font(.caption)
.fontWeight(.semibold)
.padding(.horizontal, 12)
.padding(.vertical, 6)
.background(Color.blue)
.foregroundColor(.white)
.cornerRadius(8)
Button(action: {
audioManager.dismissBluetoothPrompt()
}) {
Image(systemName: "xmark")
.foregroundColor(.secondary)
}
}
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(Color.blue.opacity(0.1))
}
private var jumpToLiveButton: some View {
Button(action: {
isAutoScrollEnabled = true
withAnimation {
scrollProxy?.scrollTo("bottom", anchor: .bottom)
}
}) {
HStack(spacing: 8) {
Image(systemName: "arrow.down.circle.fill")
Text("Jump to Live")
}
.font(.callout)
.fontWeight(.medium)
.padding(.horizontal, 20)
.padding(.vertical, 12)
.background(.ultraThinMaterial)
.cornerRadius(25)
.shadow(color: .black.opacity(0.1), radius: 5, x: 0, y: 2)
}
// .padding(.bottom, 20) handled in call site
}
@ViewBuilder
private func transcriptText(for segment: TranscriptSegment) -> some View {
// Check if this segment starts a new paragraph (from Silent Reset Detection)
let isNewParagraph = segment.text.hasPrefix("\n")
let baseText = isNewParagraph ? String(segment.text.dropFirst()) : segment.text
// Add line breaks after sentences for readability (applies to both host and guest)
let displayText = baseText
.replacingOccurrences(of: ". ", with: ".\n\n")
.replacingOccurrences(of: "? ", with: "?\n\n")
.replacingOccurrences(of: "! ", with: "!\n\n")
Text(displayText)
.font(settings.font())
.foregroundColor(segment.isFinal ? settings.textColor : settings.textColor.opacity(0.6))
.padding(.top, isNewParagraph ? 16 : 0) // Extra spacing for paragraph breaks
}
private func startPulsingAnimation() {
withAnimation(.easeInOut(duration: 0.8).repeatForever(autoreverses: true)) {
pulsingOpacity = 0.4
}
}
private func endSession() {
// Stop transcription (Host)
if sessionState.userRole == .host {
transcriptionManager.stopTranscribing()
}
// Save pending partial text (Guest)
if sessionState.userRole == .guest {
let pendingText = sessionState.transcriptSegments
.filter { !$0.isFinal }
.map { $0.text }
.joined(separator: " ")
if !pendingText.isEmpty {
fileManager.appendText(pendingText)
}
}
// Disconnect networking
multipeerManager.disconnect()
// Clear session info
fileManager.clearSessionInfo()
// Reset state
sessionState.reset()
// Re-enable idle timer
UIApplication.shared.isIdleTimerDisabled = false
}
private var sessionTimerFooter: some View {
HStack {
Image(systemName: "timer")
.foregroundColor(settings.textColor)
let isTimerRunning = transcriptionManager.isTranscribing ||
(sessionState.userRole == .guest && multipeerManager.isConnectedToHost)
if isTimerRunning {
TimelineView(.periodic(from: .now, by: 1.0)) { context in
Text(timeString(from: sessionState.startTime ?? Date()))
.font(.caption)
.monospacedDigit()
.foregroundColor(settings.textColor)
}
} else {
// Stopped: Show final duration
Text(timeString(from: sessionState.startTime ?? Date(), to: transcriptionManager.sessionEndTime ?? Date()))
.font(.caption)
.monospacedDigit()
.foregroundColor(settings.textColor)
}
}
.padding(.vertical, 8)
.padding(.horizontal, 16)
.background(settings.textColor.opacity(0.1)) // Subtle tint based on theme text color
.cornerRadius(20)
}
private func timeString(from startDate: Date, to endDate: Date = Date()) -> String {
let elapsed = endDate.timeIntervalSince(startDate)
let totalSeconds = Int(elapsed)
let hours = totalSeconds / 3600
let minutes = (totalSeconds % 3600) / 60
let seconds = totalSeconds % 60
if hours > 0 {
return String(format: "%02d:%02d:%02d", hours, minutes, seconds)
} else {
return String(format: "%02d:%02d", minutes, seconds)
}
}
}
#Preview {
TranscriptView(
transcriptionManager: TranscriptionManager(),
multipeerManager: MultipeerManager(),
fileManager: FileStorageManager(),
audioManager: AudioSessionManager()
)
.environmentObject(SessionState())
}

View File

@@ -0,0 +1,321 @@
//
// TranscriptionHistoryView.swift
// BeamScribe
//
// View for browsing and viewing past transcription files.
//
import SwiftUI
// Wrapper to make URL Identifiable for sheet presentation
struct IdentifiableURL: Identifiable {
let id = UUID()
let url: URL
}
struct TranscriptionHistoryView: View {
@EnvironmentObject var sessionState: SessionState
@ObservedObject var fileManager: FileStorageManager
@State private var transcriptions: [URL] = []
@State private var selectedTranscription: IdentifiableURL? // Changed to Identifiable wrapper
@State private var showingShareSheet = false
@State private var pdfURL: URL?
@State private var showingDeleteAlert = false
@State private var transcriptionToDelete: URL?
@State private var showingClearAllAlert = false
var body: some View {
NavigationView {
ZStack {
// Background gradient
LinearGradient(
colors: [Color.blue.opacity(0.1), Color.purple.opacity(0.1)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
.ignoresSafeArea()
if transcriptions.isEmpty {
emptyStateView
} else {
transcriptionListView
}
}
.navigationTitle("Past Transcriptions")
.navigationBarTitleDisplayMode(.large)
.toolbar {
ToolbarItem(placement: .navigationBarLeading) {
Button(action: {
withAnimation {
sessionState.appPhase = .roleSelection
}
}) {
HStack(spacing: 4) {
Image(systemName: "chevron.left")
Text("Back")
}
}
}
if !transcriptions.isEmpty {
ToolbarItem(placement: .navigationBarTrailing) {
Button(role: .destructive) {
showingClearAllAlert = true
} label: {
Text("Clear All")
.foregroundColor(.red)
}
}
}
}
.onAppear {
loadTranscriptions()
}
.sheet(item: $selectedTranscription) { identifiableURL in
TranscriptionDetailView(
url: identifiableURL.url,
fileManager: fileManager,
onDismiss: { selectedTranscription = nil }
)
}
.alert("Delete Transcription?", isPresented: $showingDeleteAlert) {
Button("Cancel", role: .cancel) { }
Button("Delete", role: .destructive) {
if let url = transcriptionToDelete {
deleteTranscription(at: url)
}
}
} message: {
Text("This action cannot be undone.")
}
.alert("Clear All Transcriptions?", isPresented: $showingClearAllAlert) {
Button("Cancel", role: .cancel) { }
Button("Clear All", role: .destructive) {
clearAllTranscriptions()
}
} message: {
Text("This will permanently delete all \(transcriptions.count) transcription(s).")
}
}
}
private var emptyStateView: some View {
VStack(spacing: 16) {
Image(systemName: "doc.text.magnifyingglass")
.font(.system(size: 60))
.foregroundColor(.secondary)
Text("No Transcriptions Yet")
.font(.title2)
.fontWeight(.semibold)
Text("Your saved transcriptions will appear here")
.font(.subheadline)
.foregroundColor(.secondary)
.multilineTextAlignment(.center)
}
.padding()
}
private var transcriptionListView: some View {
List {
ForEach(transcriptions, id: \.absoluteString) { url in
TranscriptionRow(url: url)
.contentShape(Rectangle())
.onTapGesture {
selectTranscription(url)
}
.swipeActions(edge: .trailing, allowsFullSwipe: true) {
Button(role: .destructive) {
transcriptionToDelete = url
showingDeleteAlert = true
} label: {
Label("Delete", systemImage: "trash")
}
}
}
}
.listStyle(.insetGrouped)
}
private func loadTranscriptions() {
transcriptions = fileManager.listSavedTranscriptions()
}
private func selectTranscription(_ url: URL) {
selectedTranscription = IdentifiableURL(url: url)
}
private func deleteTranscription(at url: URL) {
do {
try fileManager.deleteTranscription(at: url)
loadTranscriptions()
} catch {
print("Failed to delete: \(error)")
}
}
private func clearAllTranscriptions() {
for url in transcriptions {
do {
try fileManager.deleteTranscription(at: url)
} catch {
print("Failed to delete \(url): \(error)")
}
}
loadTranscriptions()
}
}
// MARK: - Transcription Row
struct TranscriptionRow: View {
let url: URL
private var fileName: String {
url.deletingPathExtension().lastPathComponent
}
private var modificationDate: String {
guard let values = try? url.resourceValues(forKeys: [.contentModificationDateKey]),
let date = values.contentModificationDate else {
return ""
}
let formatter = DateFormatter()
formatter.dateStyle = .medium
formatter.timeStyle = .short
return formatter.string(from: date)
}
private var fileSize: String {
guard let values = try? url.resourceValues(forKeys: [.fileSizeKey]),
let size = values.fileSize else {
return ""
}
let formatter = ByteCountFormatter()
formatter.countStyle = .file
return formatter.string(fromByteCount: Int64(size))
}
var body: some View {
HStack(spacing: 12) {
Image(systemName: "doc.text.fill")
.font(.title2)
.foregroundStyle(
LinearGradient(
colors: [.blue, .purple],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
VStack(alignment: .leading, spacing: 4) {
Text(fileName)
.font(.headline)
.lineLimit(2)
HStack(spacing: 8) {
Text(modificationDate)
Text("")
Text(fileSize)
}
.font(.caption)
.foregroundColor(.secondary)
}
Spacer()
Image(systemName: "chevron.right")
.font(.caption)
.foregroundColor(.secondary)
}
.padding(.vertical, 4)
}
}
// MARK: - Transcription Detail View
struct TranscriptionDetailView: View {
let url: URL // Now non-optional since passed directly from sheet(item:)
@ObservedObject var fileManager: FileStorageManager
let onDismiss: () -> Void
@State private var content: String = ""
@State private var pdfToShare: IdentifiableURL? // Use wrapper for item-based sheet
@State private var exportError: String?
@State private var showingExportError = false
private var fileName: String {
url.deletingPathExtension().lastPathComponent
}
var body: some View {
NavigationView {
ZStack {
LinearGradient(
colors: [Color.blue.opacity(0.05), Color.purple.opacity(0.05)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
.ignoresSafeArea()
ScrollView {
Text(content)
.font(.body)
.foregroundColor(.primary)
.padding()
.frame(maxWidth: .infinity, alignment: .leading)
}
}
.onAppear {
loadContent()
}
.navigationTitle(fileName)
.navigationBarTitleDisplayMode(.inline)
.toolbar {
ToolbarItem(placement: .navigationBarLeading) {
Button("Done") {
onDismiss()
}
}
ToolbarItem(placement: .navigationBarTrailing) {
Button(action: exportToPDF) {
Image(systemName: "square.and.arrow.up")
}
}
}
.sheet(item: $pdfToShare) { identifiablePDF in
ShareSheet(activityItems: [identifiablePDF.url])
}
.alert("Export Failed", isPresented: $showingExportError) {
Button("OK", role: .cancel) { }
} message: {
Text(exportError ?? "Unknown error")
}
}
}
private func loadContent() {
content = fileManager.readTranscription(at: url) ?? "Unable to read file"
}
private func exportToPDF() {
do {
let exportedURL = try fileManager.exportToPDF(eventName: fileName, content: content)
pdfToShare = IdentifiableURL(url: exportedURL)
} catch {
exportError = error.localizedDescription
showingExportError = true
}
}
}
#Preview {
TranscriptionHistoryView(fileManager: FileStorageManager())
.environmentObject(SessionState())
}

BIN
BeamScribe/appstore.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

2713
BeamScribe/at-table-app.txt Normal file

File diff suppressed because it is too large Load Diff

BIN
BeamScribe/playstore.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 420 KiB

52
README.md Normal file
View File

@@ -0,0 +1,52 @@
# BeamScribe 🎤📡
**BeamScribe** is an accessibility-focused iOS application designed to facilitate immediate, real-time communication between hearing speakers and Deaf or Hard of Hearing individuals.
It functions as a **live transcription broadcast system**, allowing a host to transcribe speech via their iPhone and broadcast the text in real-time to up to **7 connected guest devices** nearby.
## 🌟 Key Features
### 📡 Live Broadcasting (Multipeer Connectivity)
* **Host Mode:** A hearing person hosts a session. Their speech is converted to text instantly.
* **Guest Mode:** Up to **7 Deaf or Hard of Hearing participants** can join the host's session wirelessly.
* **Real-Time Sync:** Guests see the transcription appear on their iPhones in real-time as the host speaks.
* **No Internet Required for Connection:** Uses Apple's Multipeer Connectivity framework (Wi-Fi/Bluetooth peer-to-peer) to link devices, meaning guests don't need a data plan or active Wi-Fi connection to the internet to receive text (though the Host needs a connection for high-accuracy online transcription).
* **Late Joiner Support:** Guests who join a session late automatically receive the full transcript history so they don't miss context.
### 📝 Transcription Engine
* **High-Accuracy Speech-to-Text:** Powered by Apple's `SFSpeechRecognizer`.
* **Offline Support:** Capable of falling back to on-device speech recognition when the internet is unavailable.
* **Smart Formatting:** Auto-punctuation and intelligent paragraph handling.
* **Silent Reset Detection:** Automatically handles internal speech engine timeouts to ensure continuous, uninterrupted recording.
### 🎧 Audio & Hardware Support
* **Bluetooth Microphone Support:** Detects external Bluetooth microphones (like lapel mics or headsets) and prompts the user to switch inputs for better audio clarity in noisy environments.
* **Background Audio:** Continues transcribing even if the screen is locked or the app is backgrounded.
* **Battery Monitoring:** Alerts guests if the Host's battery is critically low.
### 🎨 Accessibility & Customization
* **High Contrast Themes:** Includes Light, Dark, and High-Contrast (Lightbulb) themes.
* **Typography Control:** Users can adjust font size (up to 48pt) and choose specific fonts (Rounded, Serif, Monospaced, Marker Felt) for better readability.
* **Keep Awake:** Option to prevent the screen from dimming during long sessions.
### 📂 File Management & Export
* **Solo Mode:** Use the app for personal note-taking without broadcasting.
* **Auto-Save:** Transcripts are automatically saved to the device.
* **History Browser:** View, manage, and delete past sessions.
* **PDF Export:** Generate clean, formatted PDFs of transcripts to share via email or other apps.
## 📱 How to Use
### For the Host (Speaker)
1. Open BeamScribe and select **"Host Session"**.
2. Enter a name for the event (e.g., "Biology Lecture" or "Family Dinner").
3. Tap **Start Transcribing**.
4. If a Bluetooth mic is connected, confirm the prompt to use it for better quality.
5. Wait for guests to join. You will see a counter of connected listeners.
### For the Guest (Listener)
1. Open BeamScribe and select **"Join Session"**.
2. The app will browse for nearby hosts.
3. Tap the desired session name (e.g., "Biology Lecture").
4. Once connected, the text will appear on your screen automatically.
5. You can customize the font size and theme in **Settings** (Gear icon) for easier reading.

BIN
Screenshots/iPad/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

BIN
Screenshots/iPad/2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

BIN
Screenshots/iPad/3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

BIN
Screenshots/iPhone/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 MiB

BIN
Screenshots/iPhone/2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 862 KiB

BIN
Screenshots/iPhone/3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 166 KiB

BIN
Screenshots/iPhone/4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 680 KiB

BIN
Screenshots/iPhone/5.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 MiB

BIN
Screenshots/iPhone/6.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 172 KiB

27
check_build.sh Executable file
View File

@@ -0,0 +1,27 @@
#!/bin/zsh
# Compile-check the BeamScribe scheme for the iOS Simulator without
# installing or launching the app. Exits 0 on a clean build, 1 on failure.
set -o pipefail # Fail if xcodebuild fails, even when piped through a formatter

# --- Configuration ---
SCHEME="BeamScribe"
DEVICE_NAME="iPhone 17 Pro"
BUILD_PATH="./build"

echo "🔍 Checking compilation for $SCHEME..."

# Prettify output with xcbeautify when available; otherwise fall back to raw
# logs so a missing formatter does not masquerade as a build failure.
if command -v xcbeautify >/dev/null 2>&1; then
    FORMATTER="xcbeautify"
else
    FORMATTER="cat"
fi

# Build Only (No Install/Launch)
# 'env -u' hides Homebrew toolchain variables (CC/CXX/LIBCLANG_PATH) that can
# confuse xcodebuild; '-derivedDataPath' keeps build products isolated.
# Testing the pipeline directly in 'if' is more robust than checking $? on a
# later line, which silently breaks if any statement is inserted in between.
if env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \
    -scheme "$SCHEME" \
    -destination "platform=iOS Simulator,name=$DEVICE_NAME" \
    -configuration Debug \
    -derivedDataPath "$BUILD_PATH" \
    build | "$FORMATTER"; then
    echo "✅ Build Succeeded. No errors found."
else
    echo "❌ Build Failed."
    exit 1
fi

204
future_to_do_HIPAA.md Normal file
View File

@@ -0,0 +1,204 @@
# HIPAA Compliance Checklist for BeamScribe
This document outlines the changes required to make BeamScribe compliant with the Health Insurance Portability and Accountability Act (HIPAA) for handling Protected Health Information (PHI).
---
## 🔴 Critical Technical Requirements
### 1. Enable Encryption in Transit
**File:** `BeamScribe/Managers/MultipeerManager.swift`
**Priority:** Critical
**Effort:** Low
Change line 52 from:
```swift
session = MCSession(peer: peerID, securityIdentity: nil, encryptionPreference: .none)
```
To:
```swift
session = MCSession(peer: peerID, securityIdentity: nil, encryptionPreference: .required)
```
> [!CAUTION]
> Data transmitted between devices is currently **unencrypted**. Anyone within Bluetooth/Wi-Fi range could intercept transcript data.
---
### 2. Encrypt Transcripts at Rest
**File:** `BeamScribe/Managers/FileStorageManager.swift`
**Priority:** Critical
**Effort:** Medium
Currently, transcripts are stored as plain `.txt` files in the Documents directory. Changes needed:
- [ ] Use iOS Data Protection by setting file attributes:
```swift
try data.write(to: fileURL, options: .completeFileProtection)
```
- [ ] Consider using CryptoKit (`AES.GCM`) for additional encryption layer
- [ ] Store encryption keys in Keychain with appropriate access controls
---
### 3. Prevent iCloud Backup of PHI
**File:** `BeamScribe/Managers/FileStorageManager.swift`
**Priority:** Critical
**Effort:** Low
Add after creating transcript files:
```swift
var resourceValues = URLResourceValues()
resourceValues.isExcludedFromBackup = true
try fileURL.setResourceValues(resourceValues)
```
> [!WARNING]
> Without this, transcripts containing PHI may be backed up to iCloud, which is not HIPAA-compliant unless you have a BAA with Apple.
---
### 4. Add Authentication to Access Transcripts
**Files:** New file + `BeamScribe/Views/HistoryView.swift`
**Priority:** Critical
**Effort:** Medium
- [ ] Create `AuthenticationManager.swift` using LocalAuthentication framework
- [ ] Require Face ID/Touch ID/Passcode before viewing History
- [ ] Add timeout that requires re-authentication after inactivity
- [ ] Provide fallback for devices without biometrics
---
## 🟡 Important Technical Requirements
### 5. Implement Audit Logging
**File:** New `BeamScribe/Managers/AuditLogManager.swift`
**Priority:** High
**Effort:** Medium
Create an audit log that records:
- [ ] When a transcript is created (timestamp, event name)
- [ ] When a transcript is viewed (timestamp, file name)
- [ ] When a transcript is exported/shared (timestamp, file name, export method)
- [ ] When a transcript is deleted (timestamp, file name)
- [ ] When a guest connects to receive transcript (timestamp, peer info)
Store logs securely with same encryption as transcripts.
---
### 6. Auto-Lock / Session Timeout
**File:** `BeamScribe/BeamScribeApp.swift` or `ContentView.swift`
**Priority:** High
**Effort:** Low
- [ ] Track time since last user interaction
- [ ] Auto-lock app and require re-authentication after 5 minutes of inactivity
- [ ] Clear sensitive data from memory on background
---
### 7. Secure Data Deletion
**File:** `BeamScribe/Managers/FileStorageManager.swift`
**Priority:** High
**Effort:** Low
When deleting transcripts:
- [ ] Overwrite file contents before deletion (secure wipe)
- [ ] Clear any cached copies
- [ ] Remove from UserDefaults if applicable
---
### 8. Minimum Necessary Access
**File:** Various
**Priority:** Medium
**Effort:** Medium
- [ ] Add option to disable transcript saving entirely (live view only mode)
- [ ] Add auto-delete policy (e.g., delete transcripts older than 30 days)
- [ ] Clear transcripts from guest devices when session ends (configurable)
---
## 🔵 Administrative Requirements
### 9. Update Privacy Policy
**File:** `privacy-policy.md`
**Priority:** High
**Effort:** Low
Add sections covering:
- [ ] HIPAA compliance statement
- [ ] How PHI is protected (encryption, access controls)
- [ ] Data retention and deletion policies
- [ ] User rights regarding their health information
- [ ] Breach notification procedures
---
### 10. User Consent Flow
**Files:** `BeamScribe/Views/OnboardingView.swift` or new consent view
**Priority:** High
**Effort:** Medium
- [ ] Display clear consent screen before first use
- [ ] Explain that app may record health-related conversations
- [ ] Require explicit "I Agree" action
- [ ] Store consent timestamp in audit log
---
### 11. Business Associate Agreement (BAA)
**Priority:** Critical (if using cloud speech recognition)
**Effort:** External process
> [!IMPORTANT]
> If `requiresOnDeviceRecognition` is set to `false` (or not set), audio data may be sent to Apple's servers. You would need a BAA with Apple to remain HIPAA-compliant. Consider forcing on-device recognition for healthcare use.
**File:** `BeamScribe/Managers/TranscriptionManager.swift`
Check/add:
```swift
recognitionRequest.requiresOnDeviceRecognition = true
```
---
### 12. Incident Response Documentation
**File:** New `INCIDENT_RESPONSE.md`
**Priority:** Medium
**Effort:** Medium
Document procedures for:
- [ ] Identifying a potential breach
- [ ] Containing and investigating the breach
- [ ] Notifying affected individuals (within 60 days per HIPAA)
- [ ] Notifying HHS if breach affects 500+ individuals
- [ ] Documenting corrective actions
---
## Implementation Order (Recommended)
| Phase | Items | Effort |
|-------|-------|--------|
| **Phase 1** | #1 (Encryption in Transit), #3 (Backup Exclusion), #11 (On-device recognition) | Low |
| **Phase 2** | #2 (Encryption at Rest), #4 (Authentication) | Medium |
| **Phase 3** | #5 (Audit Logging), #6 (Auto-Lock), #7 (Secure Delete) | Medium |
| **Phase 4** | #9 (Privacy Policy), #10 (Consent Flow), #8 (Access Controls) | Medium |
| **Phase 5** | #12 (Incident Response) | Low |
---
## Additional Considerations
- **Apple BAA**: Apple offers a BAA for certain services. Review Apple's [Business Program](https://www.apple.com/business/compliance/) for healthcare compliance.
- **Penetration Testing**: Consider a security audit before deploying in healthcare settings.
- **Staff Training**: Document how healthcare staff should use the app to maintain compliance.
- **Regular Reviews**: HIPAA requires ongoing risk assessments; schedule quarterly reviews.
---
*Last Updated: December 26, 2025*

49
privacy-policy.md Normal file
View File

@@ -0,0 +1,49 @@
# Privacy Policy for BeamScribe
**Last Updated:** December 12, 2025
Your privacy is important to us. This Privacy Policy explains how BeamScribe ("we", "us", or "our") handles your information when you use our mobile application.
## 1. Information We Collect
### Audio Data
BeamScribe requires access to your device's microphone to perform its core function: real-time speech transcription.
- **On-Device Processing**: Whenever supported by your device hardware, audio is processed entirely on your device and never leaves it.
- **Apple Speech Recognition**: If on-device processing is not supported or if you are using specific languages, audio data may be sent to Apple's servers to be converted into text. This is handled via Apple's `SFSpeechRecognizer` API. We do not have access to this raw audio data, nor do we store it on our servers.
### Transcripts
The text generated from your speech (transcripts) is stored **locally on your device**.
- We do not upload your transcripts to any cloud servers.
- We do not analyze, mine, or sell your transcript data.
### Local Connectivity Data
BeamScribe uses Apple's Multipeer Connectivity framework to broadcast transcripts to other nearby devices.
- This data transmission occurs directly between devices (Peer-to-Peer) over Wi-Fi or Bluetooth.
- This data is not routed through the internet or any external servers.
## 2. How We Use Your Information
We use the information collected solely to provide the functionality of the app:
- **Audio**: To generate text transcripts in real-time.
- **Transcripts**: To display the text on your screen and broadcast it to connected Guest devices at your request.
## 3. Data Retention
All data is stored locally on your device. You have full control over your data.
- You can delete saved transcripts from the "History" section of the app at any time.
- Deleting the app will remove all locally stored transcripts.
## 4. Third-Party Services
BeamScribe relies on **Apple Inc.** for speech recognition services.
- By using BeamScribe, you are subject to Apple's Privacy Policy regarding Siri and Dictation.
- Apple may use audio data to improve its speech recognition services, depending on your device settings. You can manage this in your iOS System Settings under **Privacy & Security > Analytics & Improvements**.
## 5. Children's Privacy
BeamScribe does not knowingly collect personal information from children under the age of 13. Since the app does not require account creation and stores data locally, we do not maintain records of user age or identity.
BeamScribe does not knowingly collect personal information from children under the age of 13. Since the app does not require account creation and stores data locally, we do not maintain records of user age or identity.
## 6. Contact Us
If you have any questions about this Privacy Policy, please contact us at:
[Insert Support Email Address Here]