Initial commit

AtTable iOS app with multipeer connectivity for mesh messaging.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-19 22:06:32 -05:00
commit 80de9fe057
51 changed files with 3466 additions and 0 deletions

.gitignore vendored Normal file

@@ -0,0 +1,73 @@
# Xcode
build/
DerivedData/
*.xcodeproj/project.xcworkspace/xcuserdata/
*.xcodeproj/xcuserdata/
*.xcworkspace/xcuserdata/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata/
# Xcode Scheme
*.xcscheme
# Swift Package Manager
.build/
.swiftpm/
Package.resolved
# CocoaPods
Pods/
Podfile.lock
# Carthage
Carthage/Build/
Carthage/Checkouts/
# Node
node_modules/
dist/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# macOS
.DS_Store
.AppleDouble
.LSOverride
._*
# Thumbnails
Thumbs.db
# IDE
.idea/
*.swp
*.swo
*~
# Archives
*.ipa
*.dSYM.zip
*.dSYM
# Playgrounds
timeline.xctimeline
playground.xcworkspace
# Fastlane
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output
# Environment files
.env
.env.local
.env.*.local

At-Table_orig.png Normal file (binary image added, 1.7 MiB; not shown)

AtTable.xcodeproj/project.pbxproj Normal file

@@ -0,0 +1,357 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXFileReference section */
7F9DE17C2EFC324800008582 /* AtTable.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AtTable.app; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
7F9DE17E2EFC324800008582 /* AtTable */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = AtTable;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
7F9DE1792EFC324800008582 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7F9DE1732EFC324800008582 = {
isa = PBXGroup;
children = (
7F9DE17E2EFC324800008582 /* AtTable */,
7F9DE17D2EFC324800008582 /* Products */,
);
sourceTree = "<group>";
};
7F9DE17D2EFC324800008582 /* Products */ = {
isa = PBXGroup;
children = (
7F9DE17C2EFC324800008582 /* AtTable.app */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7F9DE17B2EFC324800008582 /* AtTable */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7F9DE1872EFC324800008582 /* Build configuration list for PBXNativeTarget "AtTable" */;
buildPhases = (
7F9DE1782EFC324800008582 /* Sources */,
7F9DE1792EFC324800008582 /* Frameworks */,
7F9DE17A2EFC324800008582 /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
7F9DE17E2EFC324800008582 /* AtTable */,
);
name = AtTable;
packageProductDependencies = (
);
productName = AtTable;
productReference = 7F9DE17C2EFC324800008582 /* AtTable.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7F9DE1742EFC324800008582 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2620;
LastUpgradeCheck = 2620;
TargetAttributes = {
7F9DE17B2EFC324800008582 = {
CreatedOnToolsVersion = 26.2;
};
};
};
buildConfigurationList = 7F9DE1772EFC324800008582 /* Build configuration list for PBXProject "AtTable" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7F9DE1732EFC324800008582;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = 7F9DE17D2EFC324800008582 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7F9DE17B2EFC324800008582 /* AtTable */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7F9DE17A2EFC324800008582 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7F9DE1782EFC324800008582 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
7F9DE1852EFC324800008582 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.2;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
7F9DE1862EFC324800008582 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 26.2;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7F9DE1882EFC324800008582 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = MeshInfo.plist;
INFOPLIST_KEY_CFBundleDisplayName = "At-Table";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSLocalNetworkUsageDescription = "We use the local network to find and connect with other devices for chat.";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "We need access to your microphone to transcribe your speech.";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "We use speech recognition to convert your speech into text for the chat.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.2;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.AtTable;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7F9DE1892EFC324800008582 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = MeshInfo.plist;
INFOPLIST_KEY_CFBundleDisplayName = "At-Table";
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities";
INFOPLIST_KEY_NSLocalNetworkUsageDescription = "We use the local network to find and connect with other devices for chat.";
INFOPLIST_KEY_NSMicrophoneUsageDescription = "We need access to your microphone to transcribe your speech.";
INFOPLIST_KEY_NSSpeechRecognitionUsageDescription = "We use speech recognition to convert your speech into text for the chat.";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.2;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.AtTable;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7F9DE1772EFC324800008582 /* Build configuration list for PBXProject "AtTable" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F9DE1852EFC324800008582 /* Debug */,
7F9DE1862EFC324800008582 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7F9DE1872EFC324800008582 /* Build configuration list for PBXNativeTarget "AtTable" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F9DE1882EFC324800008582 /* Debug */,
7F9DE1892EFC324800008582 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7F9DE1742EFC324800008582 /* Project object */;
}

AtTable.xcodeproj/project.xcworkspace/contents.xcworkspacedata Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

AtTable/Assets.xcassets/AccentColor.colorset/Contents.json Normal file

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

19 binary image files added (PNG app icon assets, 1.0 KiB to 512 KiB each; not shown)

AtTable/Assets.xcassets/AppIcon.appiconset/Contents.json Normal file

@@ -0,0 +1 @@
{"images":[{"size":"60x60","expected-size":"180","filename":"180.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"40x40","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"60x60","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"57x57","expected-size":"57","filename":"57.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"87","filename":"87.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"57x57","expected-size":"114","filename":"114.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"60","filename":"60.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"1024x1024","filename":"1024.png","expected-size":"1024","idiom":"ios-marketing","folder":"Assets.xcassets/AppIcon.appiconset/","scale":"1x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"72x72","expected-size":"72","filename":"72.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"76x76","expected-size":"152","filename":"152.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"50x50","expected-size":"100","filename":"100.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"76x76","expected-size":"76","filename":"76.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"50x50","expected-size":"50","filename":"50.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"72x72","expected-size":"144","filename":"144.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"40x40","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"83.5x83.5","expected-size":"167","filename":"167.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"20x20","expected-size":"20","filename":"20.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"}]}

AtTable/Assets.xcassets/Contents.json Normal file

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

AtTable/Assets.xcassets/qrcode.imageset/Contents.json Normal file

@@ -0,0 +1,21 @@
{
"images" : [
{
"filename" : "qrcode.png",
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

qrcode.png added (binary image, 412 B; not shown)

AtTable/AtTableApp.swift Normal file

@@ -0,0 +1,47 @@
//
// AtTableApp.swift
// AtTable
//
// Created by Jared Evans on 12/24/25.
//
import SwiftUI
@main
struct AtTableApp: App {
@AppStorage("isOnboardingComplete") var isOnboardingComplete: Bool = false
@AppStorage("userName") var userName: String = ""
@AppStorage("userRole") var userRole: UserRole = .hearing
@AppStorage("userColorHex") var userColorHex: String = "#00008B"
// APP-LEVEL SESSION: Ensures stable identity (Instance ID) across view reloads
@StateObject var multipeerSession = MultipeerSession()
init() {
// Reset navigation state on launch so users always see the "Login" screen first.
// This prevents getting "stuck" in a connecting loop if the app crashed.
UserDefaults.standard.set(false, forKey: "isOnboardingComplete")
}
var body: some Scene {
WindowGroup {
Group {
if isOnboardingComplete {
ChatView(
userName: userName,
userRole: userRole,
userColorHex: userColorHex
)
} else {
OnboardingView(
isOnboardingComplete: $isOnboardingComplete,
userName: $userName,
userRole: $userRole,
userColorHex: $userColorHex
)
}
}
.environmentObject(multipeerSession)
}
}
}

AtTable/ChatView.swift Normal file

@@ -0,0 +1,323 @@
import SwiftUI
import MultipeerConnectivity
import Combine
struct ChatView: View {
@EnvironmentObject var multipeerSession: MultipeerSession
@StateObject var speechRecognizer = SpeechRecognizer()
@ObservedObject var networkMonitor = NetworkMonitor.shared
let userName: String
let userRole: UserRole
let userColorHex: String
@AppStorage("isOnboardingComplete") var isOnboardingComplete: Bool = true
@State private var messageText: String = ""
@State private var waitingDots = 0
let timer = Timer.publish(every: 0.5, on: .main, in: .common).autoconnect()
var body: some View {
ZStack {
// 1. Animated Mesh Background
MeshBackground(colors: [
Color(hex: userColorHex),
userRole == .deaf ? .purple : .cyan,
.black
])
VStack(spacing: 0) {
// 2. Floating Dynamic Header
HStack {
Button(action: {
speechRecognizer.stopRecording()
multipeerSession.stop()
isOnboardingComplete = false
}) {
HStack(spacing: 6) {
Image(systemName: "chevron.left")
Text("Leave")
}
.font(.system(.subheadline, design: .rounded, weight: .bold))
.foregroundColor(.white)
.padding(.horizontal, 16)
.padding(.vertical, 8)
.background(.ultraThinMaterial)
.cornerRadius(20)
}
Spacer()
VStack(spacing: 2) {
Text("MESH")
.font(.system(size: 10, weight: .black, design: .monospaced))
.tracking(2)
.foregroundColor(.white.opacity(0.6))
Text("\(multipeerSession.connectedPeers.count) Active")
.font(.caption2)
.fontWeight(.bold)
.foregroundColor(.green)
}
.padding(.horizontal, 16)
.padding(.vertical, 6)
.background(.ultraThinMaterial)
.cornerRadius(20)
}
.padding(.horizontal)
.padding(.top, 50) // Safe Area
.padding(.bottom, 10)
// 3. Peer Status / Live Transcriptions
if multipeerSession.connectedPeers.isEmpty {
VStack {
Spacer()
// Show "Warming up..." on first connection when not on WiFi
if !networkMonitor.isWiFi {
Text("Warming up" + String(repeating: ".", count: waitingDots))
.font(.title3)
.fontWeight(.medium)
.foregroundColor(.white.opacity(0.7))
.onReceive(timer) { _ in
waitingDots = (waitingDots + 1) % 4
}
Text("No WiFi: connection may take up to 60 seconds")
.font(.caption)
.foregroundColor(.orange)
.padding(.top, 8)
} else {
Text("Searching for others" + String(repeating: ".", count: waitingDots))
.font(.title3)
.fontWeight(.medium)
.foregroundColor(.white.opacity(0.7))
.onReceive(timer) { _ in
waitingDots = (waitingDots + 1) % 4
}
}
Spacer()
}
} else {
// Connected Peers Row (Small Pills)
if !multipeerSession.connectedPeerUsers.isEmpty {
HStack(spacing: 8) {
ScrollView(.horizontal, showsIndicators: false) {
HStack(spacing: 12) {
ForEach(multipeerSession.connectedPeerUsers, id: \.self) { peer in
HStack(spacing: 6) {
Circle()
.fill(Color(hex: peer.colorHex))
.frame(width: 8, height: 8)
Text(peer.name)
.font(.caption)
.fontWeight(.bold)
.foregroundColor(.white)
}
.padding(.horizontal, 10)
.padding(.vertical, 6)
.background(.ultraThinMaterial)
.cornerRadius(12)
}
}
.padding(.leading)
}
// Full Mesh Indicator
if multipeerSession.isAtCapacity {
Text("FULL")
.font(.system(size: 10, weight: .black, design: .monospaced))
.foregroundColor(.orange)
.padding(.horizontal, 8)
.padding(.vertical, 4)
.background(Color.orange.opacity(0.2))
.cornerRadius(8)
.padding(.trailing)
}
}
.padding(.vertical, 8)
}
// 4. Message List
ScrollViewReader { proxy in
ScrollView {
LazyVStack(spacing: 12) {
ForEach(multipeerSession.receivedMessages) { message in
MessageBubble(
message: message,
isMyMessage: message.senderNodeID == multipeerSession.myNodeIDPublic
)
.id(message.id)
.transition(.opacity.combined(with: .scale(scale: 0.95)))
}
}
.padding(.bottom, 20)
}
.scrollIndicators(.hidden)
.onChange(of: multipeerSession.receivedMessages.count) {
if let lastId = multipeerSession.receivedMessages.last?.id {
withAnimation {
proxy.scrollTo(lastId, anchor: .bottom)
}
}
if userRole == .deaf {
if let lastMsg = multipeerSession.receivedMessages.last, lastMsg.senderNodeID != multipeerSession.myNodeIDPublic {
let generator = UINotificationFeedbackGenerator()
generator.notificationOccurred(.success)
}
}
}
// Auto-scroll when transcription card appears to prevent blocking
.onChange(of: multipeerSession.liveTranscripts.isEmpty) {
if !multipeerSession.liveTranscripts.isEmpty {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
if let lastId = multipeerSession.receivedMessages.last?.id {
withAnimation {
proxy.scrollTo(lastId, anchor: .bottom)
}
}
}
}
}
}
}
// Live Transcription Cards (bottom of screen, stacked vertically)
if userRole == .deaf && !multipeerSession.liveTranscripts.isEmpty {
VStack(spacing: 8) {
ForEach(multipeerSession.liveTranscripts.sorted(by: { $0.key < $1.key }), id: \.key) { nodeIDKey, text in
// Look up friendly name from connectedPeerUsers by nodeID
let displayName = multipeerSession.connectedPeerUsers.first(where: { $0.nodeID == nodeIDKey })?.name ?? nodeIDKey
VStack(alignment: .leading, spacing: 6) {
Text(displayName)
.font(.caption2)
.fontWeight(.bold)
.foregroundColor(Color.cyan)
ScrollViewReader { proxy in
ScrollView {
VStack(alignment: .leading) {
Text(text)
.font(.caption)
.foregroundColor(.white)
.shadow(color: .black, radius: 1)
.multilineTextAlignment(.leading)
.id("text-\(nodeIDKey)")
Spacer(minLength: 0)
.id("bottom-\(nodeIDKey)")
}
}
.onChange(of: text) {
withAnimation(.easeOut(duration: 0.1)) {
proxy.scrollTo("bottom-\(nodeIDKey)", anchor: .bottom)
}
}
}
}
.padding(12)
.frame(maxWidth: .infinity)
.frame(height: 70) // Fixed height for approx 2 lines
.liquidCard()
}
}
.padding(.horizontal)
.padding(.bottom, 10)
.transition(.move(edge: .bottom).combined(with: .opacity))
}
// 5. Input Bar
InputBar(
userRole: userRole,
text: $messageText,
isRecording: $speechRecognizer.isRecording,
transcript: speechRecognizer.transcript,
onSend: sendMessage,
onToggleRecording: toggleRecording
)
}
}
.ignoresSafeArea(.container, edges: .all)
.onAppear {
UIApplication.shared.isIdleTimerDisabled = true
multipeerSession.start()
// Re-apply identity if needed (though App state should handle it)
if multipeerSession.userName != userName {
multipeerSession.setIdentity(name: userName, colorHex: userColorHex, role: userRole)
}
// Configure Speech Recognizer callback
speechRecognizer.onFinalResult = { resultText in
let message = MeshMessage(
id: UUID(),
senderID: userName,
senderNodeID: multipeerSession.myNodeIDPublic,
senderInstance: multipeerSession.myInstancePublic,
senderRole: userRole,
senderColorHex: userColorHex,
content: resultText,
isTranscription: true,
isPartial: false,
timestamp: Date()
)
multipeerSession.send(message: message)
}
// Configure Partial Result callback (No Throttling)
speechRecognizer.onPartialResult = { partialText in
let message = MeshMessage(
id: UUID(),
senderID: userName,
senderNodeID: multipeerSession.myNodeIDPublic, // Added NodeID for consistent transcript keying
senderInstance: multipeerSession.myInstancePublic,
senderRole: userRole,
senderColorHex: userColorHex,
content: partialText,
isTranscription: true,
isPartial: true,
timestamp: Date()
)
multipeerSession.send(message: message)
}
// Auto-start recording for Hearing users
if userRole == .hearing {
speechRecognizer.startRecording()
}
}
.onDisappear {
UIApplication.shared.isIdleTimerDisabled = false
}
}
func sendMessage() {
guard !messageText.isEmpty else { return }
let message = MeshMessage(
id: UUID(),
senderID: userName,
senderNodeID: multipeerSession.myNodeIDPublic, // Added NodeID
senderInstance: multipeerSession.myInstancePublic,
senderRole: userRole,
senderColorHex: userColorHex,
content: messageText,
isTranscription: false,
isPartial: false,
timestamp: Date()
)
multipeerSession.send(message: message)
messageText = ""
}
func toggleRecording() {
if speechRecognizer.isRecording {
speechRecognizer.stopRecording()
} else {
speechRecognizer.startRecording()
}
}
}

AtTable/Constants.swift Normal file

@@ -0,0 +1,5 @@
import Foundation
struct Constants {
static let serviceType = "access-mesh" // Must match Info.plist _access-mesh._tcp
}

AtTable/DesignSystem.swift Normal file

@@ -0,0 +1,216 @@
import SwiftUI
// MARK: - Colors & Gradients
struct DesignSystem {
struct Colors {
static let deepSpace = Color(hex: "0a0a12")
static let neonBlue = Color(hex: "00f2ff")
static let neonPurple = Color(hex: "bd00ff")
static let neonTeal = Color(hex: "00ff9d")
static func roleGradient(for role: UserRole) -> LinearGradient {
switch role {
case .deaf:
return LinearGradient(
gradient: Gradient(colors: [Color(hex: "43cea2"), Color(hex: "185a9d")]),
startPoint: .topLeading,
endPoint: .bottomTrailing
)
case .hearing:
return LinearGradient(
gradient: Gradient(colors: [Color(hex: "FDB99B"), Color(hex: "CF8BF3"), Color(hex: "A770EF")]),
startPoint: .topLeading,
endPoint: .bottomTrailing
)
}
}
}
struct Typography {
static func title(_ text: String) -> Text {
Text(text)
.font(.system(.largeTitle, design: .rounded))
.fontWeight(.heavy)
}
}
}
// MARK: - Modifiers
struct LiquidGlassCard: ViewModifier {
func body(content: Content) -> some View {
content
.background(.ultraThinMaterial)
.cornerRadius(24)
.shadow(color: Color.black.opacity(0.15), radius: 10, x: 0, y: 5)
.overlay(
RoundedRectangle(cornerRadius: 24)
.stroke(.white.opacity(0.2), lineWidth: 1)
)
}
}
struct GlowingEdge: ViewModifier {
var color: Color
func body(content: Content) -> some View {
content
.shadow(color: color.opacity(0.6), radius: 20, x: 0, y: 0)
.shadow(color: color.opacity(0.3), radius: 40, x: 0, y: 0)
}
}
// MARK: - Views
struct MeshBackground: View {
@State private var start = UnitPoint(x: 0, y: -2)
@State private var end = UnitPoint(x: 4, y: 0)
// Dynamic Orb States
@State private var orb1Offset = CGSize.zero
@State private var orb2Offset = CGSize.zero
@State private var orb3Offset = CGSize.zero // Pulsing Blue Orb
@State private var orb1Scale: CGFloat = 1.0
// Orb 2 Shape Shifting (Independent Scales)
@State private var orb2ScaleX: CGFloat = 1.0
@State private var orb2ScaleY: CGFloat = 1.0
@State private var orb3Scale: CGFloat = 1.0 // Pulsing Blue Orb
@State private var orb3Opacity: Double = 0.3 // Pulsing Opacity
let colors: [Color]
var body: some View {
GeometryReader { proxy in
ZStack {
Color.black.ignoresSafeArea()
// 1. Moving Gradient Background
LinearGradient(gradient: Gradient(colors: colors), startPoint: start, endPoint: end)
.opacity(0.4)
.ignoresSafeArea()
.blur(radius: 100)
.onAppear {
withAnimation(.easeInOut(duration: 10).repeatForever(autoreverses: true)) {
start = UnitPoint(x: 4, y: 0)
end = UnitPoint(x: 0, y: 2)
}
}
// 2. Floating Orbs for Depth
// Orb 3: Pulsing Blue Orb (Lightens/Darkens)
Circle()
.fill(Color.blue)
.frame(width: proxy.size.width * 1.0, height: proxy.size.width * 1.0)
.blur(radius: 150) // Highly blurred
.opacity(orb3Opacity) // Pulsing Opacity
.scaleEffect(orb3Scale)
.offset(orb3Offset)
.position(x: proxy.size.width / 2, y: proxy.size.height / 2)
// Orb 1
Circle()
.fill(colors.first ?? .blue)
.frame(width: proxy.size.width * 0.9, height: proxy.size.width * 0.9)
.blur(radius: 120)
.scaleEffect(orb1Scale)
.offset(orb1Offset)
.position(x: proxy.size.width / 2, y: proxy.size.height / 2) // Center start
// Orb 2 (Black Orb) - Shape Shifting
Circle()
.fill(colors.last ?? .purple)
.frame(width: proxy.size.width * 0.8, height: proxy.size.width * 0.8)
.blur(radius: 120)
.scaleEffect(x: orb2ScaleX, y: orb2ScaleY) // Independent scaling creates ovals
.offset(orb2Offset)
.position(x: proxy.size.width / 2, y: proxy.size.height / 2) // Center start
}
.onAppear {
animateOrb1(in: proxy.size)
animateOrb2(in: proxy.size)
animateOrb3(in: proxy.size)
}
}
.ignoresSafeArea()
}
private func animateOrb1(in size: CGSize) {
// Random wandering loop
let duration = Double.random(in: 6...9)
withAnimation(.easeInOut(duration: duration)) {
orb1Offset = CGSize(
width: CGFloat.random(in: -size.width/2...size.width/2),
height: CGFloat.random(in: -size.height/2...size.height/2)
)
orb1Scale = CGFloat.random(in: 0.9...1.2)
}
DispatchQueue.main.asyncAfter(deadline: .now() + duration) {
animateOrb1(in: size)
}
}
private func animateOrb2(in size: CGSize) {
// Random wandering loop with Shape Shifting
let duration = Double.random(in: 7...10)
withAnimation(.easeInOut(duration: duration)) {
orb2Offset = CGSize(
width: CGFloat.random(in: -size.width/2...size.width/2),
height: CGFloat.random(in: -size.height/2...size.height/2)
)
// Independent X and Y scales morph the shape between circle and oval
orb2ScaleX = CGFloat.random(in: 0.4...1.1)
orb2ScaleY = CGFloat.random(in: 0.4...1.1)
}
DispatchQueue.main.asyncAfter(deadline: .now() + duration) {
animateOrb2(in: size)
}
}
private func animateOrb3(in size: CGSize) {
// Pulsing Blue Orb Animation
let duration = Double.random(in: 10...15)
withAnimation(.easeInOut(duration: duration)) {
orb3Offset = CGSize(
width: CGFloat.random(in: -size.width/3...size.width/3),
height: CGFloat.random(in: -size.height/3...size.height/3)
)
orb3Scale = CGFloat.random(in: 0.9...1.1)
orb3Opacity = Double.random(in: 0.1...0.6) // Lightens and darkens
}
DispatchQueue.main.asyncAfter(deadline: .now() + duration) {
animateOrb3(in: size)
}
}
}
extension Color {
var complementary: Color {
let uiColor = UIColor(self)
var h: CGFloat = 0
var s: CGFloat = 0
var b: CGFloat = 0
var a: CGFloat = 0
uiColor.getHue(&h, saturation: &s, brightness: &b, alpha: &a)
let newHue = h + 0.5
let finalHue = newHue > 1.0 ? newHue - 1.0 : newHue
return Color(hue: finalHue, saturation: s, brightness: b, opacity: a)
}
}
extension View {
func liquidCard() -> some View {
self.modifier(LiquidGlassCard())
}
func glowingEdge(color: Color) -> some View {
self.modifier(GlowingEdge(color: color))
}
}

AtTable/InputBar.swift Normal file

@@ -0,0 +1,191 @@
import SwiftUI
struct InputBar: View {
let userRole: UserRole
@Binding var text: String
@Binding var isRecording: Bool
var transcript: String // Live transcript text from the speech recognizer
var onSend: () -> Void
var onToggleRecording: () -> Void
@State private var orbScale: CGFloat = 1.0
var body: some View {
VStack(spacing: 0) {
// Gradient Divider
LinearGradient(
colors: [.clear, .white.opacity(0.2), .clear],
startPoint: .leading,
endPoint: .trailing
)
.frame(height: 1)
ZStack {
Rectangle()
.fill(.ultraThinMaterial)
.ignoresSafeArea()
if userRole == .deaf {
deafInputView
} else {
hearingInputView
}
}
.frame(height: userRole == .hearing ? 200 : 160) // Increased height for transcript
}
}
var deafInputView: some View {
VStack(spacing: 12) {
HStack(spacing: 12) {
TextField("Type a message...", text: $text)
.padding()
.background(Color.black.opacity(0.3))
.cornerRadius(24)
.overlay(
RoundedRectangle(cornerRadius: 24)
.stroke(.white.opacity(0.2), lineWidth: 1)
)
.foregroundColor(.white)
.submitLabel(.send)
.onSubmit {
if !text.isEmpty {
onSend()
}
}
Button(action: {
UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
onSend()
}) {
Image(systemName: "arrow.up")
.font(.system(size: 20, weight: .bold))
.foregroundColor(.black)
.frame(width: 44, height: 44)
.background(
Circle()
.fill(text.isEmpty ? Color.gray : Color.white)
)
.shadow(radius: 5)
}
.disabled(text.isEmpty)
}
.padding(.horizontal)
ScrollView(.horizontal, showsIndicators: false) {
HStack(spacing: 10) {
ForEach(["Yes", "No", "Hold on", "Thanks", "Please repeat"], id: \.self) { phrase in
Button(action: {
text = phrase
onSend()
}) {
Text(phrase)
.font(.caption)
.fontWeight(.bold)
.padding(.horizontal, 16)
.padding(.vertical, 8)
.background(Color.white.opacity(0.1))
.cornerRadius(16)
.overlay(
RoundedRectangle(cornerRadius: 16)
.stroke(.white.opacity(0.3), lineWidth: 1)
)
.foregroundColor(.white)
}
}
}
.padding(.horizontal)
}
}
.padding(.vertical)
}
var hearingInputView: some View {
VStack(spacing: 16) {
// Live Transcription Box
if !transcript.isEmpty {
ScrollViewReader { proxy in
ScrollView {
Text(transcript)
.font(.system(.body, design: .rounded))
.foregroundColor(.white)
.multilineTextAlignment(.leading) // Leading alignment reads better while scrolling
.padding()
.id("transcriptionText")
}
.frame(height: 60) // Approx 2 lines + padding
.frame(maxWidth: .infinity)
.background(
RoundedRectangle(cornerRadius: 16)
.fill(Color.black.opacity(0.2))
)
.overlay(
RoundedRectangle(cornerRadius: 16)
.stroke(.white.opacity(0.1), lineWidth: 1)
)
.onChange(of: transcript) {
withAnimation {
proxy.scrollTo("transcriptionText", anchor: .bottom)
}
}
}
.padding(.horizontal)
.transition(.move(edge: .bottom).combined(with: .opacity))
} else {
Text(isRecording ? "Listening..." : "Press to speak...")
.frame(height: 60)
.frame(maxWidth: .infinity)
.background(
RoundedRectangle(cornerRadius: 16)
.fill(Color.black.opacity(0.1))
)
.overlay(
RoundedRectangle(cornerRadius: 16)
.stroke(.white.opacity(0.1), lineWidth: 1)
)
.font(.system(.body, design: .rounded))
.foregroundColor(.white.opacity(0.5))
.padding(.horizontal)
}
HStack {
Spacer()
// Glowing Intelligence Orb
Button(action: onToggleRecording) {
ZStack {
// Outer Glow
Circle()
.fill(
AngularGradient(
gradient: Gradient(colors: [.blue, .purple, .cyan, .blue]),
center: .center
)
)
.frame(width: 80, height: 80)
.blur(radius: isRecording ? 10 : 0)
.rotationEffect(.degrees(isRecording ? 360 : 0))
.animation(isRecording ? Animation.linear(duration: 2).repeatForever(autoreverses: false) : .default, value: isRecording)
// Core
Circle()
.fill(.ultraThinMaterial)
.frame(width: 76, height: 76)
.overlay(
Image(systemName: isRecording ? "stop.fill" : "mic.fill")
.font(.title)
.foregroundColor(.white)
.shadow(color: .white, radius: 5)
)
}
}
.scaleEffect(isRecording ? 1.1 : 1.0)
.animation(.spring(response: 0.3, dampingFraction: 0.5), value: isRecording)
Spacer()
}
}
.padding(.bottom, 20)
.padding(.top, 10)
}
}

AtTable/MeshMessage.swift Normal file

@@ -0,0 +1,17 @@
import Foundation
struct MeshMessage: Codable, Identifiable, Hashable {
var id: UUID
var senderID: String // MCPeerID.displayName (for display purposes)
var senderNodeID: String = "" // Stable node ID per install
var senderInstance: Int = 0 // Monotonic instance ID to detect ghosts/restarts
var senderRole: UserRole
var senderColorHex: String
var content: String
var isTranscription: Bool // True = Audio source, False = Typed source
var isPartial: Bool = false // True = Live partial result, False = Final confirmed result
var isHandshake: Bool = false // True = Handshake message with user details
var isKeepAlive: Bool = false // True = Heartbeat to maintain AWDL links (ignore in UI)
var connectedNodeIDs: [String]? = nil // Gossip: List of NodeIDs this peer is connected to (for clique repair)
var timestamp: Date
}

AtTable/MessageBubble.swift Normal file

@@ -0,0 +1,84 @@
import SwiftUI
struct MessageBubble: View {
let message: MeshMessage
let isMyMessage: Bool
var body: some View {
HStack(alignment: .bottom, spacing: 10) {
if isMyMessage { Spacer() }
if !isMyMessage {
// Avatar Placeholder
Circle()
.fill(Color(hex: message.senderColorHex))
.frame(width: 30, height: 30)
.shadow(color: Color(hex: message.senderColorHex).opacity(0.5), radius: 5)
}
VStack(alignment: isMyMessage ? .trailing : .leading, spacing: 4) {
if !isMyMessage {
Text(message.senderID)
.font(.caption2)
.fontWeight(.bold)
.foregroundColor(.white.opacity(0.7))
}
HStack(alignment: .top) {
if message.isTranscription {
Image(systemName: "waveform")
.font(.caption)
.foregroundColor(.white.opacity(0.6))
}
Text(message.content)
.font(.system(.body, design: .rounded))
.foregroundColor(.white)
.fixedSize(horizontal: false, vertical: true)
}
.padding(12)
.background(
ZStack {
if isMyMessage {
LinearGradient(
colors: [Color.blue.opacity(0.8), Color.purple.opacity(0.8)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
} else {
Color(hex: message.senderColorHex).opacity(0.3)
}
}
)
.background(.ultraThinMaterial)
.cornerRadius(18, corners: isMyMessage ? [.topLeft, .topRight, .bottomLeft] : [.topLeft, .topRight, .bottomRight])
.shadow(color: isMyMessage ? .blue.opacity(0.3) : Color(hex: message.senderColorHex).opacity(0.2), radius: 5, x: 0, y: 2)
.overlay(
RoundedCorner(radius: 18, corners: isMyMessage ? [.topLeft, .topRight, .bottomLeft] : [.topLeft, .topRight, .bottomRight])
.stroke(.white.opacity(0.2), lineWidth: 1)
)
}
if !isMyMessage { Spacer() }
}
.padding(.horizontal)
.padding(.vertical, 4)
}
}
// Rounded Corner Shape for Bubble
struct RoundedCorner: Shape {
var radius: CGFloat = .infinity
var corners: UIRectCorner = .allCorners
func path(in rect: CGRect) -> Path {
let path = UIBezierPath(roundedRect: rect, byRoundingCorners: corners, cornerRadii: CGSize(width: radius, height: radius))
return Path(path.cgPath)
}
}
extension View {
func cornerRadius(_ radius: CGFloat, corners: UIRectCorner) -> some View {
clipShape(RoundedCorner(radius: radius, corners: corners))
}
}

AtTable/MultipeerSession.swift Normal file
File diff suppressed because it is too large

AtTable/NetworkMonitor.swift Normal file

@@ -0,0 +1,26 @@
import Foundation
import Network
import Combine
class NetworkMonitor: ObservableObject {
static let shared = NetworkMonitor()
private let monitor = NWPathMonitor()
private let queue = DispatchQueue(label: "NetworkMonitor")
@Published var isConnected: Bool = true
@Published var isWiFi: Bool = true
private init() { // Singleton: use NetworkMonitor.shared
monitor.pathUpdateHandler = { [weak self] path in
DispatchQueue.main.async {
self?.isConnected = path.status == .satisfied
self?.isWiFi = path.usesInterfaceType(.wifi)
}
}
monitor.start(queue: queue)
}
deinit {
monitor.cancel()
}
}

AtTable/NodeIdentity.swift Normal file

@@ -0,0 +1,32 @@
import Foundation
/// Provides a stable node identity for mesh networking.
/// - `nodeID`: Stable per install (persisted in UserDefaults)
/// - `instance`: Monotonic counter per session start
struct NodeIdentity {
private static let nodeIDKey = "com.attable.mesh.nodeID"
private static let instanceKey = "com.attable.mesh.instance"
/// Get or create a stable nodeID for this installation
static var nodeID: String {
if let existing = UserDefaults.standard.string(forKey: nodeIDKey) {
return existing
}
let newID = UUID().uuidString
UserDefaults.standard.set(newID, forKey: nodeIDKey)
return newID
}
/// Increment and return the current instance counter
static func nextInstance() -> Int {
let current = UserDefaults.standard.integer(forKey: instanceKey)
let next = current + 1
UserDefaults.standard.set(next, forKey: instanceKey)
return next
}
/// Get the current instance without incrementing (for reference)
static var currentInstance: Int {
return UserDefaults.standard.integer(forKey: instanceKey)
}
}

AtTable/OnboardingView.swift Normal file

@@ -0,0 +1,181 @@
import SwiftUI
struct OnboardingView: View {
@Binding var isOnboardingComplete: Bool
@Binding var userName: String
@Binding var userRole: UserRole
@Binding var userColorHex: String
let colors: [String] = [
"#FF0055", // Neon Red
"#00FF99", // Neon Green
"#00CCFF", // Neon Blue
"#9900FF", // Neon Purple
"#FF9900", // Neon Orange
"#FF00CC", // Neon Pink
"#00FFEA", // Cyan
"#FFFFFF" // White
]
@State private var showQRCode = false
var body: some View {
ZStack {
MeshBackground(colors: [
Color(hex: userColorHex),
userRole == .deaf ? .purple : .blue,
.black
])
// Top Right QR Code Link
VStack {
HStack {
Spacer()
Button(action: {
showQRCode = true
}) {
HStack(spacing: 6) {
Image(systemName: "qrcode")
Text("QR code for the app")
}
.font(.caption)
.fontWeight(.bold)
.foregroundColor(.white)
.padding(8)
.background(.ultraThinMaterial)
.cornerRadius(12)
}
}
Spacer()
}
.padding(.top, 50) // Safe area
.padding(.horizontal)
.ignoresSafeArea()
VStack(spacing: 40) {
DesignSystem.Typography.title("Welcome")
.foregroundColor(.white)
.shadow(radius: 10)
VStack(spacing: 25) {
VStack(alignment: .leading, spacing: 8) {
Label("Your name", systemImage: "person.fill")
.font(.headline)
.foregroundColor(.white.opacity(0.8))
TextField("Enter Name", text: $userName)
.padding()
.background(.ultraThinMaterial)
.cornerRadius(12)
.foregroundColor(.white)
}
VStack(alignment: .leading, spacing: 8) {
Label("I am...", systemImage: "ear")
.font(.headline)
.foregroundColor(.white.opacity(0.8))
Picker("Role", selection: $userRole) {
ForEach(UserRole.allCases, id: \.self) { role in
Text(role.displayName).tag(role)
}
}
.pickerStyle(SegmentedPickerStyle())
.colorMultiply(.white)
.onChange(of: userRole) { _, newRole in
withAnimation {
if newRole == .hearing {
userColorHex = "#00FF99" // Neon Green
} else {
userColorHex = "#00CCFF" // Neon Blue
}
}
}
}
VStack(alignment: .leading, spacing: 15) {
Label("Pick your Aura color", systemImage: "paintpalette.fill")
.font(.headline)
.foregroundColor(.white.opacity(0.8))
LazyVGrid(columns: [GridItem(.adaptive(minimum: 44))], spacing: 15) {
ForEach(colors, id: \.self) { color in
Circle()
.fill(Color(hex: color))
.frame(width: 44, height: 44)
.overlay(
Circle()
.stroke(.white, lineWidth: userColorHex == color ? 4 : 0)
.shadow(color: Color(hex: color).opacity(0.8), radius: 10)
)
.scaleEffect(userColorHex == color ? 1.1 : 1.0)
.onTapGesture {
withAnimation(.spring()) {
userColorHex = color
}
}
}
}
}
}
.padding(30)
.liquidCard()
.padding(.horizontal)
Button(action: {
withAnimation {
if !userName.isEmpty {
isOnboardingComplete = true
}
}
}) {
Text("Start Conversation")
.font(.headline)
.fontWeight(.bold)
.foregroundColor(.black)
.frame(maxWidth: .infinity)
.padding()
.background(
LinearGradient(colors: [.white, .white.opacity(0.8)], startPoint: .top, endPoint: .bottom)
)
.cornerRadius(20)
.glowingEdge(color: .white)
}
.disabled(userName.isEmpty)
.opacity(userName.isEmpty ? 0.5 : 1)
.padding(.horizontal, 40)
}
}
.sheet(isPresented: $showQRCode) {
QRCodeView()
}
}
}
// Shared hex Color initializer (used by views throughout the app)
extension Color {
init(hex: String) {
let hex = hex.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
var int: UInt64 = 0
Scanner(string: hex).scanHexInt64(&int)
let a, r, g, b: UInt64
switch hex.count {
case 3: // RGB (12-bit)
(a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
case 6: // RGB (24-bit)
(a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
case 8: // ARGB (32-bit)
(a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
default:
(a, r, g, b) = (255, 0, 0, 0) // Invalid hex length: fall back to opaque black
}
self.init(
.sRGB,
red: Double(r) / 255,
green: Double(g) / 255,
blue: Double(b) / 255,
opacity: Double(a) / 255
)
}
}

AtTable/PeerUser.swift Normal file

@@ -0,0 +1,18 @@
import Foundation
import MultipeerConnectivity
struct PeerUser: Identifiable, Hashable {
let id: MCPeerID
let nodeID: String // Stable identifier from handshake (for reliable identity)
let name: String
let colorHex: String
let role: UserRole // Role from handshake (for data integrity)
init(peerID: MCPeerID, nodeID: String = "", name: String? = nil, colorHex: String = "#808080", role: UserRole = .hearing) {
self.id = peerID
self.nodeID = nodeID
self.name = name ?? peerID.displayName
self.colorHex = colorHex
self.role = role
}
}

AtTable/QRCodeView.swift Normal file

@@ -0,0 +1,47 @@
import SwiftUI
struct QRCodeView: View {
@Environment(\.dismiss) var dismiss
var body: some View {
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
VStack(spacing: 30) {
// Header
HStack {
Spacer()
Button(action: {
dismiss()
}) {
Image(systemName: "xmark.circle.fill")
.font(.system(size: 30))
.foregroundColor(.white.opacity(0.8))
}
}
.padding()
Spacer()
Image("qrcode")
.resizable()
.interpolation(.none)
.scaledToFit()
.frame(maxWidth: 280, maxHeight: 280)
.padding(20)
.background(Color.white) // QR code usually needs white background to be scannable
.cornerRadius(20)
.shadow(color: .white.opacity(0.2), radius: 20)
Text("Download At-Table app from Apple Appstore")
.font(.title3)
.fontWeight(.bold)
.multilineTextAlignment(.center)
.foregroundColor(.white)
.padding(.horizontal, 40)
Spacer()
}
}
}
}

AtTable/SpeechRecognizer.swift Normal file

@@ -0,0 +1,262 @@
import Foundation
import Speech
import AVFoundation
import SwiftUI
import Combine
class SpeechRecognizer: ObservableObject {
@Published var transcript: String = ""
@Published var isRecording: Bool = false
@Published var error: String?
// Callback for when a final result is ready to be broadcast
var onFinalResult: ((String) -> Void)?
// Callback for partial results
var onPartialResult: ((String) -> Void)?
private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?
private let audioEngine = AVAudioEngine()
private var silenceTimer: Timer?
init() {
requestAuthorization()
}
private func requestAuthorization() {
SFSpeechRecognizer.requestAuthorization { authStatus in
DispatchQueue.main.async {
switch authStatus {
case .authorized:
break
case .denied:
self.error = "Speech recognition authorization denied"
case .restricted:
self.error = "Speech recognition restricted on this device"
case .notDetermined:
self.error = "Speech recognition not yet authorized"
@unknown default:
self.error = "Unknown authorization status"
}
}
}
}
func startRecording() {
// Public entry point for the UI toggle: if we are already
// recording, this call means "stop". Internal auto-restarts
// bypass this check by calling startRecordingInternal() directly.
if isRecording {
stopRecording()
return
}
startRecordingInternal()
}
private func startRecordingInternal() {
// Reset state
lastFlushedTranscript = ""
currentFullTranscription = ""
// Cancel previous task if any
if recognitionTask != nil {
recognitionTask?.cancel()
recognitionTask = nil
}
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
} catch {
self.error = "Audio session setup failed: \(error.localizedDescription)"
return
}
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
guard let recognitionRequest = recognitionRequest else {
self.error = "Unable to create recognition request"
return
}
// Check network status to determine recognition mode
if NetworkMonitor.shared.isConnected {
// Force on-device recognition to false as per PRD for higher accuracy when online
recognitionRequest.requiresOnDeviceRecognition = false
print("SpeechRecognizer: Using server-based recognition (Online)")
} else {
// Fallback to on-device recognition when offline
recognitionRequest.requiresOnDeviceRecognition = true
print("SpeechRecognizer: Using on-device recognition (Offline)")
}
recognitionRequest.shouldReportPartialResults = true
// Keep a local reference to the request to check against stale callbacks
let currentRequest = recognitionRequest
let inputNode = audioEngine.inputNode
// Remove any existing tap to prevent "Tap already installed" crash
inputNode.removeTap(onBus: 0)
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest) { [weak self] result, error in
guard let self = self else { return }
// Safety Check: Ignore callbacks from old/stale requests
// This prevents a previous session's error/completion from stopping the NEW session
guard self.recognitionRequest === currentRequest else {
return
}
var isFinal = false
if let result = result {
DispatchQueue.main.async {
let fullText = result.bestTranscription.formattedString
self.currentFullTranscription = fullText
// Robust Delta Calculation
var newText = fullText
if !self.lastFlushedTranscript.isEmpty {
if fullText.hasPrefix(self.lastFlushedTranscript) {
// Perfect match logic
newText = String(fullText.dropFirst(self.lastFlushedTranscript.count))
} else if fullText.lowercased().hasPrefix(self.lastFlushedTranscript.lowercased()) {
// Case-insensitive match logic
newText = String(fullText.dropFirst(self.lastFlushedTranscript.count))
} else {
// Fallback: History rewritten (punctuation/spelling change).
// Blindly drop the length of the old text to avoid duplicating history.
let dropCount = min(fullText.count, self.lastFlushedTranscript.count)
newText = String(fullText.dropFirst(dropCount))
}
}
self.transcript = newText.trimmingCharacters(in: .whitespacesAndNewlines)
// Trigger partial result callback
self.onPartialResult?(self.transcript)
// Reset silence timer on every new result
self.resetSilenceTimer()
}
isFinal = result.isFinal
// If it's a final result, trigger the callback to broadcast
if isFinal {
DispatchQueue.main.async {
// Send the DELTA (self.transcript) not the full result
if !self.transcript.isEmpty {
self.onFinalResult?(self.transcript)
}
self.silenceTimer?.invalidate()
}
}
}
if error != nil || isFinal {
// Perform cleanup
self.audioEngine.stop()
inputNode.removeTap(onBus: 0)
self.recognitionRequest = nil
self.recognitionTask = nil
self.silenceTimer?.invalidate() // Safety invalidation
if error != nil {
DispatchQueue.main.async {
self.isRecording = false
}
}
}
}
let recordingFormat = inputNode.outputFormat(forBus: 0)
inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, when in
self.recognitionRequest?.append(buffer)
}
audioEngine.prepare()
do {
try audioEngine.start()
isRecording = true
transcript = "Listening..."
} catch {
self.error = "Audio engine start failed: \(error.localizedDescription)"
}
}
func stopRecording(shouldUpdateState: Bool = true) {
silenceTimer?.invalidate()
// Manual Flush: If we have pending text, send it now to guarantee it's not lost.
if !transcript.isEmpty && transcript != "Listening..." {
onFinalResult?(transcript)
transcript = ""
} else {
// Also clear if it was just "Listening..."
transcript = ""
}
// Always trigger an empty partial result to clear the UI on the receiver side
onPartialResult?("")
audioEngine.stop()
// We use cancel() here instead of endAudio() because we just handled the final result manually.
// This prevents double-sending if endAudio() were to trigger a final callback later.
recognitionTask?.cancel()
// Deactivate Audio Session to release Bluetooth/Mic resources
// This is critical to prevent interference with MultipeerConnectivity
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(false, options: .notifyOthersOnDeactivation)
} catch {
print("Failed to deactivate audio session: \(error)")
}
if shouldUpdateState {
isRecording = false
}
}
private func resetSilenceTimer() {
silenceTimer?.invalidate()
silenceTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: false) { [weak self] _ in
self?.handleSilence()
}
}
private var lastFlushedTranscript: String = ""
private var currentFullTranscription: String = ""
private func handleSilence() {
// Soft flush: Send what we have, clear local view, but keep engine running
guard !transcript.isEmpty && transcript != "Listening..." else { return }
// Send the final result
onFinalResult?(transcript)
// Mark the current *full* text as "flushed" so we ignore it in future updates
// We use the full string that the recognizer currently has
lastFlushedTranscript = currentFullTranscription
// Clear local transcript view
transcript = ""
onPartialResult?("")
print("Silence detected, flushed text. Engine continuing...")
}
}

AtTable/UserRole.swift Normal file

@@ -0,0 +1,13 @@
import Foundation
enum UserRole: String, Codable, CaseIterable {
case deaf
case hearing
var displayName: String {
switch self {
case .deaf: return "Deaf / HoH"
case .hearing: return "Hearing"
}
}
}

MPC_how_it_works.md Normal file

@@ -0,0 +1,255 @@
# Multipeer Connectivity (MPC) Architecture
This document explains how AtTable uses Apple's Multipeer Connectivity framework to create a peer-to-peer mesh network for real-time communication between deaf and hearing users.
---
## Overview
AtTable uses **Multipeer Connectivity (MPC)** to establish direct device-to-device connections without requiring a central server. The app supports connections over:
- **Wi-Fi** (same network)
- **Peer-to-peer Wi-Fi** (AWDL - Apple Wireless Direct Link)
- **Bluetooth**
When devices aren't on the same Wi-Fi network (e.g., on 5G/cellular), MPC automatically falls back to **AWDL** for peer-to-peer discovery and data transfer.
---
## User Onboarding Flow
### 1. Initial Setup (`OnboardingView.swift`)
When a user launches the app:
1. They enter their **name**
2. Select their **role** (Deaf or Hearing)
3. Choose an **aura color** (for visual identity in the mesh)
4. Tap **"Start Conversation"** to enter the mesh
```
User launches app → OnboardingView → Enter details → ChatView (mesh starts)
```
### 2. Identity Generation (`NodeIdentity.swift`)
Upon first launch, the app generates a **stable Node Identity**:
- **`nodeID`**: A UUID persisted in UserDefaults (stable per app installation)
- **`instance`**: A monotonic counter that increments each time a session starts
This identity system allows the mesh to:
- Reliably identify users across reconnections
- Detect and filter "ghost" peers (stale connections from previous sessions)
- Handle device reboots gracefully
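A minimal sketch of what this identity store could look like, backed by `UserDefaults` as described above (the key names are assumptions; only `nodeID`, `instance`, and `nextInstance()` are named in the code):
```swift
import Foundation

// Hedged sketch of a persistent node identity backed by UserDefaults.
enum NodeIdentity {
    private static let idKey = "mesh.nodeID"         // key names are assumptions
    private static let instanceKey = "mesh.instance"

    /// Stable per-installation UUID: created once, then reused forever.
    static var nodeID: String {
        let defaults = UserDefaults.standard
        if let existing = defaults.string(forKey: idKey) { return existing }
        let fresh = UUID().uuidString
        defaults.set(fresh, forKey: idKey)
        return fresh
    }

    /// Monotonic session counter: survives relaunches and reboots,
    /// which is what lets peers distinguish a rejoin from a ghost.
    static func nextInstance() -> Int {
        let defaults = UserDefaults.standard
        let next = defaults.integer(forKey: instanceKey) + 1
        defaults.set(next, forKey: instanceKey)
        return next
    }
}
```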
---
## Network Connection Process
### Discovery & Connection (`MultipeerSession.swift`)
When `ChatView` appears, it calls `multipeerSession.start()`, which:
1. **Sets up the MCSession** with encryption disabled (for faster AWDL connections)
2. **Starts browsing** for nearby peers using `MCNearbyServiceBrowser`
3. **Starts advertising** (after 0.5s delay) using `MCNearbyServiceAdvertiser`
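A hedged sketch of that three-step sequence, assuming `MultipeerSession` holds `session`, `browser`, and `advertiser` properties and that a `displayName` was captured at onboarding. The service type must match the `_access-mesh` Bonjour entries in `MeshInfo.plist`; the fixed 0.5s delay shown here becomes network-dependent in the next section:
```swift
import MultipeerConnectivity

func start() {
    let peerID = MCPeerID(displayName: displayName)
    // 1. Session with encryption disabled for faster AWDL handshakes.
    session = MCSession(peer: peerID, securityIdentity: nil, encryptionPreference: .none)
    session?.delegate = self
    // 2. Start browsing for nearby peers immediately.
    browser = MCNearbyServiceBrowser(peer: peerID, serviceType: "access-mesh")
    browser?.delegate = self
    browser?.startBrowsingForPeers()
    // 3. Start advertising slightly later to reduce simultaneous-invite races.
    //    Advertising our nodeID in discoveryInfo is an assumption (used for leader election).
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
        guard let self else { return }
        self.advertiser = MCNearbyServiceAdvertiser(peer: peerID,
                                                    discoveryInfo: ["nodeID": self.myNodeID],
                                                    serviceType: "access-mesh")
        self.advertiser?.delegate = self
        self.advertiser?.startAdvertisingPeer()
    }
}
```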
### Wi-Fi vs Cellular/5G Connections
| Network Type | Connection Method | Handshake Delay | Connection Time |
|--------------|-------------------|-----------------|-----------------|
| **Wi-Fi (same network)** | Infrastructure Wi-Fi | 0.5 seconds | Near-instant |
| **Cellular/5G** | AWDL (peer-to-peer Wi-Fi) | 1.5 seconds | Up to 60 seconds |
The app uses `NetworkMonitor.swift` to detect the current network type and adjusts timing:
```swift
let isWiFi = NetworkMonitor.shared.isWiFi
let delay: TimeInterval = isWiFi ? 0.5 : 1.5 // slower handshake for AWDL stability
```
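`NetworkMonitor` itself is likely a thin wrapper over `NWPathMonitor`; a sketch of the assumed shape:
```swift
import Network

final class NetworkMonitor {
    static let shared = NetworkMonitor()
    private let monitor = NWPathMonitor()
    private(set) var isWiFi = false

    private init() {
        monitor.pathUpdateHandler = { [weak self] path in
            // True only when the active path runs over infrastructure Wi-Fi.
            self?.isWiFi = path.usesInterfaceType(.wifi)
        }
        monitor.start(queue: DispatchQueue(label: "NetworkMonitor"))
    }
}
```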
### Deterministic Leader/Follower Protocol
To prevent connection races (both devices trying to invite each other), the app uses a **deterministic leader election**:
```swift
if myNodeID > theirNodeID {
// I am LEADER - I will send the invite
} else {
// I am FOLLOWER - I wait for their invite
}
```
This ensures exactly one device initiates each connection.
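In practice the rule is applied when the browser discovers a peer; a sketch, assuming the remote `nodeID` travels in the discovery info under a `"nodeID"` key:
```swift
func browser(_ browser: MCNearbyServiceBrowser,
             foundPeer peerID: MCPeerID,
             withDiscoveryInfo info: [String: String]?) {
    guard let theirNodeID = info?["nodeID"], let session = session else { return }
    if myNodeID > theirNodeID {
        // LEADER: the lexicographically larger nodeID sends the invitation.
        browser.invitePeer(peerID, to: session, withContext: nil, timeout: 30)
    }
    // FOLLOWER: do nothing; the invite arrives via the advertiser delegate.
}
```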
---
## Handshake Protocol
Once connected at the socket level, devices exchange **handshake messages** containing:
```swift
struct MeshMessage {
var senderNodeID: String // Stable identity
var senderInstance: Int // Session counter (for ghost detection)
var senderRole: UserRole // Deaf or Hearing
var senderColorHex: String // Aura color
var isHandshake: Bool // Identifies this as handshake
}
```
The handshake:
1. Registers the peer in `connectedPeerUsers` for UI display
2. Starts a **15-second stability timer** before clearing failure counters
3. Maps the `MCPeerID` to the stable `nodeID` for reliable identification
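A sketch of the sending side, triggered once `MCSession` reports `.connected` (assumes `MeshMessage` is `Codable` with a memberwise initializer and that the local identity fields live on the session object):
```swift
func session(_ session: MCSession, peer peerID: MCPeerID, didChange state: MCSessionState) {
    guard state == .connected else { return }
    let handshake = MeshMessage(senderNodeID: myNodeID,
                                senderInstance: myInstance,
                                senderRole: myRole,
                                senderColorHex: myColorHex,
                                isHandshake: true)
    if let data = try? JSONEncoder().encode(handshake) {
        // Reliable mode: the handshake must arrive before any chat traffic matters.
        try? session.send(data, toPeers: [peerID], with: .reliable)
    }
}
```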
---
## User Leaving the Conversation
### Explicit Leave (`ChatView.swift`)
When a user taps **"Leave"**:
```swift
Button(action: {
speechRecognizer.stopRecording() // Stop audio transcription
multipeerSession.stop() // Disconnect from mesh
isOnboardingComplete = false // Return to onboarding
})
```
### Disconnect Cleanup (`MultipeerSession.disconnect()`)
The `disconnect()` function performs complete cleanup:
1. **Cancel pending work**: Recovery tasks, connection timers
2. **Stop services**: Advertising and browsing
3. **Clear delegates**: Prevent zombie callbacks
4. **Disconnect session**: `session?.disconnect()`
5. **Clear all state**:
- `connectedPeers` / `connectedPeerUsers`
- `pendingInvites` / `latestByNodeID`
- `cooldownUntil` / `consecutiveFailures`
6. **Stop keep-alive heartbeats**
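A hedged sketch of `disconnect()` following those six steps (the timer and work-item property names are assumptions; the state collections are named in the list above):
```swift
func disconnect() {
    // 1. Cancel pending work.
    recoveryWorkItem?.cancel()        // hypothetical DispatchWorkItem
    connectionTimer?.invalidate()     // hypothetical Timer
    // 2. Stop services.
    advertiser?.stopAdvertisingPeer()
    browser?.stopBrowsingForPeers()
    // 3. Clear delegates to prevent zombie callbacks.
    advertiser?.delegate = nil
    browser?.delegate = nil
    session?.delegate = nil
    // 4. Disconnect the session.
    session?.disconnect()
    // 5. Clear all state.
    connectedPeers.removeAll()
    connectedPeerUsers.removeAll()
    pendingInvites.removeAll()
    latestByNodeID.removeAll()
    cooldownUntil.removeAll()
    consecutiveFailures.removeAll()
    // 6. Stop keep-alive heartbeats.
    heartbeatTimer?.invalidate()      // hypothetical Timer
    heartbeatTimer = nil
}
```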
### Partial Transcript Preservation
If a peer disconnects mid-speech, their **partial transcript is preserved** as a final message:
```swift
if let partialText = liveTranscripts[peerKey], !partialText.isEmpty {
let finalMessage = MeshMessage(content: partialText, ...)
receivedMessages.append(finalMessage)
}
```
---
## Rejoining the Conversation
### Identity Recovery
When a user returns to the conversation:
1. App resets `isOnboardingComplete = false` on every launch (intentional: the user always returns to the onboarding screen)
2. User completes onboarding again (name/role/color preserved in `@AppStorage`)
3. `multipeerSession.start()` called again
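A sketch of how the onboarding fields survive relaunch via `@AppStorage` (the storage keys shown are assumptions):
```swift
import SwiftUI

struct OnboardingView: View {
    @AppStorage("displayName") private var displayName = ""
    @AppStorage("userRole") private var userRoleRaw = UserRole.hearing.rawValue
    @AppStorage("auraColorHex") private var auraColorHex = "#00E5FF" // placeholder default
    @AppStorage("isOnboardingComplete") private var isOnboardingComplete = false

    var body: some View {
        // Name / role / color form omitted; setting isOnboardingComplete = true
        // is what moves the user into ChatView.
        EmptyView()
    }
}
```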
### Instance Increment
The key to reliable rejoining is the **instance counter**:
```swift
myInstance = NodeIdentity.nextInstance() // Monotonically increasing
```
When other devices see the new instance:
1. **Ghost Detection**: Old connections with lower instances are rejected
2. **Cooldown Clear**: Any cooldowns from previous failures are removed
3. **Fresh Connect**: The leader initiates a new invitation
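A sketch of the receiving side's check, assuming `latestByNodeID` maps each `nodeID` to the highest instance seen:
```swift
func isGhost(_ message: MeshMessage) -> Bool {
    let knownInstance = latestByNodeID[message.senderNodeID] ?? 0
    if message.senderInstance < knownInstance {
        return true                                      // stale session: drop it
    }
    if message.senderInstance > knownInstance {
        latestByNodeID[message.senderNodeID] = message.senderInstance
        cooldownUntil[message.senderNodeID] = nil        // fresh rejoin clears cooldown
    }
    return false
}
```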
### Handling Stale Peers
The mesh uses multiple mechanisms to handle rejoins:
| Mechanism | Purpose |
|-----------|---------|
| **Ghost Filtering** | Reject messages/invites from older instances |
| **Cooldown Clear** | Give returning peers a fresh chance |
| **Half-Open Deadlock Fix** | If we think we're connected but they invite us, accept the new invite |
| **Stability Timer** | Only reset failure counts after 15s of stable connection |
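The half-open deadlock fix from the table lives in the advertiser delegate; a minimal sketch:
```swift
func advertiser(_ advertiser: MCNearbyServiceAdvertiser,
                didReceiveInvitationFromPeer peerID: MCPeerID,
                withContext context: Data?,
                invitationHandler: @escaping (Bool, MCSession?) -> Void) {
    // Even if we believe this peer is already connected, a fresh invite means
    // their side dropped the link (half-open). Accepting replaces the stale state.
    invitationHandler(true, session)
}
```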
---
## Keep-Alive & Mesh Health
### Heartbeat System
When connected, the mesh sends **heartbeats every 10 seconds**:
```swift
let message = MeshMessage(
content: "💓",
isKeepAlive: true,
connectedNodeIDs: connectedPeerUsers.map { $0.nodeID } // Gossip
)
```
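A sketch of the scheduling side (`sendHeartbeat()` is a hypothetical helper that builds and broadcasts the message above):
```swift
heartbeatTimer = Timer.scheduledTimer(withTimeInterval: 10.0, repeats: true) { [weak self] _ in
    self?.sendHeartbeat()
}
```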
### Gossip Protocol
Heartbeats include a list of connected peers, enabling **clique repair**:
1. Device A receives heartbeat from Device B
2. If B knows Device C but A doesn't, A can proactively invite C
3. This heals mesh partitions without requiring everyone to be discoverable
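A sketch of the repair step on receipt of a heartbeat (`inviteIfDiscovered(nodeID:)` is a hypothetical helper that resolves a known-but-unconnected peer and invites it):
```swift
func handleGossip(_ heartbeat: MeshMessage) {
    let myNeighbors = Set(connectedPeerUsers.map { $0.nodeID })
    let theirNeighbors = Set(heartbeat.connectedNodeIDs ?? [])
    // Peers the sender knows about but we don't are candidates for proactive invites.
    for missing in theirNeighbors.subtracting(myNeighbors) where missing != myNodeID {
        if myNodeID > missing {            // the leader rule still decides who invites
            inviteIfDiscovered(nodeID: missing)
        }
    }
}
```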
---
## Connection Recovery
### Exponential Backoff
Failed connections trigger increasing cooldown periods:
```swift
// 0.5s → 1.0s → 2.0s → 4.0s ... capped at 30s
let delay = min(0.5 * pow(2.0, Double(failures - 1)), 30.0)
```
### Smart Retry
Instead of restarting everything, failed connections are retried individually:
1. Only the **leader** initiates retries (prevents race conditions)
2. Retries respect cooldown periods
3. After 5 consecutive failures → **"Poisoned State"** triggers full reset
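A sketch combining the cooldown, backoff, and leader checks for a single peer, as described above:
```swift
func retryConnection(to nodeID: String, peerID: MCPeerID) {
    guard myNodeID > nodeID else { return }                         // only the leader retries
    if let until = cooldownUntil[nodeID], until > Date() { return } // still cooling down
    let failures = consecutiveFailures[nodeID] ?? 0
    let delay = min(0.5 * pow(2.0, Double(max(failures - 1, 0))), 30.0)
    DispatchQueue.main.asyncAfter(deadline: .now() + delay) { [weak self] in
        guard let self, let session = self.session else { return }
        self.browser?.invitePeer(peerID, to: session, withContext: nil, timeout: 30)
    }
}
```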
### Poisoned State Recovery
If a peer has too many consecutive failures:
```swift
if failures >= 5 {
restartServices(forcePoisonedRecovery: true)
// Creates new MCPeerID, clears all cooldowns
}
```
---
## Summary
| Event | What Happens |
|-------|--------------|
| **User joins** | NodeID retrieved, instance incremented, advertise + browse started |
| **On Wi-Fi** | Fast handshake (0.5s), near-instant connections |
| **On 5G/Cellular** | AWDL used, slower handshake (1.5s), up to 60s to connect |
| **User leaves** | Full cleanup, partial transcripts preserved |
| **User rejoins** | New instance number, ghosts filtered, cooldowns cleared |
| **Connection fails** | Exponential backoff, smart retry by leader only |
The architecture prioritizes **reliability over speed**, using defensive mechanisms like ghost filtering, stability timers, and gossip-based clique repair to maintain mesh health despite the inherent unreliability of peer-to-peer wireless connections.

11
MeshInfo.plist Normal file
View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSBonjourServices</key>
<array>
<string>_access-mesh._tcp</string>
<string>_access-mesh._udp</string>
</array>
</dict>
</plist>

40
README.md Normal file
View File

@@ -0,0 +1,40 @@
# At-Table
**Bridging the gap between Deaf and Hearing conversations through local mesh networking and calm design.**
At-Table is an iOS accessibility application designed to enable seamless, simultaneous conversations between multiple deaf and hearing individuals. By leveraging a full mesh network, devices connect automatically without the need for Wi-Fi routers or cellular data, creating a local "table" of communication.
The app prioritizes a stress-free environment for deaf users, utilizing calming visuals to counteract the anxiety often associated with following rapid-fire hearing conversations.
## 🌟 Key Features
### Core Communication
* **Full Mesh Networking:** Devices enter a peer-to-peer full mesh network using Apple's Multipeer Connectivity. No manual pairing or internet connection is required to chat.
* **Real-Time Transcription (Hearing Role):** Hearing users speak naturally. The app transcribes speech in real time and automatically posts the text as a message to the group once the speaker pauses.
* **Text & Quick Replies (Deaf Role):** Deaf users can type messages or use one-tap "Quick Reply" chips (Yes, No, Hold on, Thanks) for rapid interaction.
* **Role-Based UI:** Distinct interfaces tailored for "Deaf/HoH" and "Hearing" users, optimized for their specific communication needs.
### Visual Design & Atmosphere
* **Calm ASMR Aesthetic:** The background features a slow-moving, blurring mesh of "Liquid Glass" orbs. This visual ASMR is designed to induce a calm mood, helping alleviate the cognitive load and stress deaf users experience during hearing conversations.
* **Personalized Auras:** Users select a "Neon Aura" color during onboarding. This color identifies them in the mesh and dynamically influences the background animation of their device.
* **Live Transcript Streaming:** Hearing users' speech appears as "partial" streaming text on receiving devices before becoming a finalized message, allowing for faster reading speeds.
### Technical Resilience
* **Ghost Filtering:** Advanced logic prevents "ghost" connections (stale peer signals) from clogging the network.
* **Smart Recovery:** Includes "Identity Thrashing" prevention and auto-reconnection logic to handle interruptions or app backgrounding seamlessly.
* **Hybrid Speech Recognition:** Utilizes server-based speech recognition for high accuracy when online, with a seamless fallback to on-device recognition when offline.
## 📱 How It Works
1. **Onboarding:** Users enter a display name, select their role (Deaf/HoH or Hearing), and choose an identifying color.
2. **Discovery:** The app automatically advertises and browses for nearby devices running At-Table.
3. **Connection:** Devices negotiate a connection automatically using a leader/follower algorithm to prevent collision loops.
4. **The Conversation:**
* **Hearing users** simply hold their phone; the microphone listens for speech, visualizes it, and broadcasts it.
* **Deaf users** read incoming bubbles and participate via text.
## 🔒 Privacy
* **Transient Data:** Messages and transcriptions are ephemeral. They exist only for the duration of the session and are not stored in any persistent database or uploaded to a cloud server (other than the temporary audio buffer sent to Apple for transcription if online).
* **Local Connectivity:** Chat data flows directly between devices over Wi-Fi/Bluetooth peer-to-peer protocols.

BIN
Screenshots/iPad/1.PNG Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 MiB

BIN
Screenshots/iPad/2.PNG Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

BIN
Screenshots/iPhone/1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 806 KiB

BIN
Screenshots/iPhone/2.PNG Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 MiB

31
check_build.sh Executable file
View File

@@ -0,0 +1,31 @@
#!/bin/zsh
set -o pipefail # Propagate xcodebuild's exit status through the xcbeautify pipe
# --- Configuration ---
TARGET_NAME="AtTable"
DEVICE_NAME="iPhone 17 Pro"
BUILD_PATH="$(pwd)/build"
echo "🔍 Checking compilation for $TARGET_NAME..."
# Build Only (No Install/Launch)
# Uses -target instead of -scheme to bypass potential scheme misconfigurations
# Explicitly unsets CC/CXX/LIBCLANG_PATH to avoid environment pollution
# Also overrides them in xcodebuild arguments to ensure Xcode uses default toolchain
# Uses SYMROOT instead of -derivedDataPath because -derivedDataPath requires -scheme
env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \
    -target "$TARGET_NAME" \
    -sdk iphonesimulator \
    -destination "platform=iOS Simulator,name=$DEVICE_NAME" \
    -configuration Debug \
    SYMROOT="$BUILD_PATH" \
    CC=clang CXX=clang++ LIBCLANG_PATH= \
    build | xcbeautify
# Check exit code of the pipeline
if [ $? -eq 0 ]; then
    echo "✅ Build Succeeded. No errors found."
else
    echo "❌ Build Failed."
    exit 1
fi