Initial commit

Add iOS app with Node.js/TypeScript backend for BeMyEars project.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-01-19 21:51:47 -05:00
commit d29b8182ca
52 changed files with 3623 additions and 0 deletions

56
.gitignore vendored Normal file
View File

@@ -0,0 +1,56 @@
# Build output
build/
dist/
DerivedData/
# Dependencies
Pods/
node_modules/
# Xcode
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/
*.xccheckout
*.moved-aside
*.xcuserstate
*.xcscmblueprint
# macOS
.DS_Store
.AppleDouble
.LSOverride
._*
# Environment and secrets
.env
.env.local
.env.*.local
*.pem
# IDE
.idea/
.vscode/
*.swp
*.swo
*~
# Logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Testing
coverage/
# Misc
*.tmp
*.temp
.cache/

View File

@@ -0,0 +1,418 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXBuildFile section */
1C1EB9EF3CA4E91D1ED3391E /* Pods_BeMyEars.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 65598E6F009FE7A358A5011E /* Pods_BeMyEars.framework */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
4AD91A154B87B7EF1D84DFC2 /* Pods-BeMyEars.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-BeMyEars.release.xcconfig"; path = "Target Support Files/Pods-BeMyEars/Pods-BeMyEars.release.xcconfig"; sourceTree = "<group>"; };
6112C041C715DA9A93248ADC /* Pods-BeMyEars.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-BeMyEars.debug.xcconfig"; path = "Target Support Files/Pods-BeMyEars/Pods-BeMyEars.debug.xcconfig"; sourceTree = "<group>"; };
65598E6F009FE7A358A5011E /* Pods_BeMyEars.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_BeMyEars.framework; sourceTree = BUILT_PRODUCTS_DIR; };
7F62C4E32EF0F0DB00EF8370 /* BeMyEars.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = BeMyEars.app; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
7F62C4E52EF0F0DB00EF8370 /* BeMyEars */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = BeMyEars;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
7F62C4E02EF0F0DB00EF8370 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
1C1EB9EF3CA4E91D1ED3391E /* Pods_BeMyEars.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
61964F09D1F4E5D0178F333A /* Pods */ = {
isa = PBXGroup;
children = (
6112C041C715DA9A93248ADC /* Pods-BeMyEars.debug.xcconfig */,
4AD91A154B87B7EF1D84DFC2 /* Pods-BeMyEars.release.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
7F62C4DA2EF0F0DB00EF8370 = {
isa = PBXGroup;
children = (
7F62C4E52EF0F0DB00EF8370 /* BeMyEars */,
7F62C4E42EF0F0DB00EF8370 /* Products */,
61964F09D1F4E5D0178F333A /* Pods */,
E8B54FF67C19AC2F5F6D3F52 /* Frameworks */,
);
sourceTree = "<group>";
};
7F62C4E42EF0F0DB00EF8370 /* Products */ = {
isa = PBXGroup;
children = (
7F62C4E32EF0F0DB00EF8370 /* BeMyEars.app */,
);
name = Products;
sourceTree = "<group>";
};
E8B54FF67C19AC2F5F6D3F52 /* Frameworks */ = {
isa = PBXGroup;
children = (
65598E6F009FE7A358A5011E /* Pods_BeMyEars.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7F62C4E22EF0F0DB00EF8370 /* BeMyEars */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7F62C4EE2EF0F0DB00EF8370 /* Build configuration list for PBXNativeTarget "BeMyEars" */;
buildPhases = (
8577AC5B22C6719EF802BC76 /* [CP] Check Pods Manifest.lock */,
7F62C4DF2EF0F0DB00EF8370 /* Sources */,
7F62C4E02EF0F0DB00EF8370 /* Frameworks */,
7F62C4E12EF0F0DB00EF8370 /* Resources */,
5025931BA4720F13D62A76E7 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
7F62C4E52EF0F0DB00EF8370 /* BeMyEars */,
);
name = BeMyEars;
productName = BeMyEars;
productReference = 7F62C4E32EF0F0DB00EF8370 /* BeMyEars.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7F62C4DB2EF0F0DB00EF8370 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2610;
LastUpgradeCheck = 2620;
TargetAttributes = {
7F62C4E22EF0F0DB00EF8370 = {
CreatedOnToolsVersion = 26.1.1;
};
};
};
buildConfigurationList = 7F62C4DE2EF0F0DB00EF8370 /* Build configuration list for PBXProject "BeMyEars" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7F62C4DA2EF0F0DB00EF8370;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = 7F62C4E42EF0F0DB00EF8370 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7F62C4E22EF0F0DB00EF8370 /* BeMyEars */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7F62C4E12EF0F0DB00EF8370 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
5025931BA4720F13D62A76E7 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-BeMyEars/Pods-BeMyEars-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
inputPaths = (
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-BeMyEars/Pods-BeMyEars-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-BeMyEars/Pods-BeMyEars-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
8577AC5B22C6719EF802BC76 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-BeMyEars-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7F62C4DF2EF0F0DB00EF8370 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
7F62C4EC2EF0F0DB00EF8370 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
7F62C4ED2EF0F0DB00EF8370 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 16.0;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
7F62C4EF2EF0F0DB00EF8370 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 6112C041C715DA9A93248ADC /* Pods-BeMyEars.debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CLANG_ALLOW_NON_MODULAR_INCLUDES_IN_FRAMEWORK_MODULES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_MODULE_VERIFIER = NO;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = PartialInfo.plist;
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.BeMyEars;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
7F62C4F02EF0F0DB00EF8370 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 4AD91A154B87B7EF1D84DFC2 /* Pods-BeMyEars.release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CLANG_ALLOW_NON_MODULAR_INCLUDES_IN_FRAMEWORK_MODULES = YES;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 7X85543FQQ;
ENABLE_MODULE_VERIFIER = NO;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_FILE = PartialInfo.plist;
IPHONEOS_DEPLOYMENT_TARGET = 18.6;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.jaredlog.BeMyEars;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7F62C4DE2EF0F0DB00EF8370 /* Build configuration list for PBXProject "BeMyEars" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F62C4EC2EF0F0DB00EF8370 /* Debug */,
7F62C4ED2EF0F0DB00EF8370 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7F62C4EE2EF0F0DB00EF8370 /* Build configuration list for PBXNativeTarget "BeMyEars" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7F62C4EF2EF0F0DB00EF8370 /* Debug */,
7F62C4F02EF0F0DB00EF8370 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7F62C4DB2EF0F0DB00EF8370 /* Project object */;
}

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:BeMyEars.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 53 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 61 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 16 KiB

View File

@@ -0,0 +1 @@
{"images":[{"size":"60x60","expected-size":"180","filename":"180.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"40x40","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"60x60","expected-size":"120","filename":"120.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"57x57","expected-size":"57","filename":"57.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"1x"},{"size":"29x29","expected-size":"87","filename":"87.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"57x57","expected-size":"114","filename":"114.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"2x"},{"size":"20x20","expected-size":"60","filename":"60.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"iphone","scale":"3x"},{"size":"1024x1024","filename":"1024.png","expected-size":"1024","idiom":"ios-marketing","folder":"Assets.xcassets/AppIcon.appiconset/","scale":"1x"},{"size":"40x40","expected-size":"80","filename":"80.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"72x72","expected-size":"72","filename":"72.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"76x76","expected-size":"152","filename":"152.png","folder":"Assets.xcas
sets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"50x50","expected-size":"100","filename":"100.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"29x29","expected-size":"58","filename":"58.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"76x76","expected-size":"76","filename":"76.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"29x29","expected-size":"29","filename":"29.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"50x50","expected-size":"50","filename":"50.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"72x72","expected-size":"144","filename":"144.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"40x40","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"83.5x83.5","expected-size":"167","filename":"167.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"},{"size":"20x20","expected-size":"20","filename":"20.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"1x"},{"size":"20x20","expected-size":"40","filename":"40.png","folder":"Assets.xcassets/AppIcon.appiconset/","idiom":"ipad","scale":"2x"}]}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 467 KiB

View File

@@ -0,0 +1,86 @@
import Foundation
/// The two participant roles in a BeMyEars call.
enum Role: String, Codable {
    case caller
    case interpreter
}
/// Availability states exchanged with the server; raw values match the
/// server's uppercase wire format.
enum PresenceState: String, Codable {
    case unavailable = "UNAVAILABLE"
    case available = "AVAILABLE"
    case ringing = "RINGING"
    case inCall = "IN_CALL"
}
/// A participant in the system. The username doubles as the stable identity.
struct User: Identifiable, Codable, Hashable {
    // Usernames serve as the Identifiable id (assumed unique server-side).
    var id: String { username }
    let username: String
    let role: Role
}
/// Client-side snapshot of the current call.
struct CallState: Equatable {
    var callId: String?     // nil while no call is active
    var remoteUser: String? // peer's username, when known
    var status: CallStatus

    /// Lifecycle phases of a call as seen by this client.
    enum CallStatus: String {
        case idle
        case calling
        case incoming
        case connecting
        case inCall
        case ending
    }
}
// Signaling Messages
/// Envelope for every message exchanged with the signaling server.
/// `payload` carries schemaless JSON via the type-erased AnyCodable.
struct SignalingMessage: Codable {
    let type: String
    let callId: String?
    let from: String?
    let to: String?
    let payload: AnyCodable?
}
// Helper for type-erased JSON payload
/// Wraps an arbitrary JSON value so schemaless payloads can travel through
/// Codable. Supports strings, numbers, booleans, null, and nested
/// arrays/objects.
struct AnyCodable: Codable {
    /// Underlying value: String, Int, Bool, Double, NSNull, [String: Any],
    /// or [Any].
    let value: Any

    init(_ value: Any) {
        self.value = value
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.singleValueContainer()
        // Order matters: String first so "1" stays a string; Int before
        // Double so whole numbers decode as Int.
        if container.decodeNil() {
            // JSON null previously failed to decode at all.
            value = NSNull()
        }
        else if let x = try? container.decode(String.self) { value = x }
        else if let x = try? container.decode(Int.self) { value = x }
        else if let x = try? container.decode(Bool.self) { value = x }
        else if let x = try? container.decode(Double.self) { value = x }
        else if let x = try? container.decode([String: AnyCodable].self) { value = x.mapValues { $0.value } }
        else if let x = try? container.decode([AnyCodable].self) { value = x.map { $0.value } }
        else {
            throw DecodingError.dataCorruptedError(in: container, debugDescription: "AnyCodable value cannot be decoded")
        }
    }

    func encode(to encoder: Encoder) throws {
        var container = encoder.singleValueContainer()
        if value is NSNull { try container.encodeNil() } // mirror of decodeNil above
        else if let x = value as? String { try container.encode(x) }
        else if let x = value as? Int { try container.encode(x) }
        else if let x = value as? Int32 { try container.encode(x) } // Added Int32
        else if let x = value as? Bool { try container.encode(x) }
        else if let x = value as? Double { try container.encode(x) }
        else if let x = value as? [String: Any] {
            // Basic support for nested dicts
            let wrapped = x.mapValues { AnyCodable($0) }
            try container.encode(wrapped)
        }
        else if let x = value as? [Any] {
            let wrapped = x.map { AnyCodable($0) }
            try container.encode(wrapped)
        }
        else {
            throw EncodingError.invalidValue(value, EncodingError.Context(codingPath: container.codingPath, debugDescription: "AnyCodable value cannot be encoded"))
        }
    }
}

View File

@@ -0,0 +1,77 @@
import Foundation
import Network
import Combine
/// Discovers the BeMyEars backend on the local network via Bonjour
/// (`_bemyears._tcp`) and publishes the first resolved IPv4 address.
class BonjourService: NSObject, ObservableObject, NetServiceBrowserDelegate, NetServiceDelegate {
    /// First IPv4 address of the server found on the LAN; nil until resolved.
    @Published var foundServer: String?
    private var netServiceBrowser: NetServiceBrowser?
    // Discovered services are retained here: a NetService must stay alive
    // for its asynchronous resolution to complete.
    private var services = [NetService]()

    /// Starts browsing for `_bemyears._tcp` services in the local domain.
    func start() {
        print("BonjourService: Starting browsing (NetService)...")
        netServiceBrowser = NetServiceBrowser()
        netServiceBrowser?.delegate = self
        netServiceBrowser?.searchForServices(ofType: "_bemyears._tcp", inDomain: "local.")
    }

    /// Stops browsing and releases all discovered services.
    func stop() {
        netServiceBrowser?.stop()
        netServiceBrowser = nil
        services.removeAll()
    }

    // MARK: - NetServiceBrowserDelegate

    func netServiceBrowser(_ browser: NetServiceBrowser, didFind service: NetService, moreComing: Bool) {
        print("BonjourService: Found service \(service.name)")
        services.append(service)
        service.delegate = self
        // 5 seconds timeout for resolution
        service.resolve(withTimeout: 5.0)
    }

    func netServiceBrowser(_ browser: NetServiceBrowser, didRemove service: NetService, moreComing: Bool) {
        print("BonjourService: Removed service \(service.name)")
        if let index = services.firstIndex(of: service) {
            services.remove(at: index)
        }
    }

    func netServiceBrowser(_ browser: NetServiceBrowser, didNotSearch errorDict: [String : NSNumber]) {
        print("BonjourService: Search failed: \(errorDict)")
    }

    // MARK: - NetServiceDelegate

    /// Walks the resolved socket addresses, publishes the first IPv4 one on
    /// the main queue, then stops browsing entirely. IPv6 entries are skipped.
    func netServiceDidResolveAddress(_ sender: NetService) {
        guard let addresses = sender.addresses else { return }
        for address in addresses {
            let data = address as NSData
            var storage = sockaddr_storage()
            data.getBytes(&storage, length: MemoryLayout<sockaddr_storage>.size)
            if Int32(storage.ss_family) == AF_INET {
                // NOTE(review): loading sockaddr_in straight from the raw
                // buffer assumes suitable alignment, and inet_ntoa returns a
                // shared static buffer — appears fine on this single resolve
                // path, but verify if this ever runs concurrently.
                let ipStr = String(cString: inet_ntoa(data.bytes.load(as: sockaddr_in.self).sin_addr))
                print("BonjourService: Resolved IPv4: \(ipStr)")
                // Found, update UI
                DispatchQueue.main.async {
                    self.foundServer = ipStr
                }
                // Stop specifically resolving this one or just keep going?
                // We'll just stop searching once we find a valid IPv4
                stop()
                return
            } else if Int32(storage.ss_family) == AF_INET6 {
                // Ignore IPv6
                print("BonjourService: Ignoring IPv6 address found.")
            }
        }
    }

    func netService(_ sender: NetService, didNotResolve errorDict: [String : NSNumber]) {
        print("BonjourService: Resolve failed: \(errorDict)")
    }
}

View File

@@ -0,0 +1,84 @@
import Foundation
import Combine
/// WebSocket client for the BeMyEars signaling server. Publishes decoded
/// `SignalingMessage`s and a (best-effort) connection flag via Combine.
class SignalingService: ObservableObject {
    private var webSocketTask: URLSessionWebSocketTask?
    private let urlSession = URLSession(configuration: .default)

    /// Server endpoint, re-read from UserDefaults on each connect so the
    /// user can change the IP without restarting; falls back to a LAN default.
    private var serverUrl: URL {
        let ip = UserDefaults.standard.string(forKey: "server_ip") ?? "192.168.1.199"
        return URL(string: "ws://\(ip):8080")!
    }

    // Publishers for ViewModel to subscribe to
    let messageReceived = PassthroughSubject<SignalingMessage, Never>()
    let isConnected = CurrentValueSubject<Bool, Never>(false)

    /// Opens the WebSocket and starts the receive loop.
    func connect() {
        print("Connecting to \(serverUrl)...")
        webSocketTask = urlSession.webSocketTask(with: serverUrl)
        webSocketTask?.resume()
        // Optimistic: resume() does not guarantee the socket actually
        // opened; a delegate callback or ping would be the robust signal.
        isConnected.send(true)
        receiveMessage()
    }

    /// Closes the socket with a normal-closure code and flips the flag.
    func disconnect() {
        webSocketTask?.cancel(with: .normalClosure, reason: nil)
        isConnected.send(false)
    }

    /// JSON-encodes and sends a signaling message. Errors are logged only.
    func send(_ message: SignalingMessage) {
        do {
            let data = try JSONEncoder().encode(message)
            // Distinct name: previously this local shadowed the parameter.
            let frame = URLSessionWebSocketTask.Message.data(data)
            webSocketTask?.send(frame) { error in
                if let error = error {
                    print("Error sending message: \(error)")
                }
            }
        } catch {
            print("Error encoding message: \(error)")
        }
    }

    /// Receives one frame, dispatches it, and re-arms itself. On the first
    /// failure it flips `isConnected` and stops (no automatic reconnect).
    private func receiveMessage() {
        webSocketTask?.receive { [weak self] result in
            guard let self = self else { return }
            switch result {
            case .failure(let error):
                print("Error receiving message: \(error)")
                self.isConnected.send(false)
                // In production, implement reconnect logic here
            case .success(let message):
                switch message {
                case .string(let text):
                    self.handleMessageData(text.data(using: .utf8))
                case .data(let data):
                    self.handleMessageData(data)
                @unknown default:
                    break
                }
                // Continue receiving messages
                self.receiveMessage()
            }
        }
    }

    /// Decodes raw frame bytes into a SignalingMessage and publishes it on
    /// the main queue; undecodable frames are logged along with their raw text.
    private func handleMessageData(_ data: Data?) {
        guard let data = data else { return }
        do {
            let message = try JSONDecoder().decode(SignalingMessage.self, from: data)
            DispatchQueue.main.async {
                self.messageReceived.send(message)
            }
        } catch {
            print("Failed to decode message: \(error)")
            if let str = String(data: data, encoding: .utf8) {
                print("Raw message: \(str)")
            }
        }
    }
}

View File

@@ -0,0 +1,343 @@
import Foundation
import WebRTC
import AVFoundation
import CoreMedia
#if canImport(UIKit)
import UIKit
#endif
/// Callbacks from WebRTCService to whoever drives signaling and the UI.
protocol WebRTCClientDelegate: AnyObject {
    // A locally gathered ICE candidate (to be forwarded to the remote peer).
    func webRTCClient(_ client: WebRTCService, didDiscoverLocalCandidate candidate: RTCIceCandidate)
    // ICE connection state changed (connected / disconnected / failed ...).
    func webRTCClient(_ client: WebRTCService, didChangeConnectionState state: RTCIceConnectionState)
    // Raw bytes received from the peer (presumably a data channel — confirm
    // against the implementation, which extends past this view).
    func webRTCClient(_ client: WebRTCService, didReceiveData data: Data)
    // The remote peer's video track became available for rendering.
    func webRTCClient(_ client: WebRTCService, didReceiveRemoteVideoTrack rtcVideoTrack: RTCVideoTrack)
    // The local capture track was created (attach to the local preview).
    func webRTCClient(_ client: WebRTCService, didCreateLocalVideoTrack rtcVideoTrack: RTCVideoTrack)
}
class WebRTCService: NSObject {
// MARK: - Properties
weak var delegate: WebRTCClientDelegate?
private let factory: RTCPeerConnectionFactory
private var peerConnection: RTCPeerConnection?
private var videoCapturer: RTCVideoCapturer? // Keep reference to prevent deallocation
private var videoSource: RTCVideoSource? // Keep reference to ensure delegate remains valid
/// Initializes WebRTC SSL, builds the peer-connection factory with the
/// default video codecs, and configures the shared audio session for
/// two-way call audio (play-and-record, video-chat mode).
override init() {
    RTCInitializeSSL()
    let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
    let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
    self.factory = RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    // Configure the audio session up front; failures are logged, not fatal.
    let audioSession = RTCAudioSession.sharedInstance()
    audioSession.lockForConfiguration()
    do {
        try audioSession.setCategory(AVAudioSession.Category.playAndRecord)
        try audioSession.setMode(AVAudioSession.Mode.videoChat)
        try audioSession.setActive(true)
    } catch {
        print("[WebRTC] AudioSession Configuration Error: \(error)")
    }
    audioSession.unlockForConfiguration()
}
// MARK: - Signalling
/// Builds an SDP offer requesting audio+video, installs it as the local
/// description, and hands it back for delivery over signaling.
/// Errors are logged; `completion` only fires when an offer was produced.
func createOffer(completion: @escaping (RTCSessionDescription) -> Void) {
    let mediaConstraints = RTCMediaConstraints(
        mandatoryConstraints: ["OfferToReceiveAudio": "true", "OfferToReceiveVideo": "true"],
        optionalConstraints: nil
    )
    peerConnection?.offer(for: mediaConstraints) { sdp, error in
        if let error = error {
            print("[WebRTC] Failed to create offer: \(error)")
            return
        }
        guard let offer = sdp else { return }
        print("[WebRTC] Created Offer")
        self.peerConnection?.setLocalDescription(offer) { error in
            if let error = error { print("[WebRTC] Failed to set local offer: \(error)") }
            completion(offer)
        }
    }
}
/// Builds an SDP answer to a previously-set remote offer, installs it as
/// the local description, and hands it back for delivery over signaling.
/// Errors are logged; `completion` only fires when an answer was produced.
func createAnswer(completion: @escaping (RTCSessionDescription) -> Void) {
    let mediaConstraints = RTCMediaConstraints(
        mandatoryConstraints: ["OfferToReceiveAudio": "true", "OfferToReceiveVideo": "true"],
        optionalConstraints: nil
    )
    peerConnection?.answer(for: mediaConstraints) { sdp, error in
        if let error = error {
            print("[WebRTC] Failed to create answer: \(error)")
            return
        }
        guard let answer = sdp else { return }
        print("[WebRTC] Created Answer")
        self.peerConnection?.setLocalDescription(answer) { error in
            if let error = error { print("[WebRTC] Failed to set local answer: \(error)") }
            completion(answer)
        }
    }
}
/// Applies the remote peer's SDP, logging failures, and always reports the
/// result (nil on success) back to the caller.
func setRemoteDescription(sdp: RTCSessionDescription, completion: @escaping (Error?) -> Void) {
    let kind = sdp.type == .offer ? "OFFER" : "ANSWER"
    print("[WebRTC] Setting Remote Description: \(kind)")
    peerConnection?.setRemoteDescription(sdp) { error in
        if let error = error { print("[WebRTC] Remote Desc Error: \(error)") }
        completion(error)
    }
}
/// Thin pass-through to RTCPeerConnection.setLocalDescription.
func setLocalDescription(sdp: RTCSessionDescription, completion: @escaping (Error?) -> Void) {
    self.peerConnection?.setLocalDescription(sdp, completionHandler: completion)
}
/// Adds a remote ICE candidate to the peer connection; failures are
/// logged and otherwise ignored.
func addCandidate(_ candidate: RTCIceCandidate) {
    peerConnection?.add(candidate) { error in
        guard let error = error else { return }
        print("Failed to add ICE candidate: \(error)")
    }
}
// MARK: - Connection
/// Creates the peer connection with a public STUN server (always) plus any
/// TURN servers supplied by the backend, then starts local video capture.
/// - Parameters:
///   - turnServers: TURN URL strings from the backend; empty means STUN-only.
///   - username/credential: TURN auth; empty strings substituted when nil.
func setup(turnServers: [String] = [], username: String? = nil, credential: String? = nil) {
    // STUN (Default Google for POC)
    var servers: [RTCIceServer] = [RTCIceServer(urlStrings: ["stun:stun.l.google.com:19302"])]
    if !turnServers.isEmpty {
        // TURN relay from the backend, with its credentials.
        servers.append(RTCIceServer(urlStrings: turnServers, username: username ?? "", credential: credential ?? ""))
    }
    let config = RTCConfiguration()
    config.iceServers = servers
    config.sdpSemantics = .unifiedPlan
    let constraints = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: ["DtlsSrtpKeyAgreement": "true"])
    peerConnection = factory.peerConnection(with: config, constraints: constraints, delegate: self)
    // Kick off camera capture for the local media stream.
    startCaptureLocalVideo()
}
/// Tears down the peer connection and releases capture resources.
func disconnect() {
    peerConnection?.close()
    peerConnection = nil
    videoCapturer = nil
    // Also drop the retained video source so stale capture state doesn't
    // leak into the next call (previously only the capturer was cleared).
    videoSource = nil
}
// MARK: - Media

/// Configures the front camera, starts capture, and attaches the resulting
/// local video track to the peer connection.
///
/// Contains several iPad-specific workarounds: some iPads reject certain
/// AVCaptureSession preset/format combinations (and can crash), so the
/// session preset is forcibly overridden AFTER capture starts, because
/// WebRTC's startCapture resets it to InputPriority.
private func startCaptureLocalVideo() {
    // Create and retain the video source so adaptOutputFormat can be applied later.
    let source = self.factory.videoSource()
    self.videoSource = source
    #if targetEnvironment(simulator)
    print("Simulator: Camera capture not supported")
    #else
    // Log the permission state; capture silently produces no frames without it.
    let authStatus = AVCaptureDevice.authorizationStatus(for: .video)
    print("[WebRTC] Camera Permission Status: \(authStatus.rawValue) (3=Authorized, 0=NotDetermined, 2=Denied)")
    if authStatus != .authorized {
        print("[WebRTC] ⚠️ WARNING: Camera access not authorized!")
    }
    let capturer = RTCCameraVideoCapturer(delegate: source)
    self.videoCapturer = capturer
    // CRITICAL: the session preset locks the format. iPad Pros often require
    // the High preset and reject VGA; iPad Minis may reject High and crash on
    // InputPriority. The preset must be set AFTER startCapture (see below).
    // This empty begin/commit pair only flushes any pending configuration.
    capturer.captureSession.beginConfiguration()
    capturer.captureSession.commitConfiguration()
    // Observe session errors/interruptions for diagnostics.
    // NOTE(review): these observers are never removed — consider removing them
    // in disconnect() so they don't accumulate across reconnects.
    NotificationCenter.default.addObserver(forName: .AVCaptureSessionRuntimeError, object: capturer.captureSession, queue: .main) { notification in
        let error = notification.userInfo?[AVCaptureSessionErrorKey] as? Error
        print("[WebRTC] ❌ Capture Session Runtime Error: \(String(describing: error))")
    }
    NotificationCenter.default.addObserver(forName: .AVCaptureSessionWasInterrupted, object: capturer.captureSession, queue: .main) { notification in
        let reason = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int
        print("[WebRTC] ⚠️ Capture Session Interrupted. Reason Code: \(String(describing: reason))")
    }
    // Select the front camera and the supported format closest to 1080p@30.
    let devices = RTCCameraVideoCapturer.captureDevices()
    if let frontCamera = devices.first(where: { $0.position == .front }) {
        let formats = RTCCameraVideoCapturer.supportedFormats(for: frontCamera)
        let targetWidth: Int32 = 1920 // Target 1080p
        let targetHeight: Int32 = 1080
        let targetFps: Float64 = 30.0
        var selectedFormat: AVCaptureDevice.Format?
        var currentDiff = Int32.max
        for format in formats {
            let dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            let diff = abs(dimension.width - targetWidth) + abs(dimension.height - targetHeight)
            // Verify the format supports the target frame rate.
            var supportsFps = false
            for range in format.videoSupportedFrameRateRanges {
                if range.minFrameRate <= targetFps && range.maxFrameRate >= targetFps {
                    supportsFps = true
                    break
                }
            }
            // On iPad Mini, 1280x960 (4:3) crashes with InputPriority (and even
            // High in some cases); 16:9 formats (1920x1080, 1280x720) are safer.
            // NOTE(review): pixelCount and isSafeRatio are computed but unused —
            // leftovers from an earlier heuristic; safe to delete.
            let pixelCount = dimension.width * dimension.height
            let isSafeRatio = (Double(dimension.width) / Double(dimension.height) > 1.7) // ~16:9
            if supportsFps && diff < currentDiff {
                // Closest-to-target heuristic. May pick an unstable 4:3 format;
                // the exact-match overrides below correct for that.
                selectedFormat = format
                currentDiff = diff
            }
            // Force-override when an exact 1920x1080 or 1280x720 match exists.
            if supportsFps {
                if dimension.width == 1920 && dimension.height == 1080 {
                    selectedFormat = format
                    currentDiff = 0
                    break
                }
                if dimension.width == 1280 && dimension.height == 720 {
                    // The diff math prefers 1280x960 (diff 760) over 1280x720
                    // (diff 1000), but 1280x960 is unstable on some iPads, so
                    // treat 720p as a perfect match to keep it selected.
                    selectedFormat = format
                    currentDiff = 0 // "Fake" perfect match so nothing "closer" replaces it
                }
            }
        }
        if let format = selectedFormat {
            DispatchQueue.main.async {
                capturer.startCapture(with: frontCamera, format: format, fps: Int(targetFps))
                // Post-capture preset override for iPad Mini / Pro stability:
                // WebRTC defaults to InputPriority, which crashes some iPads
                // with specific formats. Must run AFTER startCapture.
                capturer.captureSession.beginConfiguration()
                let targetPresets: [AVCaptureSession.Preset] = [.high, .medium, .low]
                for preset in targetPresets {
                    if capturer.captureSession.canSetSessionPreset(preset) {
                        capturer.captureSession.sessionPreset = preset
                        print("[WebRTC] Post-Capture Override: Set Session Preset to \(preset.rawValue)")
                        break
                    }
                }
                capturer.captureSession.commitConfiguration()
                // Adapt the encoded output (WebRTC-side scaling) to save bandwidth.
                // These deliberately shadow the outer capture-format targets.
                var targetWidth: Int32 = 1280
                var targetHeight: Int32 = 720
                #if canImport(UIKit)
                if UIDevice.current.userInterfaceIdiom == .pad {
                    let bounds = UIScreen.main.nativeBounds
                    let width = CGFloat(bounds.width)
                    let height = CGFloat(bounds.height)
                    // nativeBounds reports physical pixels (portrait orientation);
                    // derive the screen aspect ratio from it.
                    let maxDim = max(width, height)
                    let minDim = min(width, height)
                    let ratio = maxDim / minDim
                    // Keep quality high on iPads: 1080 base height with a width
                    // matching the screen's aspect (e.g. 4:3 -> 1440x1080).
                    targetHeight = 1080
                    targetWidth = Int32(CGFloat(targetHeight) * ratio)
                    // Video encoders require even dimensions.
                    if targetWidth % 2 != 0 { targetWidth += 1 }
                    print("[WebRTC] iPad detected. Adapted output to: \(targetWidth)x\(targetHeight) for ratio \(ratio)")
                }
                #endif
                self.videoSource?.adaptOutputFormat(toWidth: targetWidth, height: targetHeight, fps: 30)
                print("[WebRTC] Capture started: \(CMVideoFormatDescriptionGetDimensions(format.formatDescription)) @ \(targetFps)fps")
            }
        } else if let fallback = formats.last {
            // Last resort: use whatever format the device lists last.
            DispatchQueue.main.async {
                capturer.startCapture(with: frontCamera, format: fallback, fps: 30)
                print("[WebRTC] Requested Capture at Fallback: \(CMVideoFormatDescriptionGetDimensions(fallback.formatDescription))")
            }
        }
    } else {
        print("No front camera found")
    }
    #endif
    // Wrap the source in a track and publish it on the peer connection.
    guard let source = self.videoSource else { return }
    let videoTrack = self.factory.videoTrack(with: source, trackId: "video0")
    self.peerConnection?.add(videoTrack, streamIds: ["stream0"])
    // Notify delegate so the UI can show the local preview.
    self.delegate?.webRTCClient(self, didCreateLocalVideoTrack: videoTrack)
}
}
// MARK: - RTCPeerConnectionDelegate
// Bridges peer-connection callbacks to WebRTCClientDelegate. The stream-based
// callbacks are intentionally empty: this client uses Unified Plan, so remote
// media arrives via the transceiver callback at the bottom.
extension WebRTCService: RTCPeerConnectionDelegate {
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {}
    func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {}
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {}
    func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {}
    // Forward ICE connection state changes (connected/disconnected/failed) to the UI layer.
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        self.delegate?.webRTCClient(self, didChangeConnectionState: newState)
    }
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {}
    // Forward locally gathered ICE candidates so they can be relayed over signaling.
    func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
        self.delegate?.webRTCClient(self, didDiscoverLocalCandidate: candidate)
    }
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {}
    func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {}
    // Modern Unified Plan delivers remote media via track/transceiver events.
    func peerConnection(_ peerConnection: RTCPeerConnection, didStartReceivingOn transceiver: RTCRtpTransceiver) {
        print("[WebRTC] didStartReceivingOn transceiver: \(transceiver.mediaType == .video ? "Video" : "Audio")")
        if transceiver.mediaType == .video, let track = transceiver.receiver.track as? RTCVideoTrack {
            print("[WebRTC] Received Remote Video Track!")
            self.delegate?.webRTCClient(self, didReceiveRemoteVideoTrack: track)
        }
    }
}

View File

@@ -0,0 +1,29 @@
import Foundation
/// Utility for discovering this device's local network address.
struct NetworkManager {
    /// Returns the device's IPv4 address on interface "en0" (usually WiFi),
    /// or nil when unavailable.
    ///
    /// FIXES vs. the original:
    /// - `freeifaddrs` is now guaranteed via `defer`; the old code returned
    ///   `nil` from inside the loop without freeing the interface list.
    /// - `ifa_addr` is nil-checked instead of force-dereferenced (it can be
    ///   nil for some interfaces, per the getifaddrs contract).
    /// - The `getnameinfo` result is checked before using the buffer.
    static func getLocalIPAddress() -> String? {
        var address: String?
        var ifaddrsHead: UnsafeMutablePointer<ifaddrs>?
        guard getifaddrs(&ifaddrsHead) == 0 else { return nil }
        // Always release the linked list, on every exit path.
        defer { freeifaddrs(ifaddrsHead) }
        var cursor = ifaddrsHead
        while let entry = cursor {
            // Advance at the end of each iteration, including on `continue`.
            defer { cursor = entry.pointee.ifa_next }
            let interface = entry.pointee
            guard let addrPtr = interface.ifa_addr else { continue }
            guard addrPtr.pointee.sa_family == UInt8(AF_INET) else { continue } // IPv4 only
            guard String(cString: interface.ifa_name) == "en0" else { continue } // Usually WiFi
            var hostBuffer = [CChar](repeating: 0, count: Int(NI_MAXHOST))
            // Numeric conversion only — no DNS lookup.
            if getnameinfo(addrPtr, socklen_t(addrPtr.pointee.sa_len),
                           &hostBuffer, socklen_t(hostBuffer.count),
                           nil, socklen_t(0), NI_NUMERICHOST) == 0 {
                address = String(cString: hostBuffer)
            }
        }
        // As before, the LAST matching en0 address wins.
        return address
    }
}

View File

@@ -0,0 +1,375 @@
import Foundation
import Combine
import WebRTC
/// Central view-model orchestrating authentication, presence, call signaling,
/// and the WebRTC session. Shared app-wide as a single @EnvironmentObject.
class CallViewModel: ObservableObject {
    // MARK: - Published State
    @Published var currentUser: User?
    @Published var callState: CallState = CallState(status: .idle)
    // Usernames of interpreters currently available to call (caller role).
    @Published var availableInterpreters: [String] = []
    @Published var interpreterState: PresenceState = .unavailable // Default to unavailable locally, matching the server default
    // Notification State
    @Published var remotePeerSeesMe: Bool = false
    @Published var showVideoVisibleToast: Bool = false
    // Stats State
    @Published var onlineInterpretersCount: Int = 0
    @Published var onlineCallersCount: Int = 0
    // Transient error banner text; the UI clears it after display.
    @Published var errorMessage: String?
    // Abuse Reporting: details of the most recent call, retained after hangup
    // so an interpreter can still file a report.
    @Published var lastCallId: String?
    @Published var lastRemoteUser: String?
    // Services
    private let signaling: SignalingService
    private let webRTC: WebRTCService
    private var cancellables = Set<AnyCancellable>()
    // WebRTC Data
    // In a real app, these would be exposed via a VideoView struct dependent on the WebRTC library's rendering
    @Published var localVideoTrack: RTCVideoTrack?
    @Published var remoteVideoTrack: RTCVideoTrack?

    init() {
        self.signaling = SignalingService()
        self.webRTC = WebRTCService()
        setupBindings()
    }

    // MARK: - Setup

    /// Wires the signaling publishers and the WebRTC delegate to this view-model.
    private func setupBindings() {
        // Signaling - Connection State
        signaling.isConnected
            .dropFirst() // Ignore initial 'false' state
            .receive(on: DispatchQueue.main)
            .sink { [weak self] isConnected in
                if !isConnected {
                    self?.errorMessage = "Disconnected from server"
                    // Handle disconnect state
                }
            }
            .store(in: &cancellables)
        // Signaling - Messages
        signaling.messageReceived
            .receive(on: DispatchQueue.main)
            .sink { [weak self] msg in
                self?.handleSignalingMessage(msg)
            }
            .store(in: &cancellables)
        // WebRTC Delegate
        webRTC.delegate = self
    }

    // MARK: - Authentication / Registration

    /// Optimistically sets the user, then connects to signaling and sends
    /// REGISTER once the socket is actually open.
    func login(username: String, role: Role) {
        // 1. Optimistically set user so UI transitions immediately
        self.currentUser = User(username: username, role: role)
        // Reset interpreter state to unavailable on login/registration to match server default
        if role == .interpreter {
            self.interpreterState = .unavailable
        }
        // 2. Subscribe to connection status to send REGISTER only when actually connected
        signaling.isConnected
            .filter { $0 } // Wait for 'true'
            .first() // Do this only once per login call
            .sink { [weak self] _ in
                print("Connected! Sending REGISTER for \(username)")
                let msg = SignalingMessage(type: "REGISTER", callId: nil, from: nil, to: nil, payload: AnyCodable(["username": username, "role": role.rawValue]))
                self?.signaling.send(msg)
            }
            .store(in: &cancellables)
        // 3. Start connection
        signaling.connect()
    }

    /// Disconnects and resets all session state, then re-arms bindings for the
    /// next login.
    func logout() {
        signaling.disconnect()
        currentUser = nil
        callState = CallState(status: .idle)
        availableInterpreters = []
        localVideoTrack = nil
        remoteVideoTrack = nil
        errorMessage = nil
        // Cancel bindings to prevent receiving messages after logout
        cancellables.removeAll()
        // Re-setup to be ready for next login
        setupBindings()
    }

    // MARK: - Call Logic (Caller)

    /// Sends a CALL_REQUEST to the named interpreter (caller role only).
    func callInterpreter(username: String) {
        guard let myUser = currentUser, myUser.role == .caller else { return }
        callState = CallState(remoteUser: username, status: .calling)
        let msg = SignalingMessage(type: "CALL_REQUEST", callId: nil, from: myUser.username, to: username, payload: nil)
        signaling.send(msg)
    }

    // MARK: - Call Logic (Interpreter)

    /// Accepts the incoming call and starts WebRTC as the answering side.
    func acceptCall() {
        guard let myUser = currentUser, myUser.role == .interpreter, let callId = callState.callId, let remoteUser = callState.remoteUser else { return }
        let msg = SignalingMessage(type: "CALL_ACCEPT", callId: callId, from: myUser.username, to: remoteUser, payload: nil)
        signaling.send(msg)
        callState.status = .inCall // Transition to In Call
        startWebRTC(isCaller: false)
    }

    /// Declines the incoming call and returns to AVAILABLE.
    func declineCall() {
        guard let myUser = currentUser, myUser.role == .interpreter, let callId = callState.callId, let remoteUser = callState.remoteUser else { return }
        let msg = SignalingMessage(type: "CALL_DECLINE", callId: callId, from: myUser.username, to: remoteUser, payload: nil)
        signaling.send(msg)
        resetCallState()
        // Revert to AVAILABLE after declining (should match server behavior)
        self.interpreterState = .available
    }

    // MARK: - Presence Management

    /// Flips the interpreter's availability, optimistically updating locally
    /// before notifying the server.
    func togglePresence() {
        guard let myUser = currentUser, myUser.role == .interpreter else { return }
        let newState: PresenceState = (interpreterState == .available) ? .unavailable : .available
        // Optimistic update
        self.interpreterState = newState
        let msg = SignalingMessage(type: "SET_PRESENCE", callId: nil, from: myUser.username, to: nil, payload: AnyCodable(["state": newState.rawValue]))
        signaling.send(msg)
    }

    /// Reports the remote participant of the most recent call, then clears
    /// the stored call info so the report button disappears.
    func reportAbuse(message: String) {
        guard let myUser = currentUser, myUser.role == .interpreter, let callId = lastCallId, let remoteUser = lastRemoteUser else { return }
        let msg = SignalingMessage(type: "REPORT_ABUSE", callId: callId, from: myUser.username, to: nil, payload: AnyCodable(["message": message, "offender": remoteUser]))
        signaling.send(msg)
        // Clear last call info so button disappears
        self.lastCallId = nil
        self.lastRemoteUser = nil
    }

    // MARK: - Call Logic (Shared)

    /// Notifies the peer of the hangup, then tears down locally.
    func hangup() {
        guard let myUser = currentUser, let callId = callState.callId, let remoteUser = callState.remoteUser else { return }
        let msg = SignalingMessage(type: "HANGUP", callId: callId, from: myUser.username, to: remoteUser, payload: nil)
        signaling.send(msg)
        endCall()
    }

    /// Local teardown: closes WebRTC and resets call state.
    private func endCall() {
        webRTC.disconnect()
        resetCallState()
        remoteVideoTrack = nil
    }

    /// Returns call state to idle, preserving the last call's identifiers for
    /// post-call abuse reporting.
    private func resetCallState() {
        // Save last call info for abuse reporting
        if let callId = callState.callId, let remoteUser = callState.remoteUser {
            self.lastCallId = callId
            self.lastRemoteUser = remoteUser
        }
        callState = CallState(status: .idle)
        remotePeerSeesMe = false
        showVideoVisibleToast = false
    }

    // MARK: - Video Visibility

    /// Tells the remote peer that their video is rendering on our screen.
    func notifyVideoSeen() {
        // Send signal to remote peer that we see them
        guard let callId = callState.callId, let remoteUser = callState.remoteUser, let myUser = currentUser else { return }
        // This may be called multiple times by VideoView if resolution changes;
        // the message is cheap, so no debounce for now.
        let msg = SignalingMessage(type: "VIDEO_VISIBLE", callId: callId, from: myUser.username, to: remoteUser, payload: nil)
        signaling.send(msg)
    }

    // MARK: - Signaling Handling

    /// Dispatches every inbound signaling message. Runs on the main queue
    /// (see setupBindings).
    private func handleSignalingMessage(_ msg: SignalingMessage) {
        switch msg.type {
        case "REGISTER_SUCCESS":
            break // Already handled optimistically
        case "PRESENCE_UPDATE":
            if let payload = msg.payload?.value as? [[String: Any]] {
                self.availableInterpreters = payload.compactMap { $0["username"] as? String }
            }
        case "STATS_UPDATE":
            if let payload = msg.payload?.value as? [String: Any],
               let numInterpreters = payload["interpreters"] as? Int,
               let numCallers = payload["callers"] as? Int {
                DispatchQueue.main.async {
                    self.onlineInterpretersCount = numInterpreters
                    self.onlineCallersCount = numCallers
                }
            }
        // Interpreter Actions
        case "PRESENCE_CHANGED":
            // Server-authoritative confirmation of our presence state.
            if let payload = msg.payload?.value as? [String: Any],
               let stateStr = payload["state"] as? String,
               let state = PresenceState(rawValue: stateStr) {
                self.interpreterState = state
            }
        case "CALL_REQUEST":
            guard currentUser?.role == .interpreter else { return }
            if let callId = msg.callId, let from = msg.from {
                self.callState = CallState(callId: callId, remoteUser: from, status: .incoming)
            }
        // Caller Actions
        case "CALL_ACCEPT":
            if let callId = msg.callId {
                self.callState.callId = callId
                self.callState.status = .inCall
                // Extract TURN config if present
                // NOTE(review): the turnServer payload is currently discarded;
                // startWebRTC uses hardcoded credentials instead — wire this through.
                if let payload = msg.payload?.value as? [String: Any],
                   let turn = payload["turnServer"] as? [String: Any] {
                    // Configure WebRTC with these credentials
                    let _ = turn // Placeholder usage
                }
                startWebRTC(isCaller: true)
            }
        case "CALL_DECLINE":
            resetCallState()
            errorMessage = "Call declined. Please try another available interpreter."
        case "BUSY", "TIMEOUT":
            resetCallState()
            errorMessage = "\(msg.type): \(msg.payload?.value ?? "")"
        case "VIDEO_VISIBLE":
            // Remote peer sees us!
            DispatchQueue.main.async {
                self.remotePeerSeesMe = true
                self.showVideoVisibleToast = true
                // Hide toast after 3 seconds
                DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
                    self.showVideoVisibleToast = false
                }
            }
        case "HANGUP":
            endCall()
        // WebRTC Signaling
        case "OFFER":
            // Answering side: apply the offer, then create/send our answer.
            if let sdpStr = msg.payload?.value as? String {
                let sdp = RTCSessionDescription(type: .offer, sdp: sdpStr)
                webRTC.setRemoteDescription(sdp: sdp) { error in
                    if error == nil {
                        self.webRTC.createAnswer { answerSdp in
                            self.webRTC.setLocalDescription(sdp: answerSdp) { _ in
                                self.sendSDP(answerSdp, type: "ANSWER")
                            }
                        }
                    }
                }
            }
        case "ANSWER":
            if let sdpStr = msg.payload?.value as? String {
                let sdp = RTCSessionDescription(type: .answer, sdp: sdpStr)
                webRTC.setRemoteDescription(sdp: sdp) { _ in }
            }
        case "ICE":
            if let candidateDict = msg.payload?.value as? [String: Any],
               let sdp = candidateDict["sdp"] as? String,
               let sdpMLineIndexInt = candidateDict["sdpMLineIndex"] as? Int {
                let sdpMLineIndex = Int32(sdpMLineIndexInt)
                let sdpMid = candidateDict["sdpMid"] as? String ?? "" // sdpMid is optional sometimes, good to safely unwrap or default
                let candidate = RTCIceCandidate(sdp: sdp, sdpMLineIndex: sdpMLineIndex, sdpMid: sdpMid)
                webRTC.addCandidate(candidate)
            }
        default:
            print("Unhandled message: \(msg.type)")
        }
    }

    // MARK: - WebRTC Orchestration

    /// Sets up the peer connection; the caller side also creates and sends
    /// the initial OFFER.
    private func startWebRTC(isCaller: Bool) {
        // NOTE(review): hardcoded TURN URL/credentials for the POC — in
        // production these must come from the backend (CALL_ACCEPT payload).
        webRTC.setup(turnServers: ["turn:turn.jaredlog.com:3478"], username: "user", credential: "password")
        if isCaller {
            webRTC.createOffer { sdp in
                self.webRTC.setLocalDescription(sdp: sdp) { error in
                    if error == nil {
                        self.sendSDP(sdp, type: "OFFER")
                    }
                }
            }
        }
    }

    /// Wraps an SDP string in a SignalingMessage addressed to the remote peer.
    private func sendSDP(_ sdp: RTCSessionDescription, type: String) {
        guard let callId = callState.callId, let remoteUser = callState.remoteUser, let myUser = currentUser else { return }
        let msg = SignalingMessage(type: type, callId: callId, from: myUser.username, to: remoteUser, payload: AnyCodable(sdp.sdp))
        signaling.send(msg)
    }
}
// MARK: - WebRTC Delegate
// Receives events from WebRTCService and republishes them as @Published state
// (video tracks) or signaling messages (ICE candidates).
extension CallViewModel: WebRTCClientDelegate {
    // Relay a locally gathered ICE candidate to the remote peer over signaling.
    func webRTCClient(_ client: WebRTCService, didDiscoverLocalCandidate candidate: RTCIceCandidate) {
        guard let callId = callState.callId, let remoteUser = callState.remoteUser, let myUser = currentUser else { return }
        let candidateDict: [String: Any] = [
            "sdp": candidate.sdp,
            "sdpMLineIndex": candidate.sdpMLineIndex,
            "sdpMid": candidate.sdpMid ?? ""
        ]
        let msg = SignalingMessage(type: "ICE", callId: callId, from: myUser.username, to: remoteUser, payload: AnyCodable(candidateDict))
        signaling.send(msg)
    }
    func webRTCClient(_ client: WebRTCService, didChangeConnectionState state: RTCIceConnectionState) {
        // Handle connection state changes (connecting, connected, disconnected, failed)
    }
    func webRTCClient(_ client: WebRTCService, didReceiveData data: Data) {
        // Handle data channel
    }
    // Publish the remote track on the main queue so SwiftUI can render it.
    func webRTCClient(_ client: WebRTCService, didReceiveRemoteVideoTrack rtcVideoTrack: RTCVideoTrack) {
        DispatchQueue.main.async {
            self.remoteVideoTrack = rtcVideoTrack
        }
    }
    // Publish the local preview track on the main queue.
    func webRTCClient(_ client: WebRTCService, didCreateLocalVideoTrack rtcVideoTrack: RTCVideoTrack) {
        DispatchQueue.main.async {
            self.localVideoTrack = rtcVideoTrack
        }
    }
}

View File

@@ -0,0 +1,85 @@
import SwiftUI
import WebRTC
/// Full-screen in-call UI: remote video behind a local picture-in-picture
/// preview, a "video visible" toast, and a hang-up control.
struct CallView: View {
    @EnvironmentObject var viewModel: CallViewModel

    var body: some View {
        ZStack {
            Color.black.edgesIgnoringSafeArea(.all)
            // Remote Video (Full Screen)
            if let remoteTrack = viewModel.remoteVideoTrack {
                VideoView(track: remoteTrack, onRenderStart: {
                    print("Remote video is visible! Notifying peer...")
                    viewModel.notifyVideoSeen()
                })
                .edgesIgnoringSafeArea(.all)
            } else {
                Text("Waiting for video...")
                    .foregroundColor(.white)
            }
            // Local Video (PiP)
            VStack {
                HStack {
                    Spacer()
                    if let localTrack = viewModel.localVideoTrack {
                        VideoView(track: localTrack)
                            .frame(width: 100, height: 150)
                            .cornerRadius(8)
                            .padding()
                            .shadow(radius: 5)
                    } else {
                        // Placeholder while the local track is still being created.
                        Rectangle()
                            .fill(Color.gray)
                            .frame(width: 100, height: 150)
                            .overlay(Text("Local").foregroundColor(.white))
                            .cornerRadius(8)
                            .padding()
                    }
                }
                Spacer()
            }
            // Controls
            VStack {
                // Toast shown when the remote peer confirms our video is rendering.
                if viewModel.showVideoVisibleToast {
                    // FIX: corrected grammar of the user-facing message
                    // ("Your video now visible" -> "Your video is now visible").
                    Text("Your video is now visible on the other end.")
                        .font(.callout)
                        .padding(10)
                        .background(Color.black.opacity(0.7))
                        .foregroundColor(.white)
                        .cornerRadius(8)
                        .padding(.top, 50)
                        .transition(.opacity)
                        .animation(.easeInOut, value: viewModel.showVideoVisibleToast)
                }
                Spacer()
                Button(action: {
                    viewModel.hangup()
                }) {
                    Image(systemName: "phone.down.fill")
                        .font(.title)
                        .padding()
                        .background(Color.red)
                        .foregroundColor(.white)
                        .clipShape(Circle())
                }
                .padding(.bottom, 50)
            }
        }
        .onAppear {
            // Force keyboard dismissal when entering the call screen.
            UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil)
        }
        .ignoresSafeArea(.keyboard)
    }
}
// NOTE: Video rendering is handled by VideoView (see VideoView.swift), a
// SwiftUI UIViewRepresentable wrapper around the WebRTC SDK's RTCMTLVideoView.

View File

@@ -0,0 +1,141 @@
import SwiftUI
/// Post-login home screen. Callers see a list of available interpreters to
/// call; interpreters see their presence toggle and an abuse-report entry
/// point for the most recent call.
struct DashboardView: View {
    @EnvironmentObject var viewModel: CallViewModel
    // Abuse-report sheet state (interpreter role only).
    @State private var showAbuseReportSheet = false
    @State private var abuseMessage = ""

    var body: some View {
        VStack {
            // Header: greeting and logout.
            HStack {
                Text("Hello, \(viewModel.currentUser?.username ?? "")")
                    .font(.headline)
                Spacer()
                Button("Logout") {
                    viewModel.logout()
                }
                .foregroundColor(.red)
            }
            .padding()
            // Live online counts, shown to interpreters only.
            if viewModel.currentUser?.role == .interpreter {
                Text("Online: \(viewModel.onlineInterpretersCount) terps. \(viewModel.onlineCallersCount) users.")
                    .font(.subheadline)
                    .foregroundColor(.secondary)
                    .padding(.bottom, 5)
            }
            Divider()
            if viewModel.currentUser?.role == .caller {
                // Caller view: tap an interpreter to start a call.
                if viewModel.availableInterpreters.isEmpty {
                    VStack {
                        Spacer()
                        Text("No interpreters online")
                            .foregroundColor(.secondary)
                        Spacer()
                    }
                } else {
                    List(viewModel.availableInterpreters, id: \.self) { interpreterName in
                        HStack {
                            Text(interpreterName)
                            Spacer()
                            Button("Call") {
                                viewModel.callInterpreter(username: interpreterName)
                            }
                            .buttonStyle(.borderedProminent)
                        }
                    }
                }
            } else {
                // Interpreter View: presence toggle + abuse reporting.
                VStack(spacing: 20) {
                    Spacer()
                    Image(systemName: viewModel.interpreterState == .available ? "phone.badge.checkmark" : "phone.badge.slash")
                        .font(.system(size: 50))
                        .foregroundColor(viewModel.interpreterState == .available ? .green : .gray)
                    Text(viewModel.interpreterState == .available ? "Online & Waiting for Calls" : "You are currently offline")
                        .font(.title2)
                        .foregroundColor(viewModel.interpreterState == .available ? .green : .secondary)
                    Button(action: {
                        viewModel.togglePresence()
                    }) {
                        Text(viewModel.interpreterState == .available ? "Go Offline" : "Go Online")
                            .font(.headline)
                            .frame(maxWidth: .infinity)
                            .padding()
                            .background(viewModel.interpreterState == .available ? Color.gray : Color.green)
                            .foregroundColor(.white)
                            .cornerRadius(10)
                    }
                    .padding(.horizontal, 40)
                    // "Report Abuse" appears only while a recent call is on record.
                    if viewModel.lastCallId != nil {
                        Button(action: {
                            abuseMessage = ""
                            showAbuseReportSheet = true
                        }) {
                            Text("Report Abuse")
                                .font(.headline)
                                .foregroundColor(.red)
                                .padding()
                        }
                    }
                    Spacer()
                }
            }
        }
        // Abuse-report sheet: free-text description, capped at 100 characters.
        .sheet(isPresented: $showAbuseReportSheet) {
            NavigationView {
                VStack {
                    Text("Report Abuse")
                        .font(.headline)
                        .padding()
                    Text("Please describe the issue (max 100 chars):")
                        .font(.subheadline)
                        .foregroundColor(.secondary)
                    TextEditor(text: $abuseMessage)
                        .frame(height: 100)
                        .overlay(RoundedRectangle(cornerRadius: 8).stroke(Color.gray.opacity(0.5)))
                        .padding()
                        .onChange(of: abuseMessage) { _, newValue in
                            // Hard-truncate to the 100-character limit as the user types.
                            if newValue.count > 100 {
                                abuseMessage = String(newValue.prefix(100))
                            }
                        }
                    Spacer()
                    Button("Submit Report") {
                        if !abuseMessage.isEmpty {
                            viewModel.reportAbuse(message: abuseMessage)
                            showAbuseReportSheet = false
                        }
                    }
                    .font(.headline)
                    .foregroundColor(.white)
                    .padding()
                    .frame(maxWidth: .infinity)
                    .background(Color.red)
                    .cornerRadius(10)
                    .padding()
                    .disabled(abuseMessage.isEmpty)
                }
                .navigationBarTitleDisplayMode(.inline)
                .toolbar {
                    ToolbarItem(placement: .cancellationAction) {
                        Button("Cancel") {
                            showAbuseReportSheet = false
                        }
                    }
                }
            }
            .presentationDetents([.medium])
        }
    }
}

View File

@@ -0,0 +1,46 @@
import SwiftUI
/// Full-screen prompt shown to an interpreter when a caller requests a session.
struct IncomingCallView: View {
    @EnvironmentObject var viewModel: CallViewModel

    var body: some View {
        VStack(spacing: 30) {
            Text("Incoming Call")
                .font(.largeTitle)
                .fontWeight(.bold)
            Text("from \(viewModel.callState.remoteUser ?? "Unknown")")
                .font(.title)
            HStack(spacing: 40) {
                responseButton(systemImage: "phone.down.fill", caption: "Decline", tint: Color.red) {
                    viewModel.declineCall()
                }
                responseButton(systemImage: "phone.fill", caption: "Accept", tint: Color.green) {
                    viewModel.acceptCall()
                }
            }
        }
    }

    /// Circular icon button with a caption below it, shared by both call responses.
    private func responseButton(systemImage: String,
                                caption: String,
                                tint: Color,
                                action: @escaping () -> Void) -> some View {
        Button(action: action) {
            VStack {
                Image(systemName: systemImage)
                    .font(.largeTitle)
                    .padding()
                    .background(tint)
                    .foregroundColor(.white)
                    .clipShape(Circle())
                Text(caption)
            }
        }
    }
}

View File

@@ -0,0 +1,109 @@
import SwiftUI
/// Login / role-selection screen. Discovers the backend via Bonjour, lets the
/// user pick Caller or Interpreter, and persists the server IP for next launch.
struct LoginView: View {
    @EnvironmentObject var viewModel: CallViewModel
    // Development defaults; switched by role below.
    @State private var username: String = "Jared"
    @State private var selectedRole: Role = .caller
    @State private var serverIp: String = ""
    @State private var isServerFound: Bool = false
    // Local-network discovery of the backend (_bemyears._tcp).
    @StateObject private var bonjourService = BonjourService()

    var body: some View {
        VStack(spacing: 20) {
            Text("BeMyEars")
                .font(.largeTitle)
                .fontWeight(.bold)
            TextField("Username", text: $username)
                .textFieldStyle(RoundedBorderTextFieldStyle())
                .autocapitalization(.none)
                .padding()
            // Segmented role picker (two adjoining buttons).
            HStack(spacing: 0) {
                Button(action: { selectedRole = .caller }) {
                    Text("Caller (Deaf User)")
                        .font(.subheadline)
                        .padding(.vertical, 10)
                        .frame(maxWidth: .infinity)
                        .background(selectedRole == .caller ? Color.blue : Color.gray.opacity(0.2))
                        .foregroundColor(selectedRole == .caller ? .white : .primary)
                }
                Button(action: { selectedRole = .interpreter }) {
                    Text("Interpreter")
                        .font(.subheadline)
                        .padding(.vertical, 10)
                        .frame(maxWidth: .infinity)
                        .background(selectedRole == .interpreter ? Color.blue : Color.gray.opacity(0.2))
                        .foregroundColor(selectedRole == .interpreter ? .white : .primary)
                }
            }
            .cornerRadius(8)
            .padding()
            // Swap the default dev username when the role changes.
            .onChange(of: selectedRole) { _, newRole in
                if newRole == .caller {
                    username = "Jared"
                } else {
                    username = "Terp_01"
                }
            }
            Button(action: {
                guard !username.isEmpty else { return }
                // Persist the server IP for subsequent launches.
                if !serverIp.isEmpty {
                    UserDefaults.standard.set(serverIp, forKey: "server_ip")
                }
                viewModel.login(username: username, role: selectedRole)
            }) {
                Text("Join")
                    .font(.headline)
                    .foregroundColor(.white)
                    .padding()
                    .frame(maxWidth: .infinity)
                    .background(Color.red)
                    .cornerRadius(10)
            }
            .padding()
            Spacer()
            // Manual server-IP entry, pre-filled by Bonjour or subnet guess.
            VStack(alignment: .leading, spacing: 5) {
                Text(isServerFound ? "Backend Server IP was found." : "Backend Server IP")
                    .font(.caption)
                    .foregroundColor(isServerFound ? .green : .gray)
                TextField("Server IP", text: $serverIp)
                    .textFieldStyle(RoundedBorderTextFieldStyle())
                    .keyboardType(.numbersAndPunctuation)
            }
            .padding(.horizontal)
            .padding(.bottom)
        }
        .padding()
        .onAppear {
            bonjourService.start()
            if let savedIp = UserDefaults.standard.string(forKey: "server_ip") {
                serverIp = savedIp
            } else if let localIp = NetworkManager.getLocalIPAddress() {
                // Try to predict server IP based on local IP (same subnet)
                // e.g. 192.168.1.x -> "192.168.1." prefix for the user to complete
                var components = localIp.split(separator: ".")
                if components.count == 4 {
                    components.removeLast()
                    serverIp = components.joined(separator: ".") + "."
                } else {
                    serverIp = localIp
                }
            }
        }
        // Bonjour wins over the saved/guessed IP when it finds the server.
        .onReceive(bonjourService.$foundServer) { found in
            if let found = found {
                serverIp = found
                isServerFound = true
            }
        }
    }
}

View File

@@ -0,0 +1,51 @@
import SwiftUI
import WebRTC
// Wraps RTCMTLVideoView for SwiftUI
/// SwiftUI wrapper around WebRTC's Metal renderer (RTCMTLVideoView).
struct VideoView: UIViewRepresentable {
    let track: RTCVideoTrack?
    var onRenderStart: (() -> Void)? = nil // Fired once, when the first frame's dimensions arrive

    func makeUIView(context: Context) -> RTCMTLVideoView {
        let videoView = RTCMTLVideoView(frame: .zero)
        videoView.videoContentMode = .scaleAspectFill
        videoView.delegate = context.coordinator // Coordinator reports first-frame rendering
        return videoView
    }

    func updateUIView(_ uiView: RTCMTLVideoView, context: Context) {
        // Keep the coordinator's copy of the struct fresh so onRenderStart
        // always refers to the latest closure.
        context.coordinator.parent = self
        // FIX: the previous implementation called track.add(uiView) on every
        // SwiftUI update, registering the renderer repeatedly and never
        // detaching it from a replaced track. Detach first, then attach.
        guard context.coordinator.attachedTrack !== track else { return }
        context.coordinator.attachedTrack?.remove(uiView)
        track?.add(uiView)
        context.coordinator.attachedTrack = track
    }

    static func dismantleUIView(_ uiView: RTCMTLVideoView, coordinator: Coordinator) {
        // Detach on teardown so the track does not keep feeding a dead view.
        coordinator.attachedTrack?.remove(uiView)
        coordinator.attachedTrack = nil
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(self)
    }

    class Coordinator: NSObject, RTCVideoViewDelegate {
        var parent: VideoView
        // Ensures onRenderStart fires at most once per view lifetime.
        var hasNotifiedRender = false
        // The track currently feeding this renderer; used to detach cleanly.
        var attachedTrack: RTCVideoTrack?

        init(_ parent: VideoView) {
            self.parent = parent
        }

        func videoView(_ videoView: RTCVideoRenderer, didChangeVideoSize size: CGSize) {
            // Called when the first frame is received and dimensions are known —
            // a good proxy for "video is visible now".
            if !hasNotifiedRender && size.width > 0 && size.height > 0 {
                print("[VideoView] Video rendering started with size: \(size)")
                hasNotifiedRender = true
                DispatchQueue.main.async {
                    self.parent.onRenderStart?()
                }
            }
        }
    }
}

View File

@@ -0,0 +1,13 @@
import SwiftUI
/// App entry point. Owns the single shared CallViewModel and injects it into
/// the environment so every screen observes the same session/call state.
@main
struct BeMyEarsApp: App {
    @StateObject private var viewModel = CallViewModel()
    var body: some Scene {
        WindowGroup {
            ContentView()
                .environmentObject(viewModel)
        }
    }
}

View File

@@ -0,0 +1,43 @@
import SwiftUI
/// Root router: picks the active screen from login/call state and overlays a
/// transient error banner that auto-dismisses after 3 seconds.
struct ContentView: View {
    @EnvironmentObject var viewModel: CallViewModel

    var body: some View {
        Group {
            if viewModel.currentUser == nil {
                LoginView()
            } else {
                // Route by call status; dashboard covers idle/calling states.
                switch viewModel.callState.status {
                case .incoming:
                    IncomingCallView()
                case .inCall, .connecting:
                    CallView()
                default:
                    DashboardView()
                }
            }
        }
        .overlay(
            Group {
                if let error = viewModel.errorMessage {
                    VStack {
                        Text(error)
                            .foregroundColor(.red)
                            .padding()
                            .background(Color(UIColor.systemBackground))
                            .cornerRadius(10)
                            .shadow(radius: 5)
                    }
                    .padding()
                    .transition(.move(edge: .top))
                    .onAppear {
                        // Auto-clear the banner; if a new error arrives in the
                        // meantime it is cleared too (last-writer-wins).
                        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
                            viewModel.errorMessage = nil
                        }
                    }
                }
            }, alignment: .top
        )
    }
}

58
PartialInfo.plist Normal file
View File

@@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UIApplicationSceneManifest</key>
<dict>
<key>UIApplicationSupportsMultipleScenes</key>
<true/>
<key>UISceneConfigurations</key>
<dict/>
</dict>
<key>UIApplicationSupportsIndirectInputEvents</key>
<true/>
<key>UILaunchScreen</key>
<dict/>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>NSCameraUsageDescription</key>
<string>BeMyEars needs camera access for video calls.</string>
<key>NSMicrophoneUsageDescription</key>
<string>BeMyEars needs microphone access for video calls.</string>
<key>NSLocalNetworkUsageDescription</key>
<string>BeMyEars uses the local network to discover the backend server.</string>
<key>NSBonjourServices</key>
<array>
<string>_bemyears._tcp</string>
</array>
</dict>
</plist>

6
Podfile Normal file
View File

@@ -0,0 +1,6 @@
# CocoaPods configuration for the BeMyEars iOS client.
platform :ios, '18.6'

# Build pods as dynamic frameworks (required by the WebRTC binary pod).
use_frameworks!

target 'BeMyEars' do
  # Prebuilt WebRTC framework dependency.
  pod 'WebRTC-SDK'
end

16
Podfile.lock Normal file
View File

@@ -0,0 +1,16 @@
PODS:
- WebRTC-SDK (137.7151.11)
DEPENDENCIES:
- WebRTC-SDK
SPEC REPOS:
trunk:
- WebRTC-SDK
SPEC CHECKSUMS:
WebRTC-SDK: 02622170bcaa1ea06008e2c8f4496a3c6db2413f
PODFILE CHECKSUM: 39ea3921fa62040a6e4b43f10539c262c91a6fc4
COCOAPODS: 1.16.2

57
README.md Normal file
View File

@@ -0,0 +1,57 @@
# BeMyEars 👂
**BeMyEars** is a real-time video communication platform designed to bridge the gap between **Deaf/Hard of Hearing (HoH)** individuals and **Sign Language Interpreters**.
The application provides a seamless, one-to-one video chat experience, allowing Deaf users to instantly request assistance for daily tasks, conversations, or minor emergencies, and enabling Interpreters to provide remote support.
---
## 👥 User Roles & Experience
### For Deaf / Hard of Hearing Users
* **Purpose**: Get on-demand access to a visual interpreter.
* **What to Expect**:
* **Dashboard**: A clear list of currently available (online) Interpreters.
* **One-Tap Calling**: Simply tap an Interpreter's name to initiate a video call.
* **High-Quality Video**: Optimized video connection (up to 1080p) to ensure sign language and facial expressions are clearly visible.
### For Interpreters
* **Purpose**: Offer remote visual assistance and interpretation services.
* **What to Expect**:
* **Standby Mode**: Login and toggle your status between "Online" and "Offline".
* **Incoming Call Alerts**: Full-screen notification when a user needs assistance.
* **Two-Way Audio/Video**: Use your voice to translate for the Deaf user while signing back to them via video.
* **Abuse Reporting**: Report inappropriate behavior immediately after a call.
---
## 🚀 Key Features
* **Real-Time WebRTC Video**: Low-latency peer-to-peer video streaming.
* **Bifunctional Login**: Single app supports both User and Interpreter workflows.
* **Presence System**: Interpreters control their availability; Deaf users see live counts.
* **Video Visibility**: Notifications confirm when video renders on the peer's device.
* **Privacy First**: Direct device-to-device connection.
---
## 🛠 Technical Overview
This project serves as a proof-of-concept for a modern iOS video chat application.
* **iOS Client**: Built with **SwiftUI** and **WebRTC**.
* **Backend**: Node.js + WebSocket signaling server.
* **Connectivity**: STUN/TURN support (ICE) for traversing NATs.
* **Optimization**: Custom video adaptation ("Feed the Beast" strategy) for iPad Pro compatibility.
## 📦 Setup & Build
1. **Install dependencies**:
```bash
cd BeMyEars
pod install
```
2. **Open Workspace**:
**CRITICAL**: You must open `BeMyEars.xcworkspace`, NOT `BeMyEars.xcodeproj`.
```bash
open BeMyEars.xcworkspace
```
3. **Build**: Select your target device and run (Cmd+R).

347
backend/package-lock.json generated Normal file
View File

@@ -0,0 +1,347 @@
{
"name": "bemyears-backend",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "bemyears-backend",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"bonjour-service": "^1.3.0",
"uuid": "^9.0.0",
"ws": "^8.13.0"
},
"devDependencies": {
"@types/node": "^18.15.0",
"@types/uuid": "^9.0.1",
"@types/ws": "^8.5.4",
"ts-node": "^10.9.1",
"typescript": "^5.0.0"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "0.3.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.5",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@leichtgewicht/ip-codec": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz",
"integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==",
"license": "MIT"
},
"node_modules/@tsconfig/node10": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz",
"integrity": "sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node12": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node14": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node16": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/node": {
"version": "18.19.130",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz",
"integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/uuid": {
"version": "9.0.8",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz",
"integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/ws": {
"version": "8.18.1",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz",
"integrity": "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/acorn": {
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/acorn-walk": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"acorn": "^8.11.0"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/arg": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true,
"license": "MIT"
},
"node_modules/bonjour-service": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.3.0.tgz",
"integrity": "sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
"multicast-dns": "^7.2.5"
}
},
"node_modules/create-require": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/dns-packet": {
"version": "5.6.1",
"resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz",
"integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==",
"license": "MIT",
"dependencies": {
"@leichtgewicht/ip-codec": "^2.0.1"
},
"engines": {
"node": ">=6"
}
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
"license": "MIT"
},
"node_modules/make-error": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true,
"license": "ISC"
},
"node_modules/multicast-dns": {
"version": "7.2.5",
"resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz",
"integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==",
"license": "MIT",
"dependencies": {
"dns-packet": "^5.2.2",
"thunky": "^1.0.2"
},
"bin": {
"multicast-dns": "cli.js"
}
},
"node_modules/thunky": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz",
"integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==",
"license": "MIT"
},
"node_modules/ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
"@tsconfig/node12": "^1.0.7",
"@tsconfig/node14": "^1.0.0",
"@tsconfig/node16": "^1.0.2",
"acorn": "^8.4.1",
"acorn-walk": "^8.1.1",
"arg": "^4.1.0",
"create-require": "^1.1.0",
"diff": "^4.0.1",
"make-error": "^1.1.1",
"v8-compile-cache-lib": "^3.0.1",
"yn": "3.1.1"
},
"bin": {
"ts-node": "dist/bin.js",
"ts-node-cwd": "dist/bin-cwd.js",
"ts-node-esm": "dist/bin-esm.js",
"ts-node-script": "dist/bin-script.js",
"ts-node-transpile-only": "dist/bin-transpile.js",
"ts-script": "dist/bin-script-deprecated.js"
},
"peerDependencies": {
"@swc/core": ">=1.2.50",
"@swc/wasm": ">=1.2.50",
"@types/node": "*",
"typescript": ">=2.7"
},
"peerDependenciesMeta": {
"@swc/core": {
"optional": true
},
"@swc/wasm": {
"optional": true
}
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true,
"license": "MIT"
},
"node_modules/uuid": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
"integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
"dev": true,
"license": "MIT"
},
"node_modules/ws": {
"version": "8.18.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
"integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
}
}
}

26
backend/package.json Normal file
View File

@@ -0,0 +1,26 @@
{
"name": "bemyears-backend",
"version": "1.0.0",
"description": "Signaling server for BeMyEars iOS app",
"main": "dist/server.js",
"scripts": {
"build": "tsc",
"start": "node dist/server.js",
"dev": "ts-node src/server.ts"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"bonjour-service": "^1.3.0",
"uuid": "^9.0.0",
"ws": "^8.13.0"
},
"devDependencies": {
"@types/node": "^18.15.0",
"@types/uuid": "^9.0.1",
"@types/ws": "^8.5.4",
"ts-node": "^10.9.1",
"typescript": "^5.0.0"
}
}

456
backend/src/server.ts Normal file
View File

@@ -0,0 +1,456 @@
import { WebSocketServer, WebSocket } from 'ws';
import { v4 as uuidv4 } from 'uuid';
console.log("BeMyEars Signaling Server Starting...");

// Wrap console.log / console.error so every line carries a local timestamp.
const baseLog = console.log;
const baseError = console.error;

// Build a "YYYY-MM-DD h:mm:ss AM/PM" stamp in the server's local time zone.
function getTimestamp() {
    const now = new Date();
    const datePart = now.toLocaleDateString('en-CA'); // en-CA renders as YYYY-MM-DD
    const timePart = now.toLocaleTimeString('en-US', {
        hour: 'numeric',
        minute: '2-digit',
        second: '2-digit',
        hour12: true,
    });
    return `${datePart} ${timePart}`;
}

console.log = (...args: any[]) => baseLog(`[${getTimestamp()}]`, ...args);
console.error = (...args: any[]) => baseError(`[${getTimestamp()}]`, ...args);
// Port the signaling WebSocket server listens on.
const PORT = 8080;
const wss = new WebSocketServer({ port: PORT });

// Types
// 'caller' = deaf/HoH user who initiates calls; 'interpreter' = receives calls only.
type Role = 'caller' | 'interpreter';
// Interpreter availability lifecycle; callers carry no presence state.
type PresenceState = 'UNAVAILABLE' | 'AVAILABLE' | 'RINGING' | 'IN_CALL';

// A connected, registered client.
interface User {
    username: string;
    role: Role;
    ws: WebSocket;
    state?: PresenceState; // Only for interpreters
    currentCallId?: string; // Set while participating in a call (either role)
}

// Envelope for every message exchanged over the signaling socket.
interface SignalingMessage {
    type: string;
    callId?: string;
    from?: string; // Appended by server for security
    to?: string;
    payload?: any;
    // Client sent properties
    username?: string;
    role?: Role;
}

// State
// username -> User; usernames are globally unique and ephemeral (no persistence).
const users = new Map<string, User>();
// callId -> participants; 'timer' is the 10s ring timeout for unanswered calls.
const calls = new Map<string, { caller: string; interpreter: string; timer?: NodeJS.Timeout }>();
wss.on('connection', (ws, req) => {
    // Normalize IPv4-mapped IPv6 addresses (e.g. "::ffff:1.2.3.4") for logging.
    let ip = req.socket.remoteAddress;
    if (ip && ip.startsWith('::ffff:')) {
        ip = ip.substring(7);
    }
    console.log(`New client connected from ${ip}`);
    // The user bound to this socket once a REGISTER succeeds.
    let currentUser: User | null = null;
    ws.on('message', (message: any) => {
        try {
            const data: SignalingMessage = JSON.parse(message.toString());
            // NOTE(review): currentUser is assigned in a .then() microtask, so a
            // second message arriving in the same tick as REGISTER could be
            // handled with currentUser still null — confirm whether clients can
            // ever send messages that quickly after registering.
            handleMessage(ws, data, currentUser).then(user => {
                if (user) currentUser = user;
            });
        } catch (e) {
            console.error('Failed to parse message:', e);
        }
    });
    ws.on('close', () => {
        if (currentUser) {
            console.log(`User disconnected: ${currentUser.username}`);
            // Handle disconnect logic (hangup active calls, remove from presence)
            handleDisconnect(currentUser);
        }
    });
});
// Route one decoded signaling message to its handler.
// Returns the (possibly newly registered) user bound to this socket; only
// REGISTER can change that binding — every other type returns currentUser.
async function handleMessage(ws: WebSocket, msg: SignalingMessage, currentUser: User | null): Promise<User | null> {
    // REGISTER is the only message allowed before a user identity exists.
    if (msg.type === 'REGISTER') {
        return handleRegister(ws, msg);
    }

    const relayedTypes = ['OFFER', 'ANSWER', 'ICE', 'HANGUP', 'VIDEO_VISIBLE'];

    if (msg.type === 'SET_PRESENCE') {
        if (currentUser) handleSetPresence(currentUser, msg);
    } else if (msg.type === 'CALL_REQUEST') {
        if (currentUser) handleCallRequest(currentUser, msg);
    } else if (msg.type === 'CALL_ACCEPT') {
        if (currentUser) handleCallAccept(currentUser, msg);
    } else if (msg.type === 'CALL_DECLINE') {
        if (currentUser) handleCallDecline(currentUser, msg);
    } else if (relayedTypes.includes(msg.type)) {
        // Peer-to-peer signaling and call-teardown traffic.
        if (currentUser) handleSignaling(currentUser, msg);
    } else if (msg.type === 'REPORT_ABUSE') {
        if (currentUser) handleReportAbuse(currentUser, msg);
    } else {
        console.warn(`Unknown message type: ${msg.type}`);
    }
    return currentUser;
}
function handleRegister(ws: WebSocket, msg: SignalingMessage): User | null {
const username = msg.username || msg.payload?.username;
const role = msg.role || msg.payload?.role;
if (!username || !role) {
console.log("Registration failed: Missing username or role in", msg);
return null;
}
// Simple uniqueness check
if (users.has(username)) {
ws.send(JSON.stringify({ type: 'ERROR', payload: { message: 'Username taken' } }));
return null;
}
const newUser: User = {
username: username,
role: role as Role,
ws: ws,
state: role === 'interpreter' ? 'UNAVAILABLE' : undefined
};
users.set(username, newUser);
console.log(`✅ CLIENT IDENTIFIED: ${newUser.username} (${newUser.role})`);
ws.send(JSON.stringify({ type: 'REGISTER_SUCCESS', payload: { username: newUser.username } }));
broadcastPresence();
// Send initial presence list to caller
if (newUser.role === 'caller') {
sendPresenceList(ws);
}
return newUser;
}
function handleCallRequest(caller: User, msg: SignalingMessage) {
if (caller.role !== 'caller') return;
const interpreterUsername = msg.to;
if (!interpreterUsername) return;
const interpreter = users.get(interpreterUsername);
if (!interpreter || interpreter.role !== 'interpreter' || interpreter.state !== 'AVAILABLE') {
caller.ws.send(JSON.stringify({ type: 'BUSY', payload: { message: 'Interpreter unavailable' } }));
return;
}
const callId = uuidv4();
// Update State
interpreter.state = 'RINGING';
interpreter.currentCallId = callId;
caller.currentCallId = callId;
calls.set(callId, { caller: caller.username, interpreter: interpreter.username });
console.log(`📞 Call Request (${callId}): ${caller.username} -> ${interpreter.username}`);
// Broadcast presence update (interpreter is now RINGING, so hidden/busy)
broadcastPresence();
// Forward to interpreter
interpreter.ws.send(JSON.stringify({
type: 'CALL_REQUEST',
callId: callId,
from: caller.username,
payload: msg.payload
}));
// Start 10s Timer
const timeout = setTimeout(() => {
handleCallTimeout(callId);
}, 10000);
const callData = calls.get(callId);
if (callData) callData.timer = timeout;
}
// Accept a ringing call: cancel the ring timeout, mark the interpreter
// IN_CALL, and hand the caller the TURN relay configuration.
// TURN settings are read from the environment when provided (TURN_URL,
// TURN_USERNAME, TURN_CREDENTIAL) so deployments don't ship baked-in
// credentials; the fallbacks preserve the original proof-of-concept defaults.
function handleCallAccept(interpreter: User, msg: SignalingMessage) {
    // Only the interpreter leg of the call may accept, and callId is required.
    if (interpreter.role !== 'interpreter' || !msg.callId) return;
    const call = calls.get(msg.callId);
    if (!call || call.interpreter !== interpreter.username) return; // Invalid call
    if (call.timer) clearTimeout(call.timer); // Cancel the 10s ring timeout
    interpreter.state = 'IN_CALL';
    broadcastPresence();
    console.log(`✅ Call Connected (${msg.callId}): ${call.caller} <-> ${interpreter.username}`);
    const caller = users.get(call.caller);
    if (caller) {
        caller.ws.send(JSON.stringify({
            type: 'CALL_ACCEPT',
            callId: msg.callId,
            from: interpreter.username,
            payload: {
                turnServer: {
                    // SECURITY: static username/password TURN credentials should be
                    // replaced with short-lived credentials before production use.
                    urls: process.env.TURN_URL || "turn:turn.jaredlog.com:3478",
                    username: process.env.TURN_USERNAME || "user",
                    credential: process.env.TURN_CREDENTIAL || "password"
                }
            }
        }));
    }
}
// Decline a ringing call: cancel the ring timeout, return the interpreter
// to the available pool, notify the caller, and drop the call record.
function handleCallDecline(interpreter: User, msg: SignalingMessage) {
    if (interpreter.role !== 'interpreter' || !msg.callId) return;
    const call = calls.get(msg.callId);
    if (!call) return;
    // Ownership check (matches handleCallAccept): only the interpreter this
    // call was routed to may decline it — previously any interpreter could
    // decline an arbitrary callId.
    if (call.interpreter !== interpreter.username) return;
    if (call.timer) clearTimeout(call.timer);
    // Reset interpreter
    interpreter.state = 'AVAILABLE';
    interpreter.currentCallId = undefined;
    broadcastPresence();
    // Notify caller
    const caller = users.get(call.caller);
    if (caller) {
        caller.ws.send(JSON.stringify({
            type: 'CALL_DECLINE',
            callId: msg.callId,
            from: interpreter.username
        }));
        caller.currentCallId = undefined;
    }
    calls.delete(msg.callId);
    console.log(`❌ Call Declined (${msg.callId}) by ${interpreter.username}`);
}
// Fired when a ringing call is not answered within the timeout window:
// free the interpreter, tell the caller the line was busy, drop the call.
function handleCallTimeout(callId: string) {
    const call = calls.get(callId);
    if (!call) return; // Already accepted, declined, or hung up.

    const interpreter = users.get(call.interpreter);
    const caller = users.get(call.caller);

    if (interpreter) {
        // Ring expired — put the interpreter back in the pool and inform them.
        interpreter.state = 'AVAILABLE';
        interpreter.currentCallId = undefined;
        interpreter.ws.send(JSON.stringify({ type: 'TIMEOUT', callId }));
    }
    if (caller) {
        // The caller sees an unanswered call as BUSY.
        caller.ws.send(JSON.stringify({ type: 'BUSY', payload: { message: 'No answer' }, callId }));
        caller.currentCallId = undefined;
    }

    calls.delete(callId);
    broadcastPresence();
}
// Relay in-call signaling (OFFER/ANSWER/ICE/VIDEO_VISIBLE) between the two
// call participants, and dispatch HANGUP to the teardown path.
// The sender is authorized against the call record before anything is relayed.
function handleSignaling(user: User, msg: SignalingMessage) {
    // callId is always required so we can authorize the sender.
    if (!msg.callId) return;
    const call = calls.get(msg.callId);
    if (!call) return;
    // Security check: only the two call participants may signal on this callId.
    if (call.caller !== user.username && call.interpreter !== user.username) return;

    // HANGUP is resolved via the call record itself, so it does not need a
    // 'to' field. (Previously a HANGUP without 'to' was silently dropped,
    // leaking the call entry and leaving the interpreter stuck IN_CALL.)
    if (msg.type === 'HANGUP') {
        handleHangup(user, msg.callId);
        return;
    }

    // Everything else is point-to-point and needs an explicit target.
    if (!msg.to) return;
    const targetUser = users.get(msg.to);
    if (targetUser) {
        targetUser.ws.send(JSON.stringify({
            type: msg.type,
            callId: msg.callId,
            from: user.username, // Stamped by the server; never trust client 'from'
            payload: msg.payload
        }));
    }
}
// Tear down an active (or ringing) call: notify the other participant,
// reset both legs' state, and remove the call record.
function handleHangup(user: User, callId: string) {
    const call = calls.get(callId);
    if (!call) return;
    if (call.timer) clearTimeout(call.timer);

    // Work out which participant is on the other end of the call.
    const interpreter = users.get(call.interpreter);
    const caller = users.get(call.caller);
    const peer = user.username === call.caller ? interpreter : caller;

    if (peer) {
        peer.ws.send(JSON.stringify({ type: 'HANGUP', callId, from: user.username }));
        peer.currentCallId = undefined;
        if (peer.role === 'interpreter') {
            peer.state = 'AVAILABLE';
        }
    }

    // Reset the initiator as well.
    if (user.role === 'interpreter') {
        user.state = 'AVAILABLE';
    }
    user.currentCallId = undefined;

    calls.delete(callId);
    console.log(`❌ Call Disconnected (${callId}) by ${user.username}`);
    broadcastPresence();
}
// Socket closed: drop the user from the roster first (so broadcasts no
// longer include them), then tear down any call they were part of.
function handleDisconnect(user: User) {
    users.delete(user.username);
    const activeCallId = user.currentCallId;
    if (activeCallId) {
        handleHangup(user, activeCallId);
    }
    broadcastPresence();
}
function broadcastPresence() {
const availableInterpreters = Array.from(users.values())
.filter(u => u.role === 'interpreter' && u.state === 'AVAILABLE')
.map(u => ({ username: u.username }));
const message = JSON.stringify({
type: 'PRESENCE_UPDATE',
payload: availableInterpreters
});
// Calculate stats
const numInterpreters = Array.from(users.values()).filter(u => u.role === 'interpreter').length;
const numCallers = Array.from(users.values()).filter(u => u.role === 'caller').length;
const statsMessage = JSON.stringify({
type: 'STATS_UPDATE',
payload: {
interpreters: numInterpreters,
callers: numCallers
}
});
for (const u of users.values()) {
if (u.role === 'caller') {
u.ws.send(message);
} else if (u.role === 'interpreter') {
u.ws.send(statsMessage);
}
}
}
// Send a one-off snapshot of AVAILABLE interpreters to a single socket
// (used right after a caller registers).
function sendPresenceList(ws: WebSocket) {
    const payload: { username: string }[] = [];
    for (const u of users.values()) {
        if (u.role === 'interpreter' && u.state === 'AVAILABLE') {
            payload.push({ username: u.username });
        }
    }
    ws.send(JSON.stringify({ type: 'PRESENCE_UPDATE', payload: payload }));
}
// Toggle an interpreter between AVAILABLE and UNAVAILABLE.
// Rejected while ringing or in a call; acknowledged with PRESENCE_CHANGED.
function handleSetPresence(user: User, msg: SignalingMessage) {
    // Presence is an interpreter-only concept.
    if (user.role !== 'interpreter') return;

    // Presence cannot be toggled mid-call (or while ringing).
    if (user.state === 'RINGING' || user.state === 'IN_CALL') {
        user.ws.send(JSON.stringify({ type: 'ERROR', payload: { message: 'Cannot change presence while in call' } }));
        return;
    }

    const requested = msg.payload?.state;
    if (requested !== 'AVAILABLE' && requested !== 'UNAVAILABLE') return;

    user.state = requested;
    console.log(`Presence Update: ${user.username} -> ${user.state}`);
    broadcastPresence();
    // Acknowledge the change back to the interpreter.
    user.ws.send(JSON.stringify({ type: 'PRESENCE_CHANGED', payload: { state: user.state } }));
}
// Log an abuse report filed by an interpreter about a call.
// Reports are write-only for now: printed to the server log, not persisted.
function handleReportAbuse(user: User, msg: SignalingMessage) {
    // Only interpreters may file reports.
    if (user.role !== 'interpreter') return;
    const message = msg.payload?.message;
    const offender = msg.payload?.offender;
    const callId = msg.callId;
    if (!message || !callId) {
        console.warn(`Invalid abuse report from ${user.username}: Missing message or callId`);
        return;
    }
    // Limit message length to 100 chars
    const truncatedMessage = message.substring(0, 100);
    console.log(`⚠️ ABUSE REPORT:
From: ${user.username}
About: ${offender || 'Unknown'}
Call ID: ${callId}
Message: "${truncatedMessage}"`);
}
import os from 'os';
function getLocalIpAddress() {
const interfaces = os.networkInterfaces();
for (const name of Object.keys(interfaces)) {
for (const iface of interfaces[name]!) {
// Skip over internal (i.e. 127.0.0.1) and non-IPv4 addresses
if (iface.family === 'IPv4' && !iface.internal) {
return iface.address;
}
}
}
return 'localhost';
}
// Startup banner with the LAN address clients can reach.
const localIp = getLocalIpAddress();
console.log(`Server listening on port ${PORT}`);
console.log(`Network access: http://${localIp}:${PORT}`);

// Bonjour / mDNS
// Advertise the signaling server on the local network so the iOS app can
// discover it without manual IP entry (service type "_bemyears._tcp").
import { Bonjour } from 'bonjour-service';
const bonjour = new Bonjour();
bonjour.publish({ name: 'BeMyEars Server', type: 'bemyears', port: PORT, protocol: 'tcp' });
console.log('📢 Advertising Bonjour service: _bemyears._tcp');

// Log user stats every 5 minutes
setInterval(() => {
    const numInterpreters = Array.from(users.values()).filter(u => u.role === 'interpreter').length;
    const numCallers = Array.from(users.values()).filter(u => u.role === 'caller').length;
    console.log(`📊 STATS: ${numInterpreters} Interpreters, ${numCallers} Callers online`);
}, 300000);

12
backend/tsconfig.json Normal file
View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es2016",
"module": "commonjs",
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true
}
}

36
check_build.sh Executable file
View File

@@ -0,0 +1,36 @@
#!/bin/zsh
# Compile-check the BeMyEars iOS app against a simulator destination.
# Exits 1 if xcodebuild reports an error. Builds the .xcworkspace (requires
# `pod install` to have been run first).
set -o pipefail

# --- Configuration ---
SCHEME="BeMyEars"
DEVICE_NAME="iPhone 17 Pro"
# If this simulator runtime is not installed locally, change DEVICE_NAME to
# one that is (e.g. "iPhone 16 Pro").
BUILD_PATH="./build"

echo "🔍 Checking compilation for $SCHEME..."

# Ensure xcbeautify is installed or fallback to cat
if ! command -v xcbeautify &> /dev/null; then
echo "⚠️ xcbeautify not found, using plain output."
FORMATTER="cat"
else
FORMATTER="xcbeautify"
fi

# Build Only
# `env -u` strips toolchain overrides (CC/CXX/LIBCLANG_PATH) that can break
# xcodebuild; `pipefail` above makes $? reflect xcodebuild, not $FORMATTER.
env -u CC -u CXX -u LIBCLANG_PATH xcodebuild \
-workspace "BeMyEars.xcworkspace" \
-scheme "$SCHEME" \
-destination "platform=iOS Simulator,name=$DEVICE_NAME" \
-configuration Debug \
-derivedDataPath "$BUILD_PATH" \
build | $FORMATTER

# Check exit code
if [ $? -eq 0 ]; then
echo "✅ Build Succeeded. No errors found."
else
echo "❌ Build Failed."
exit 1
fi

View File

@@ -0,0 +1,370 @@
# iOS 1:1 Video Chat for Deaf Users & Interpreters
## Full Technical Specifications Document
**Status:** LOCKED
**Version:** v1.3 (Final)
**Scope:** Proof of Concept (Production-Aligned)
-----
### 1\. Purpose & Product Overview
This project delivers an iOS proof-of-concept application that enables secure, real-time, one-to-one video communication between:
1. **Callers** (deaf users), and
2. **Interpreters** (who wait for incoming calls)
The application is designed explicitly for sign language communication, prioritizing:
* Visual clarity
* Low latency
* Predictable call behavior
* Privacy and trust
The POC is architected to evolve directly into a production system without re-architecture.
-----
### 2\. Platform & Runtime Constraints
| Category | Specification |
| :--- | :--- |
| **Target OS** | iOS |
| **Minimum iOS Version** | 18.6 |
| **UI Framework** | SwiftUI |
| **Devices** | iPhone & iPad |
| **Orientation** | Portrait & Landscape |
| **Background Execution** | Not supported (foreground-only) |
| **Distribution** | TestFlight |
| **Accessibility** | Sign-language-first UI decisions |
| **Audio** | Implicit with video (no audio-only mode) |
| **Network** | **Local Network Permission Required** (Discovery) |
-----
### 3\. User Roles & Authorization Model
**3.1 Roles**
Roles are assigned at registration time and are ephemeral.
* **Caller:** Can initiate calls.
* **Interpreter:** Can receive calls only.
**3.2 Enforcement Rules**
* Only callers may initiate `CALL_REQUEST`.
* Only interpreters may respond with `CALL_ACCEPT` or `CALL_DECLINE`.
* Interpreters cannot initiate calls.
* Role enforcement is server-side authoritative.
-----
### 4\. Identity & Presence
**4.1 Username Rules**
* Usernames must be globally unique.
* Validated by registrar server.
* Ephemeral (no persistence).
* Cannot be changed while registered.
* No authentication.
**4.2 Presence Lifecycle**
Presence is server-authoritative and maintained via:
* **Heartbeat interval:** 15 seconds
* **Presence TTL:** 20 seconds
* **Presence delivery:** WebSocket push (immediate)
A user is considered present only while:
1. Registered
2. Heartbeat valid
3. WebSocket connection active
-----
### 5\. Presence States
Each interpreter exists in exactly one state:
0. **UNAVAILABLE:** Default state after login. Can NOT receive calls.
1. **AVAILABLE:** Can receive calls.
2. **RINGING:** Call request active.
3. **IN_CALL:** Actively connected.
**Presence List Behavior**
* Only interpreters in `AVAILABLE` appear to callers.
* Interpreters in `UNAVAILABLE`, `RINGING` or `IN_CALL` are hidden.
* State transitions are server-controlled.
* `UNAVAILABLE` is the enforced starting state for all interpreters.
* Interpreters must explicitly "Go Online" to become `AVAILABLE`.
-----
### 6\. Video & Networking Architecture
**6.1 Media Transport**
* WebRTC Peer-to-Peer (1:1)
* DTLS-SRTP encryption (Media)
* **Signaling Transport:** WebSocket (`ws://`) for POC (Production requires `wss://`).
* No SFU or MCU
**6.2 NAT Traversal**
* **STUN:** for candidate discovery.
* **TURN:** self-hosted (coturn) as mandatory fallback.
* **Requirements:** TURN is required for Symmetric NATs, Carrier-grade NAT, and Enterprise firewalls.
**6.3 Video Capture & Encoding Strategy**
* **Capture:** Best available front camera format (Target 1080p @ 30fps).
* **Encoding/Streaming:** Dynamic resolution adaptation based on device aspect ratio (e.g. 16:9 for iPhones, 4:3 for iPads). Default target is 720p equivalent.
* **iPad Support:**
* **Dynamic Resolution:** Detects screen aspect ratio (using `UIScreen.nativeBounds`) to scale video output correctly (e.g. 1440x1080 for iPad 4:3) preventing distortion.
* **Stability Fix:** Explicitly forces `.high` session preset (with fallback to `.medium`/`.low`) *after* capture start to override WebRTC defaults that crash iPad Mini.
* **Format Selection:** Strictly prioritizes standard 16:9 capture formats (1280x720, 1920x1080) to ensure hardware compatibility, avoiding unstable 4:3 formats like 1280x960.
-----
### 7\. Call Lifecycle & Concurrency Model
**7.1 Call Creation (Authoritative Server Flow)**
1. Caller selects interpreter.
2. Server generates `callId` (UUID).
3. **Interpreter state transitions:** `AVAILABLE` → `RINGING`
4. Server starts 10-second ring timer.
5. Interpreter receives call request.
**7.2 Ring Outcomes**
* **Accept within 10s:** State `RINGING` → `IN_CALL`. WebRTC negotiation begins.
* **Decline:** State resets to `AVAILABLE`.
* **Timeout:** Server auto-reverts to `AVAILABLE`.
* **Race condition:** Immediate `BUSY` error to caller.
**7.3 Call Termination**
Any of the following revert interpreter to `AVAILABLE`:
* Hangup
* WebRTC failure
* ICE timeout
* Heartbeat expiration
* WebSocket disconnect
* App crash / force quit
-----
### 8\. Timeouts (Hard Guarantees)
| Stage | Timeout |
| :--- | :--- |
| **Ringing** | 10 seconds |
| **Offer/Answer** | 10 seconds after accept |
| **ICE gathering/connection** | 10–15 seconds |
| **Max “connecting” state** | 20 seconds total |
-----
### 9\. Signaling & Call Identification
**9.1 callId**
* UUID generated only by server.
* Required on all signaling messages.
* Used to correlate messages, enforce authorization, and prevent race conditions.
**9.2 Authorization Rules**
For a given `callId`:
* Only caller + interpreter may exchange signaling.
* Messages with unknown/expired `callId` are rejected.
* Messages violating role rules are rejected.
* Invalid messages are explicitly errored (not forwarded).
-----
### 10\. Signaling Protocol
**WebSocket Message Envelope**
```json
{
"type": "CALL_REQUEST | CALL_ACCEPT | CALL_DECLINE | OFFER | ANSWER | ICE | HANGUP | BUSY | REPORT_ABUSE | STATS_UPDATE | VIDEO_VISIBLE",
"callId": "uuid",
"from": "username",
"to": "username",
"payload": {}
}
```
-----
### 11\. Presence Delivery
* Presence updates are pushed immediately via WebSocket.
* Clients do not poll.
* Server is the single source of truth.
* Client does not infer presence locally.
-----
### 12\. Video UX Requirements
| Aspect | Specification |
| :--- | :--- |
| **Camera** | Front only |
| **Remote View** | Full-screen |
| **Local Preview** | Picture-in-Picture (PiP) |
| **PiP Default** | Top-right |
| **PiP Behavior** | Draggable, snap-to-corners |
| **Safe Area** | Enforced |
| **Mirroring** | Enabled (front camera) |
| **Controls** | Hang-up only |
-----
### 13\. Bandwidth & Connection Quality Rules
**Setup Phase**
* **TCP Relay:** Permitted (allowed as fallback for restrictive firewalls/campus networks).
**Connected Phase**
* **Packet Loss:** Accepted.
* **Error Correction:** WebRTC handles packet loss via NACK and FEC (Forward Error Correction).
* **Degradation Policy:**
* If quality drops below usable thresholds (defined as tunable constants): **Show "Poor Connection" UI Warning.**
* Connection remains active unless fully severed by network timeout.
-----
### 14\. iOS Client Architecture
**14.1 Frameworks**
* SwiftUI
* WebRTC iOS SDK
* Combine / async-await as needed
**14.2 Architecture Pattern**
* MVVM
**14.3 Modules**
* Registration
* Presence
* Call State Machine
* WebRTC Engine
* PiP Video View
**14.4 Call State Machine**
`Idle` → `Registered` → `Calling` / `IncomingCall` → `Connecting` → `InCall` → `Ending` / `Error`
* Transitions driven only by: User action, Server signaling, WebRTC callbacks, Timeout events.
-----
### 15\. Backend Architecture
**15.1 Stack**
* Node.js (TypeScript)
* WebSocket signaling
* In-memory presence store (POC)
* HTTPS/WSS only
**15.2 Server Responsibilities**
* Username uniqueness
* Role enforcement
* Presence tracking
* Call state transitions
* Mutex/locking on call requests
* **Service Discovery:** Advertises via Bonjour (`_bemyears._tcp`) for zero-conf client connection.
* TURN configuration delivery
-----
### 16\. TURN Server Specification
| Item | Specification |
| :--- | :--- |
| **Software** | coturn |
| **Auth** | Static long-term credentials (POC only) |
| **Transport** | UDP + TCP |
| **Encryption** | TURN-TLS enabled |
| **Capacity** | ≤5 concurrent calls |
*Security Note: Static credentials are acceptable for TestFlight only and must be replaced before App Store release.*
-----
### 17. Service Discovery (Bonjour)
**17.1 Mechanism**
* The iOS client uses `NetServiceBrowser` to discover the backend server on the local network.
* **Service Type:** `_bemyears._tcp`
* **Domain:** `local.`
* **Resolution:** Resolves IPv4 address of the backend and auto-populates it for the user.
* **Fallback:** Manual IP entry is supported via `UserDefaults` persistence.
-----
### 18. Logging & Privacy
**18.1 Allowed Server Logs**
* Timestamp (YYYY-MM-DD HH:MM:SS format)
* Event type
* callId (Required for call-related events)
* sessionId
* Role
* Offender identity (for Abuse Reports)
* Periodic User Statistics (every 5 minutes)
**18.2 Explicitly Forbidden**
* SDP bodies
* ICE candidates
* Video metadata
* Media statistics tied to identity
**18.3 Client-Side**
* No logging beyond OS crash reports.
* `lastCallId` and `lastRemoteUser` are persisted temporarily for abuse reporting only.
-----
### 19. Security Posture
* TLS for all signaling (Production).
* DTLS-SRTP for media.
* No stored personal data.
* No call content persistence.
* Server-authoritative enforcement everywhere.
-----
### 20. Acceptance Criteria
* Users register successfully with unique usernames.
* Presence updates are immediate.
* Interpreters disappear on call request.
* Race conditions result in exactly one successful call.
* Calls succeed behind NAT using TURN (including TCP relay fallback).
* PiP works reliably for signing framing.
* All failure paths recover cleanly.
-----

248
server-setup.md Normal file
View File

@@ -0,0 +1,248 @@
# BeMyEars Server Setup Guide (Customized for webrtc.jaredlog.com)
This guide details the steps to deploy the BeMyEars backend and a self-hosted TURN server (Coturn) on your specific server environment (`webrtc.jaredlog.com`).
**Environment Details:**
- **Domain:** `webrtc.jaredlog.com` (Used for both API and TURN)
- **SSH Port:** **2222** (CRITICAL: Do not lock yourself out!)
- **Existing Services:** Nginx (Proxying Gunicorn, etc.), Postgres, Redis, etc.
---
## 1. Install Node.js & Tools
Your server has a node process on 5002, so Node might already be installed. Check version matches requirements (v18+ recommended).
```bash
# Check existing version
node -v
# IF needed, update:
curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash -
sudo apt install -y nodejs
# Install PM2 (Process Manager) globally if missing
sudo npm install -g pm2 ts-node typescript
```
---
## 2. Deploy Backend Code
Deploy the code to a suitable directory (e.g. `/var/www/bemyears`).
```bash
# Clone
git clone https://github.com/your-repo/BeMyEars.git /var/www/bemyears
# Install & Build
cd /var/www/bemyears/backend
npm install
npm run build
# Start with PM2
# Note: Your server has many services. We use port 8080 for the backend internally.
# Ensure 8080 is free (it wasn't listed in your netprograms output, so it should be safe).
pm2 start dist/server.js --name "bemyears-backend"
# Save PM2 list
pm2 save
```
The backend is now running on **127.0.0.1:8080**.
---
## 3. Setup COTURN (TURN Server)
You need to run Coturn on this server to handle NAT traversal.
**Ports Checked:** `3478` and `5349` are NOT listed in your `netprograms`, so they are available.
### 3.1 Install & Configure
```bash
sudo apt install -y coturn
sudo mv /etc/turnserver.conf /etc/turnserver.conf.backup
sudo nano /etc/turnserver.conf
```
**Configuration (Copy/Paste):**
Replace `<PUBLIC_IP>` with your server's WAN IP (`74.50.98.226` from your output).
```ini
# /etc/turnserver.conf
listening-port=3478
tls-listening-port=5349
listening-ip=0.0.0.0
# External IP
external-ip=74.50.98.226
# Domain (Using the same domain is fine)
realm=webrtc.jaredlog.com
server-name=webrtc.jaredlog.com
# Static Auth
user=user:password
# Security
no-cli
no-loopback-peers
no-multicast-peers
# Certificate (Re-use existing Nginx certs if possible, or new ones)
# Ideally point to the same certs Nginx uses for webrtc.jaredlog.com
cert=/etc/letsencrypt/live/webrtc.jaredlog.com/fullchain.pem
pkey=/etc/letsencrypt/live/webrtc.jaredlog.com/privkey.pem
```
### 3.2 Start
```bash
sudo sed -i 's/#TURNSERVER_ENABLED=1/TURNSERVER_ENABLED=1/g' /etc/default/coturn
sudo systemctl restart coturn
```
---
## 4. Nginx Configuration
Your Nginx is already listening on 443. We will add a configuration to proxy `wss://webrtc.jaredlog.com` to your Node backend.
### 4.1 Create Config
```bash
sudo nano /etc/nginx/sites-available/bemyears
```
**Content:**
```nginx
server {
listen 443 ssl;
server_name webrtc.jaredlog.com;
    # SSL Config (Reuse your existing cert paths or generate new ones)
# Check /etc/nginx/sites-enabled/ for examples of your existing SSL setup
ssl_certificate /etc/letsencrypt/live/webrtc.jaredlog.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/webrtc.jaredlog.com/privkey.pem;
location / {
proxy_pass http://127.0.0.1:8080;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
}
}
```
### 4.2 Enable
```bash
sudo ln -s /etc/nginx/sites-available/bemyears /etc/nginx/sites-enabled/
sudo systemctl reload nginx
```
### 4.3 Obtain SSL Certificates (Certbot)
Since Nginx is already configured and listening on port 80/443, we use the `--nginx` plugin to request the certificates securely.
```bash
# 1. Obtain certs
sudo certbot --nginx -d webrtc.jaredlog.com
# 2. Verify paths
# Certificates should be at:
# /etc/letsencrypt/live/webrtc.jaredlog.com/fullchain.pem
# /etc/letsencrypt/live/webrtc.jaredlog.com/privkey.pem
```
### 4.4 Fix Permissions for Coturn
Coturn runs as the `turnserver` user and typically cannot read files in `/etc/letsencrypt/`. We need to grant it read access.
```bash
# Option A: Simple Group Access (Recommended)
# Add turnserver to the group that owns the cert files (ssl-cert where it
# exists; fall back to root on systems without an ssl-cert group)
sudo usermod -a -G ssl-cert turnserver || sudo usermod -a -G root turnserver
# Note: On some systems, certbot keys are owned by root:root with 700 permissions.
# A more robust hook prevents permission issues during renewal.
# Option B: Use a Deploy Hook (Robust)
# Create a script to copy certs to a turnserver-owned directory on renewal.
sudo mkdir -p /etc/coturn/certs
sudo chown -R turnserver:turnserver /etc/coturn/certs
sudo chmod 700 /etc/coturn/certs
# Create install script
sudo nano /etc/letsencrypt/renewal-hooks/deploy/coturn-cert-deploy.sh
```
Paste this script:
```bash
#!/bin/bash
DOMAIN="webrtc.jaredlog.com"
CERT_DIR="/etc/coturn/certs"
# RENEWED_DOMAINS is a space-delimited list of renewed domains, so match it
# as a member rather than comparing the whole string for equality.
case " $RENEWED_DOMAINS " in
  *" $DOMAIN "*)
    cp /etc/letsencrypt/live/$DOMAIN/fullchain.pem $CERT_DIR/turn_server_cert.pem
    cp /etc/letsencrypt/live/$DOMAIN/privkey.pem $CERT_DIR/turn_server_pkey.pem
    chown turnserver:turnserver $CERT_DIR/*.pem
    chmod 600 $CERT_DIR/*.pem
    systemctl restart coturn
    echo "Deployed new certs for Coturn"
    ;;
esac
```
Make it executable and run it once manually:
```bash
sudo chmod +x /etc/letsencrypt/renewal-hooks/deploy/coturn-cert-deploy.sh
# Run manually to populate first time (simulating environment variables)
RENEWED_DOMAINS="webrtc.jaredlog.com" sudo -E /etc/letsencrypt/renewal-hooks/deploy/coturn-cert-deploy.sh
```
**Update Coturn Config:**
If you used Option B, update `/etc/turnserver.conf`:
```ini
cert=/etc/coturn/certs/turn_server_cert.pem
pkey=/etc/coturn/certs/turn_server_pkey.pem
```
If you used Option A (Group), keep the default Let's Encrypt paths.
---
## 5. Firewall (CRITICAL)
**⚠️ WARNING:** You have SSH running on port **2222**. The default UFW rule `allow ssh` opens port 22. You MUST explicitly open 2222 or you will be locked out.
```bash
# 1. Allow Connection Management
sudo ufw allow 2222/tcp # CRITICAL: Your SSH Port
sudo ufw allow 80/tcp
sudo ufw allow 443/tcp
# 2. Allow TURN (Signaling & Relay)
sudo ufw allow 3478/tcp
sudo ufw allow 3478/udp
sudo ufw allow 5349/tcp
sudo ufw allow 5349/udp
# 3. Allow Media Range (UDP)
sudo ufw allow 49152:65535/udp
# 4. Enable
sudo ufw enable
```
---
## 6. Verification
1. **WebSocket**: Connect to `wss://webrtc.jaredlog.com` (should hit your Node backend).
2. **TURN**: Test candidates using `turn:webrtc.jaredlog.com:3478` (user:password).