diff --git a/LiveKitExample.xcodeproj/project.pbxproj b/LiveKitExample.xcodeproj/project.pbxproj index a810ef2..d625d26 100644 --- a/LiveKitExample.xcodeproj/project.pbxproj +++ b/LiveKitExample.xcodeproj/project.pbxproj @@ -34,6 +34,7 @@ 7BBEBA832D791CB300586EC4 /* CIImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BBEBA822D791CAF00586EC4 /* CIImage.swift */; }; 7BBEBA892D79219600586EC4 /* LKButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BBEBA882D79219600586EC4 /* LKButton.swift */; }; 7BBEBA8B2D7921AA00586EC4 /* LKTextField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BBEBA8A2D7921AA00586EC4 /* LKTextField.swift */; }; + 7BBEBA8C2D7921BB00586EC4 /* SineWaveGenerator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BBEBA8D2D7921BB00586EC4 /* SineWaveGenerator.swift */; }; B5BCF77E2CFE7FDE00BCD4D8 /* BroadcastExt.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 683F05F3273F96B20080C7AC /* BroadcastExt.appex */; platformFilters = (ios, tvos, xros, ); settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; B5BCF7842CFE859A00BCD4D8 /* LiveKit in Frameworks */ = {isa = PBXBuildFile; productRef = B5BCF7832CFE859A00BCD4D8 /* LiveKit */; }; B5C2EF162D0114C800FAC766 /* LiveKitComponents in Frameworks */ = {isa = PBXBuildFile; productRef = B5C2EF152D0114C800FAC766 /* LiveKitComponents */; }; @@ -97,6 +98,7 @@ 7BBEBA822D791CAF00586EC4 /* CIImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CIImage.swift; sourceTree = "<group>"; }; 7BBEBA882D79219600586EC4 /* LKButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LKButton.swift; sourceTree = "<group>"; }; 7BBEBA8A2D7921AA00586EC4 /* LKTextField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LKTextField.swift; sourceTree = "<group>"; }; + 7BBEBA8D2D7921BB00586EC4 /* SineWaveGenerator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
SineWaveGenerator.swift; sourceTree = "<group>"; }; 9E7835E62751A71500559DEC /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS15.0.sdk/System/Library/Frameworks/CoreGraphics.framework; sourceTree = DEVELOPER_DIR; }; D7AA477A285A0FFC00EB41AE /* SampleHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleHandler.swift; sourceTree = "<group>"; }; /* End PBXFileReference section */ @@ -172,6 +174,7 @@ 68A50ECE2C4C1ED500D2DE17 /* ExampleRoomMessage.swift */, 68A50ECF2C4C1ED500D2DE17 /* Participant+Helpers.swift */, 68A50ED02C4C1ED500D2DE17 /* SecureStore.swift */, + 7BBEBA8D2D7921BB00586EC4 /* SineWaveGenerator.swift */, ); path = Support; sourceTree = "<group>"; @@ -391,6 +394,7 @@ 68A50EEF2C4C1ED500D2DE17 /* Bundle.swift in Sources */, 7BBEBA892D79219600586EC4 /* LKButton.swift in Sources */, 68A50EF02C4C1ED500D2DE17 /* AppContext.swift in Sources */, + 7BBEBA8C2D7921BB00586EC4 /* SineWaveGenerator.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; diff --git a/Multiplatform/Support/SineWaveGenerator.swift b/Multiplatform/Support/SineWaveGenerator.swift new file mode 100644 index 0000000..e5462a3 --- /dev/null +++ b/Multiplatform/Support/SineWaveGenerator.swift @@ -0,0 +1,77 @@ +/* + * Copyright 2025 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import AVFoundation +import LiveKit + +/// A simple sine wave generator for testing audio buffer capture +@MainActor +final class SineWaveGenerator: ObservableObject { + @Published var isGenerating: Bool = false + @Published var frequency: Double = 440.0 // A4 note + @Published var amplitude: Float = 0.5 + + private var generationTask: Task<Void, Never>? + private let sampleRate: Double = 48000.0 + private let bufferSize: AVAudioFrameCount = 2 * 480 // 20ms at 48kHz + + func startGenerating() { + guard !isGenerating else { return } + + isGenerating = true + generationTask = Task { + await generateSineWave() + } + } + + func stopGenerating() { + isGenerating = false + generationTask?.cancel() + generationTask = nil + } + + private func generateSineWave() async { + let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)! + var phase = 0.0 + + while isGenerating, !Task.isCancelled { + let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: bufferSize)! + buffer.frameLength = bufferSize + + guard let channelData = buffer.floatChannelData?[0] else { + continue + } + + // Generate sine wave samples + for frame in 0 ..< Int(bufferSize) { + let sample = Float(sin(phase)) * amplitude + channelData[frame] = sample + phase += 2.0 * .pi * frequency / sampleRate + + // Keep phase in range to prevent overflow + if phase > 2.0 * .pi { + phase -= 2.0 * .pi + } + } + + // Capture the audio buffer + AudioManager.shared.mixer.capture(appAudio: buffer) + + // Wait for next buffer (10ms) + try? await Task.sleep(nanoseconds: 10_000_000) // 10ms + } + } +} diff --git a/Multiplatform/Views/AudioMixerView.swift b/Multiplatform/Views/AudioMixerView.swift index 4d0e028..184fd33 100644 --- a/Multiplatform/Views/AudioMixerView.swift +++ b/Multiplatform/Views/AudioMixerView.swift @@ -14,21 +14,202 @@ * limitations under the License. 
*/ +import LiveKit import SwiftUI #if !os(tvOS) struct AudioMixerView: View { @EnvironmentObject var appCtx: AppContext + @EnvironmentObject var roomCtx: RoomContext + @StateObject private var sineWaveGenerator = SineWaveGenerator() + @State private var isManualMode: Bool = false + @State private var isMicEnabled: Bool = false + @State private var isPublishingAudioBuffer: Bool = false + @State private var errorMessage: String? var body: some View { - Text("Mic audio mixer") - HStack { - Text("Mic") - Slider(value: $appCtx.micVolume, in: 0.0 ... 1.0) + VStack(alignment: .leading, spacing: 16) { + Text("Audio Mixer") + .font(.headline) + + // Volume Controls + VStack(alignment: .leading, spacing: 8) { + HStack { + Text("Mic Volume") + Spacer() + Slider(value: $appCtx.micVolume, in: 0.0 ... 1.0) + .frame(width: 150) + } + HStack { + Text("App Volume") + Spacer() + Slider(value: $appCtx.appVolume, in: 0.0 ... 1.0) + .frame(width: 150) + } + } + + Divider() + + // Manual Mode Toggle + HStack { + Toggle("Manual Rendering Mode", isOn: $isManualMode) + .onChange(of: isManualMode) { newValue in + Task { + do { + try AudioManager.shared.setManualRenderingMode(newValue) + if newValue { + print("Manual rendering mode enabled - no device access") + } else { + print("Manual rendering mode disabled - device access restored") + } + } catch { + errorMessage = "Failed to set manual mode: \(error.localizedDescription)" + } + } + } + } + + Divider() + + // Microphone Control + HStack { + Button(action: toggleMicrophone) { + Text(isMicEnabled ? "Disable Microphone" : "Enable Microphone") + .foregroundColor(.white) + .padding(.horizontal, 16) + .padding(.vertical, 8) + .background(isMicEnabled ? 
Color.red : Color.blue) + .cornerRadius(8) + } + .disabled(roomCtx.room.connectionState != .connected) + } + + Divider() + + // Audio Buffer Controls + VStack(alignment: .leading, spacing: 12) { + Text("Audio Buffer Capture") + .font(.subheadline) + .fontWeight(.medium) + + // Sine Wave Generator Controls + VStack(alignment: .leading, spacing: 8) { + HStack { + Text("Frequency: \(Int(sineWaveGenerator.frequency)) Hz") + Spacer() + Slider(value: $sineWaveGenerator.frequency, in: 200 ... 2000) + .frame(width: 150) + } + + HStack { + Text("Amplitude: \(String(format: "%.2f", sineWaveGenerator.amplitude))") + Spacer() + Slider(value: $sineWaveGenerator.amplitude, in: 0.0 ... 1.0) + .frame(width: 150) + } + + HStack { + Button(action: { + if sineWaveGenerator.isGenerating { + sineWaveGenerator.stopGenerating() + } else { + sineWaveGenerator.startGenerating() + } + }) { + Text(sineWaveGenerator.isGenerating ? "Stop Sine Wave" : "Start Sine Wave") + .foregroundColor(.white) + .padding(.horizontal, 16) + .padding(.vertical, 8) + .background(sineWaveGenerator.isGenerating ? Color.red : Color.green) + .cornerRadius(8) + } + .disabled(roomCtx.room.connectionState != .connected) + + Spacer() + + Button(action: toggleAudioBufferPublishing) { + Text(isPublishingAudioBuffer ? "Stop Publishing Audio Buffer" : "Start Publishing Audio Buffer") + .foregroundColor(.white) + .padding(.horizontal, 16) + .padding(.vertical, 8) + .background(isPublishingAudioBuffer ? 
Color.red : Color.orange) + .cornerRadius(8) + } + .disabled(roomCtx.room.connectionState != .connected) + } + } + } + + // Error Message + if let errorMessage { + Text(errorMessage) + .foregroundColor(.red) + .font(.caption) + .padding(.top, 8) + } + + // Instructions + VStack(alignment: .leading, spacing: 4) { + Text("Instructions:") + .font(.caption) + .fontWeight(.medium) + + Text("• Enable microphone to capture both mic and app audio") + .font(.caption) + .foregroundColor(.secondary) + + Text("• Use manual mode to publish only app audio (no mic access)") + .font(.caption) + .foregroundColor(.secondary) + + Text("• Adjust volumes to control mic vs app audio levels") + .font(.caption) + .foregroundColor(.secondary) + } + .padding(.top, 8) + } + .padding() + .onAppear { + // Initialize manual mode state + isManualMode = AudioManager.shared.isManualRenderingMode + } + } + + private func toggleMicrophone() { + Task { + do { + try await roomCtx.room.localParticipant.setMicrophone(enabled: !isMicEnabled) + isMicEnabled.toggle() + } catch { + errorMessage = "Failed to toggle microphone: \(error.localizedDescription)" + } } - HStack { - Text("App") - Slider(value: $appCtx.appVolume, in: 0.0 ... 
1.0) + } + + private func toggleAudioBufferPublishing() { + if isPublishingAudioBuffer { + // Stop publishing + isPublishingAudioBuffer = false + sineWaveGenerator.stopGenerating() + } else { + // Start publishing + isPublishingAudioBuffer = true + + // If not in manual mode, enable microphone to capture both mic and app audio + if !isManualMode, !isMicEnabled { + Task { + do { + try await roomCtx.room.localParticipant.setMicrophone(enabled: true) + isMicEnabled = true + } catch { + errorMessage = "Failed to enable microphone: \(error.localizedDescription)" + return + } + } + } + + // Start generating sine wave + sineWaveGenerator.startGenerating() } } } diff --git a/Multiplatform/Views/RoomView.swift b/Multiplatform/Views/RoomView.swift index 883635a..9fbe735 100644 --- a/Multiplatform/Views/RoomView.swift +++ b/Multiplatform/Views/RoomView.swift @@ -41,7 +41,7 @@ final class WindowAccess: ObservableObject { } } - @Published public var pinned: Bool = false { + @Published var pinned: Bool = false { didSet { guard oldValue != pinned else { return } level = pinned ? .floating : .normal @@ -58,7 +58,7 @@ final class WindowAccess: ObservableObject { } } - public func set(window: NSWindow?) { + func set(window: NSWindow?) { self.window = window Task { @MainActor in objectWillChange.send() @@ -394,12 +394,12 @@ struct RoomView: View { } label: { Image(systemSymbol: .switch2) } - .disabled(!isMicrophoneEnabled) +// .disabled(!isMicrophoneEnabled) #if !os(tvOS) - .popover(isPresented: $audioMixerOptionsPresented) { - AudioMixerView() - .padding() - } + .popover(isPresented: $audioMixerOptionsPresented) { + AudioMixerView() + .padding() + } #endif #if os(iOS)