From 8f47836de3b588f112c4fe4c3273c5176277edb1 Mon Sep 17 00:00:00 2001 From: geekz Date: Tue, 18 Jul 2023 17:51:37 +0800 Subject: [PATCH 1/2] fix decode HEVC format description --- sdk/objc/components/video_codec/RTCVideoDecoderH265.mm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm index da95149ae16..e4507d1d6a8 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH265.mm @@ -101,7 +101,7 @@ - (NSInteger)decode:(RTCEncodedImage*)inputImage } rtc::ScopedCFTypeRef inputFormat = - rtc::ScopedCF(webrtc::CreateVideoFormatDescription( + rtc::ScopedCF(webrtc::CreateH265VideoFormatDescription( (uint8_t*)inputImage.buffer.bytes, inputImage.buffer.length)); if (inputFormat) { CMVideoDimensions dimensions = From 7db969162fc359d7c7f4d12f04f4283ce994fbf7 Mon Sep 17 00:00:00 2001 From: geekz Date: Wed, 19 Jul 2023 13:55:56 +0800 Subject: [PATCH 2/2] ios add default microphone mute into configuration --- .../audio/RTCAudioSession+Private.h | 2 + sdk/objc/components/audio/RTCAudioSession.h | 4 + sdk/objc/components/audio/RTCAudioSession.mm | 26 +++++ .../audio/RTCAudioSessionConfiguration.h | 1 + .../audio/RTCAudioSessionConfiguration.m | 3 + .../RTCNativeAudioSessionDelegateAdapter.mm | 5 + sdk/objc/native/src/audio/audio_device_ios.h | 2 + sdk/objc/native/src/audio/audio_device_ios.mm | 21 ++++ .../native/src/audio/audio_session_observer.h | 2 + .../src/audio/voice_processing_audio_unit.mm | 105 +++++++++++------- 10 files changed, 129 insertions(+), 42 deletions(-) diff --git a/sdk/objc/components/audio/RTCAudioSession+Private.h b/sdk/objc/components/audio/RTCAudioSession+Private.h index 2be1b9fb3dd..7f8ffe46414 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Private.h +++ b/sdk/objc/components/audio/RTCAudioSession+Private.h @@ -79,6 +79,8 @@ NS_ASSUME_NONNULL_BEGIN /** 
Notifies the receiver that there was an error when starting an audio unit. */ - (void)notifyAudioUnitStartFailedWithError:(OSStatus)error; +- (void)notifyDidChangeMicrophoneMute; + // Properties and methods for tests. - (void)notifyDidBeginInterruption; - (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession; diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index 3b83b27ba5c..ae2323ec39a 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -102,6 +102,9 @@ RTC_OBJC_EXPORT - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession audioUnitStartFailedWithError:(NSError *)error; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession + didChangeMicrophoneMute:(BOOL)isMicrophoneMute; + @end /** This is a protocol used to inform RTCAudioSession when the audio session @@ -181,6 +184,7 @@ RTC_OBJC_EXPORT @property(readonly) double preferredSampleRate; @property(readonly) NSInteger inputNumberOfChannels; @property(readonly) NSInteger outputNumberOfChannels; +@property(readonly) BOOL isMicrophoneMute; @property(readonly) float outputVolume; @property(readonly) NSTimeInterval inputLatency; @property(readonly) NSTimeInterval outputLatency; diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 550a426d365..da48c013fd6 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -55,6 +55,7 @@ @implementation RTC_OBJC_TYPE (RTCAudioSession) { BOOL _isAudioEnabled; BOOL _canPlayOrRecord; BOOL _isInterrupted; + BOOL _isMicrophoneMute; } @synthesize session = _session; @@ -679,6 +680,31 @@ - (void)setIsInterrupted:(BOOL)isInterrupted { } } +- (void)setIsMicrophoneMute:(BOOL)isMicrophoneMute { + @synchronized(self) { + if (_isMicrophoneMute == isMicrophoneMute) { + return; + } + _isMicrophoneMute = isMicrophoneMute; + 
} + [self notifyDidChangeMicrophoneMute]; +} + +- (BOOL)isMicrophoneMute { + @synchronized(self) { + return _isMicrophoneMute; + } +} + +- (void)notifyDidChangeMicrophoneMute { + for (auto delegate : self.delegates) { + SEL sel = @selector(audioSession:didChangeMicrophoneMute:); + if ([delegate respondsToSelector:sel]) { + [delegate audioSession:self didChangeMicrophoneMute:self.isMicrophoneMute]; + } + } +} + - (BOOL)checkLock:(NSError **)outError { if (!mutex_locked) { if (outError) { diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h index 4582b805571..33cd26f80d1 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h @@ -32,6 +32,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) NSTimeInterval ioBufferDuration; @property(nonatomic, assign) NSInteger inputNumberOfChannels; @property(nonatomic, assign) NSInteger outputNumberOfChannels; +@property(nonatomic, assign) BOOL isMicrophoneMute; /** Initializes configuration to defaults. */ - (instancetype)init NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 39e9ac13ecc..ce08f1f3100 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -62,6 +62,7 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) @synthesize ioBufferDuration = _ioBufferDuration; @synthesize inputNumberOfChannels = _inputNumberOfChannels; @synthesize outputNumberOfChannels = _outputNumberOfChannels; +@synthesize isMicrophoneMute = _isMicrophoneMute; - (instancetype)init { if (self = [super init]) { @@ -96,6 +97,7 @@ - (instancetype)init { // TODO(henrika): add support for stereo if needed. 
_inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; + _isMicrophoneMute = true; // default set microphone to mute } return self; } @@ -115,6 +117,7 @@ + (instancetype)currentConfiguration { config.ioBufferDuration = session.IOBufferDuration; config.inputNumberOfChannels = session.inputNumberOfChannels; config.outputNumberOfChannels = session.outputNumberOfChannels; + config.isMicrophoneMute = session.isMicrophoneMute; return config; } diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index daddf314a43..4b7240e7ef7 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -86,4 +86,9 @@ - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession _observer->OnChangedOutputVolume(); } +- (void)audioSession:(RTCAudioSession *)session + didChangeMicrophoneMute:(BOOL)isMicrophoneMute { + _observer->OnMicrophoneMuteChange(isMicrophoneMute); +} + @end diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index a86acb56fe7..73b14075137 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -147,6 +147,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void OnValidRouteChange() override; void OnCanPlayOrRecordChange(bool can_play_or_record) override; void OnChangedOutputVolume() override; + void OnMicrophoneMuteChange(bool is_microphone_mute) override; // VoiceProcessingAudioUnitObserver methods. 
OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags, @@ -171,6 +172,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandleSampleRateChange(); void HandlePlayoutGlitchDetected(); void HandleOutputVolumeChange(); + void HandleMicrophoneMuteChange(bool is_microphone_mute); // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index dd2c11bdd2b..f2fa1fc6c0f 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -1123,5 +1123,26 @@ static void LogDeviceInfo() { return 0; } +void AudioDeviceIOS::OnMicrophoneMuteChange(bool is_microphone_mute) { + RTC_DCHECK(thread_); + thread_->PostTask(SafeTask(safety_, [this, is_microphone_mute] { HandleMicrophoneMuteChange(is_microphone_mute); })); +} + +void AudioDeviceIOS::HandleMicrophoneMuteChange(bool is_microphone_mute) { + RTC_DCHECK_RUN_ON(thread_); + RTCLog(@"Handling MicrophoneMute change to %d", is_microphone_mute); + if (is_microphone_mute) { + StopRecording(); + StopPlayout(); + InitPlayout(); + StartPlayout(); + } else { + StopPlayout(); + InitRecording(); + StartRecording(); + StartPlayout(); + } +} + } // namespace ios_adm } // namespace webrtc diff --git a/sdk/objc/native/src/audio/audio_session_observer.h b/sdk/objc/native/src/audio/audio_session_observer.h index f7c44c8184b..f7ca98f7f9b 100644 --- a/sdk/objc/native/src/audio/audio_session_observer.h +++ b/sdk/objc/native/src/audio/audio_session_observer.h @@ -32,6 +32,8 @@ class AudioSessionObserver { virtual void OnChangedOutputVolume() = 0; + virtual void OnMicrophoneMuteChange(bool is_microphone_mute) = 0; + protected: virtual ~AudioSessionObserver() {} }; diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 
3905b6857a6..7ff641d188f 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -110,18 +110,23 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { RTCLogError(@"AudioComponentInstanceNew failed. Error=%ld.", (long)result); return false; } - // Enable input on the input scope of the input element. - UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, - sizeof(enable_input)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to enable input on input scope of input element. " - "Error=%ld.", - (long)result); - return false; + RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration]; + if (webRTCConfiguration.isMicrophoneMute) { + RTCLog(@"Not Enable input on the input scope of the input element."); + } else { + RTCLog(@"Enable input on the input scope of the input element."); + UInt32 enable_input = 1; + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &enable_input, + sizeof(enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. " + "Error=%ld.", + (long)result); + return false; + } } // Enable output on the output scope of the output element. @@ -155,34 +160,44 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { // Disable AU buffer allocation for the recorder, we allocate our own. // TODO(henrika): not sure that it actually saves resource to make this call. - UInt32 flag = 0; - result = AudioUnitSetProperty( - vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer, - kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to disable buffer allocation on the input bus. 
" - "Error=%ld.", - (long)result); - return false; + if (webRTCConfiguration.isMicrophoneMute) { + RTCLog(@"Not Disable AU buffer allocation for the recorder."); + } else { + RTCLog(@"Disable AU buffer allocation for the recorder, we allocate our own."); + UInt32 flag = 0; + result = AudioUnitSetProperty( + vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer, + kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to disable buffer allocation on the input bus. " + "Error=%ld.", + (long)result); + return false; + } } // Specify the callback to be called by the I/O thread to us when input audio // is available. The recorded samples can then be obtained by calling the // AudioUnitRender() method. - AURenderCallbackStruct input_callback; - input_callback.inputProc = OnDeliverRecordedData; - input_callback.inputProcRefCon = this; - result = AudioUnitSetProperty(vpio_unit_, - kAudioOutputUnitProperty_SetInputCallback, - kAudioUnitScope_Global, kInputBus, - &input_callback, sizeof(input_callback)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to specify the input callback on the input bus. " - "Error=%ld.", - (long)result); - return false; + if (webRTCConfiguration.isMicrophoneMute) { + RTCLog(@"Not Specify the callback to be called by the I/O thread to us when input audio"); + } else { + RTCLog(@"Specify the callback to be called by the I/O thread to us when input audio"); + AURenderCallbackStruct input_callback; + input_callback.inputProc = OnDeliverRecordedData; + input_callback.inputProcRefCon = this; + result = AudioUnitSetProperty(vpio_unit_, + kAudioOutputUnitProperty_SetInputCallback, + kAudioUnitScope_Global, kInputBus, + &input_callback, sizeof(input_callback)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to specify the input callback on the input bus. 
" + "Error=%ld.", + (long)result); + return false; + } } state_ = kUninitialized; @@ -205,14 +220,20 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { #endif // Set the format on the output scope of the input element/bus. - result = - AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Output, kInputBus, &format, size); - if (result != noErr) { - RTCLogError(@"Failed to set format on output scope of input bus. " - "Error=%ld.", - (long)result); - return false; + RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration]; + if (webRTCConfiguration.isMicrophoneMute) { + RTCLog(@"Not Set the format on the output scope of the input element/bus."); + } else { + RTCLog(@"Set the format on the output scope of the input element/bus."); + result = + AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, kInputBus, &format, size); + if (result != noErr) { + RTCLogError(@"Failed to set format on output scope of input bus. " + "Error=%ld.", + (long)result); + return false; + } } // Set the format on the input scope of the output element/bus.