This repository was archived by the owner on Oct 25, 2024. It is now read-only.

fix decode HEVC format description #200

Open · wants to merge 2 commits into base: 108-sdk
2 changes: 2 additions & 0 deletions sdk/objc/components/audio/RTCAudioSession+Private.h
@@ -79,6 +79,8 @@ NS_ASSUME_NONNULL_BEGIN
/** Notifies the receiver that there was an error when starting an audio unit. */
- (void)notifyAudioUnitStartFailedWithError:(OSStatus)error;

- (void)notifyDidChangeMicrophoneMute;

// Properties and methods for tests.
- (void)notifyDidBeginInterruption;
- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
4 changes: 4 additions & 0 deletions sdk/objc/components/audio/RTCAudioSession.h
@@ -102,6 +102,9 @@ RTC_OBJC_EXPORT
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
audioUnitStartFailedWithError:(NSError *)error;

- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didChangeMicrophoneMute:(BOOL)isMicrophoneMute;

@end

/** This is a protocol used to inform RTCAudioSession when the audio session
@@ -181,6 +184,7 @@ RTC_OBJC_EXPORT
@property(readonly) double preferredSampleRate;
@property(readonly) NSInteger inputNumberOfChannels;
@property(readonly) NSInteger outputNumberOfChannels;
@property(readonly) BOOL isMicrophoneMute;
@property(readonly) float outputVolume;
@property(readonly) NSTimeInterval inputLatency;
@property(readonly) NSTimeInterval outputLatency;
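For context: clients pick up the new notification through the existing RTCAudioSessionDelegate mechanism, and the method is optional (the implementation checks respondsToSelector: before calling it), so existing delegates are unaffected. Below is a minimal sketch of a hypothetical observer, assuming the SDK's usual sharedInstance / addDelegate: API; MyAudioObserver is not part of this patch.

// Hypothetical delegate that logs microphone mute changes.
@interface MyAudioObserver : NSObject <RTC_OBJC_TYPE(RTCAudioSessionDelegate)>
@end

@implementation MyAudioObserver

- (instancetype)init {
  if (self = [super init]) {
    // RTCAudioSession holds its delegates weakly, so retain this object elsewhere.
    [[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance] addDelegate:self];
  }
  return self;
}

- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
    didChangeMicrophoneMute:(BOOL)isMicrophoneMute {
  NSLog(@"Microphone mute changed to %d", isMicrophoneMute);
}

@end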
26 changes: 26 additions & 0 deletions sdk/objc/components/audio/RTCAudioSession.mm
@@ -55,6 +55,7 @@ @implementation RTC_OBJC_TYPE (RTCAudioSession) {
BOOL _isAudioEnabled;
BOOL _canPlayOrRecord;
BOOL _isInterrupted;
BOOL _isMicrophoneMute;
}

@synthesize session = _session;
@@ -679,6 +680,31 @@ - (void)setIsInterrupted:(BOOL)isInterrupted {
}
}

- (void)setIsMicrophoneMute:(BOOL)isMicrophoneMute {
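// Skip redundant updates; delegates are notified outside the @synchronized block.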
@synchronized(self) {
if (_isMicrophoneMute == isMicrophoneMute) {
return;
}
_isMicrophoneMute = isMicrophoneMute;
}
[self notifyDidChangeMicrophoneMute];
}

- (BOOL)isMicrophoneMute {
@synchronized(self) {
return _isMicrophoneMute;
}
}

- (void)notifyDidChangeMicrophoneMute {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSession:didChangeMicrophoneMute:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self didChangeMicrophoneMute:self.isMicrophoneMute];
}
}
}

- (BOOL)checkLock:(NSError **)outError {
if (!mutex_locked) {
if (outError) {
1 change: 1 addition & 0 deletions sdk/objc/components/audio/RTCAudioSessionConfiguration.h
@@ -32,6 +32,7 @@ RTC_OBJC_EXPORT
@property(nonatomic, assign) NSTimeInterval ioBufferDuration;
@property(nonatomic, assign) NSInteger inputNumberOfChannels;
@property(nonatomic, assign) NSInteger outputNumberOfChannels;
@property(nonatomic, assign) BOOL isMicrophoneMute;

/** Initializes configuration to defaults. */
- (instancetype)init NS_DESIGNATED_INITIALIZER;
3 changes: 3 additions & 0 deletions sdk/objc/components/audio/RTCAudioSessionConfiguration.m
@@ -62,6 +62,7 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration)
@synthesize ioBufferDuration = _ioBufferDuration;
@synthesize inputNumberOfChannels = _inputNumberOfChannels;
@synthesize outputNumberOfChannels = _outputNumberOfChannels;
@synthesize isMicrophoneMute = _isMicrophoneMute;

- (instancetype)init {
if (self = [super init]) {
@@ -96,6 +97,7 @@ - (instancetype)init {
// TODO(henrika): add support for stereo if needed.
_inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
_outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
_isMicrophoneMute = YES;  // Default the microphone to muted.
}
return self;
}
@@ -115,6 +117,7 @@ + (instancetype)currentConfiguration {
config.ioBufferDuration = session.IOBufferDuration;
config.inputNumberOfChannels = session.inputNumberOfChannels;
config.outputNumberOfChannels = session.outputNumberOfChannels;
config.isMicrophoneMute = session.isMicrophoneMute;
return config;
}

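For context: the audio-unit code further down reads [RTCAudioSessionConfiguration webRTCConfiguration] when the unit is created, so an app that wants to start with capture enabled has to override the new muted-by-default flag before that happens. A minimal sketch under that assumption, using the configuration class's existing webRTCConfiguration / setWebRTCConfiguration: methods:

// Hypothetical app-side setup; must run before WebRTC creates its audio unit.
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
    [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
config.isMicrophoneMute = NO;  // Start unmuted instead of this patch's muted default.
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:config];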
@@ -86,4 +86,9 @@ - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
_observer->OnChangedOutputVolume();
}

- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didChangeMicrophoneMute:(BOOL)isMicrophoneMute {
_observer->OnMicrophoneMuteChange(isMicrophoneMute);
}

@end
2 changes: 1 addition & 1 deletion sdk/objc/components/video_codec/RTCVideoDecoderH265.mm
@@ -101,7 +101,7 @@ - (NSInteger)decode:(RTCEncodedImage*)inputImage
}

rtc::ScopedCFTypeRef<CMVideoFormatDescriptionRef> inputFormat =
rtc::ScopedCF(webrtc::CreateVideoFormatDescription(
rtc::ScopedCF(webrtc::CreateH265VideoFormatDescription(
(uint8_t*)inputImage.buffer.bytes, inputImage.buffer.length));
if (inputFormat) {
CMVideoDimensions dimensions =
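This one-line change is the fix the PR title refers to: the H.265 decoder was building its CMVideoFormatDescription with the H.264-oriented CreateVideoFormatDescription helper, which cannot be expected to understand HEVC parameter sets. For illustration only, a minimal sketch of what an HEVC-specific helper typically boils down to; the actual CreateH265VideoFormatDescription in this tree may differ, and extraction of the VPS/SPS/PPS NAL units from the Annex B buffer is assumed to have happened already.

#import <CoreMedia/CoreMedia.h>

// Sketch only: the CoreMedia call an HEVC format-description helper is built around.
static CMVideoFormatDescriptionRef CreateHEVCFormatDescription(
    const uint8_t* vps, size_t vps_size,
    const uint8_t* sps, size_t sps_size,
    const uint8_t* pps, size_t pps_size) {
  const uint8_t* param_sets[] = {vps, sps, pps};
  const size_t param_set_sizes[] = {vps_size, sps_size, pps_size};
  CMVideoFormatDescriptionRef description = nullptr;
  // HEVC needs the VPS in addition to SPS/PPS, so CoreMedia provides a
  // dedicated constructor (iOS 11+ / macOS 10.13+) instead of the H.264 one.
  OSStatus status = CMVideoFormatDescriptionCreateFromHEVCParameterSets(
      kCFAllocatorDefault,
      /*parameterSetCount=*/3, param_sets, param_set_sizes,
      /*NALUnitHeaderLength=*/4,  // 4-byte length prefixes, as in an hvcC stream.
      /*extensions=*/nullptr, &description);
  if (status != noErr) {
    return nullptr;
  }
  return description;
}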
2 changes: 2 additions & 0 deletions sdk/objc/native/src/audio/audio_device_ios.h
@@ -147,6 +147,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
void OnValidRouteChange() override;
void OnCanPlayOrRecordChange(bool can_play_or_record) override;
void OnChangedOutputVolume() override;
void OnMicrophoneMuteChange(bool is_microphone_mute) override;

// VoiceProcessingAudioUnitObserver methods.
OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
@@ -171,6 +172,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
void HandleSampleRateChange();
void HandlePlayoutGlitchDetected();
void HandleOutputVolumeChange();
void HandleMicrophoneMuteChange(bool is_microphone_mute);

// Uses current `playout_parameters_` and `record_parameters_` to inform the
// audio device buffer (ADB) about our internal audio parameters.
21 changes: 21 additions & 0 deletions sdk/objc/native/src/audio/audio_device_ios.mm
@@ -1123,5 +1123,26 @@ static void LogDeviceInfo() {
return 0;
}

void AudioDeviceIOS::OnMicrophoneMuteChange(bool is_microphone_mute) {
RTC_DCHECK(thread_);
thread_->PostTask(SafeTask(safety_, [this, is_microphone_mute] { HandleMicrophoneMuteChange(is_microphone_mute); }));
}

void AudioDeviceIOS::HandleMicrophoneMuteChange(bool is_microphone_mute) {
RTC_DCHECK_RUN_ON(thread_);
RTCLog(@"Handling MicrophoneMute change to %d", is_microphone_mute);
if (is_microphone_mute) {
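// Muted: tear down the capture side and keep a playout-only unit running.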
StopRecording();
StopPlayout();
InitPlayout();
StartPlayout();
} else {
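// Unmuted: stop playout, bring recording back up, then restart playout.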
StopPlayout();
InitRecording();
StartRecording();
StartPlayout();
}
}

} // namespace ios_adm
} // namespace webrtc
2 changes: 2 additions & 0 deletions sdk/objc/native/src/audio/audio_session_observer.h
@@ -32,6 +32,8 @@ class AudioSessionObserver {

virtual void OnChangedOutputVolume() = 0;

virtual void OnMicrophoneMuteChange(bool is_microphone_mute) = 0;

protected:
virtual ~AudioSessionObserver() {}
};
105 changes: 63 additions & 42 deletions sdk/objc/native/src/audio/voice_processing_audio_unit.mm
@@ -110,18 +110,23 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
RTCLogError(@"AudioComponentInstanceNew failed. Error=%ld.", (long)result);
return false;
}

// Enable input on the input scope of the input element.
UInt32 enable_input = 1;
result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input, kInputBus, &enable_input,
sizeof(enable_input));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to enable input on input scope of input element. "
"Error=%ld.",
(long)result);
return false;
RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
if (webRTCConfiguration.isMicrophoneMute) {
RTCLog(@"Not Enable input on the input scope of the input element.");
} else {
RTCLog(@"Enable input on the input scope of the input element.");
UInt32 enable_input = 1;
result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input, kInputBus, &enable_input,
sizeof(enable_input));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to enable input on input scope of input element. "
"Error=%ld.",
(long)result);
return false;
}
}

// Enable output on the output scope of the output element.
@@ -155,34 +160,44 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {

// Disable AU buffer allocation for the recorder, we allocate our own.
// TODO(henrika): not sure that it actually saves resource to make this call.
UInt32 flag = 0;
result = AudioUnitSetProperty(
vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to disable buffer allocation on the input bus. "
"Error=%ld.",
(long)result);
return false;
if (webRTCConfiguration.isMicrophoneMute) {
RTCLog(@"Not Disable AU buffer allocation for the recorder.");
} else {
RTCLog(@"Disable AU buffer allocation for the recorder, we allocate our own.");
UInt32 flag = 0;
result = AudioUnitSetProperty(
vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to disable buffer allocation on the input bus. "
"Error=%ld.",
(long)result);
return false;
}
}

// Specify the callback to be called by the I/O thread to us when input audio
// is available. The recorded samples can then be obtained by calling the
// AudioUnitRender() method.
AURenderCallbackStruct input_callback;
input_callback.inputProc = OnDeliverRecordedData;
input_callback.inputProcRefCon = this;
result = AudioUnitSetProperty(vpio_unit_,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global, kInputBus,
&input_callback, sizeof(input_callback));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to specify the input callback on the input bus. "
"Error=%ld.",
(long)result);
return false;
if (webRTCConfiguration.isMicrophoneMute) {
RTCLog(@"Not Specify the callback to be called by the I/O thread to us when input audio");
} else {
RTCLog(@"Specify the callback to be called by the I/O thread to us when input audio");
AURenderCallbackStruct input_callback;
input_callback.inputProc = OnDeliverRecordedData;
input_callback.inputProcRefCon = this;
result = AudioUnitSetProperty(vpio_unit_,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global, kInputBus,
&input_callback, sizeof(input_callback));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to specify the input callback on the input bus. "
"Error=%ld.",
(long)result);
return false;
}
}

state_ = kUninitialized;
Expand All @@ -205,14 +220,20 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
#endif

// Set the format on the output scope of the input element/bus.
result =
AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, kInputBus, &format, size);
if (result != noErr) {
RTCLogError(@"Failed to set format on output scope of input bus. "
"Error=%ld.",
(long)result);
return false;
RTCAudioSessionConfiguration* webRTCConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
if (webRTCConfiguration.isMicrophoneMute) {
RTCLog(@"Not Set the format on the output scope of the input element/bus.");
} else {
RTCLog(@"Set the format on the output scope of the input element/bus.");
result =
AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, kInputBus, &format, size);
if (result != noErr) {
RTCLogError(@"Failed to set format on output scope of input bus. "
"Error=%ld.",
(long)result);
return false;
}
}

// Set the format on the input scope of the output element/bus.