fix: Improve speaker switching logic for iOS. #692

Merged · 3 commits · Feb 5, 2025
Changes from 2 commits
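This PR folds the old `setPreferSpeakerOutput()` path into `Hardware.instance.setSpeakerphoneOn()`, which now takes an optional `forceSpeakerOutput` flag (iOS only) and forwards the preference to the native audio session via `NativeAudioConfiguration.preferSpeakerOutput`. A minimal usage sketch based on the updated example app below; the import path and the surrounding room setup are assumptions, not part of this diff:

```dart
import 'package:livekit_client/livekit_client.dart';

// Default behaviour: enable the speakerphone, but let wired or Bluetooth
// headsets keep priority while they are connected.
Future<void> toggleSpeaker(bool on) async {
  await Hardware.instance.setSpeakerphoneOn(on, forceSpeakerOutput: false);
}

// iOS only: route audio to the built-in speaker even while headphones or
// a Bluetooth device are connected.
Future<void> forceBuiltInSpeaker() async {
  await Hardware.instance.setSpeakerphoneOn(true, forceSpeakerOutput: true);
}
```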
4 changes: 2 additions & 2 deletions example/lib/widgets/controls.dart
@@ -109,12 +109,12 @@ class _ControlsWidgetState extends State<ControlsWidget> {

void _setSpeakerphoneOn() {
_speakerphoneOn = !_speakerphoneOn;
Hardware.instance.setSpeakerphoneOn(_speakerphoneOn);
Hardware.instance
.setSpeakerphoneOn(_speakerphoneOn, forceSpeakerOutput: false);
setState(() {});
}

void _toggleCamera() async {
//
final track = participant.videoTrackPublications.firstOrNull?.track;
if (track == null) return;

8 changes: 4 additions & 4 deletions lib/src/core/engine.dart
@@ -541,8 +541,8 @@ class Engine extends Disposable with EventsEmittable<EngineEvent> {
type: Reliability.lossy,
)));
// _onDCStateUpdated(Reliability.lossy, state)
} catch (_) {
logger.severe('[$objectId] createDataChannel() did throw $_');
} catch (err) {
logger.severe('[$objectId] createDataChannel() did throw $err');
}

try {
@@ -558,8 +558,8 @@ class Engine extends Disposable with EventsEmittable<EngineEvent> {
state: state,
type: Reliability.reliable,
)));
} catch (_) {
logger.severe('[$objectId] createDataChannel() did throw $_');
} catch (err) {
logger.severe('[$objectId] createDataChannel() did throw $err');
}
}

12 changes: 6 additions & 6 deletions lib/src/core/transport.dart
@@ -85,15 +85,15 @@ class Transport extends Disposable {
List<rtc.RTCRtpSender> senders = [];
try {
senders = await pc.getSenders();
} catch (_) {
logger.warning('getSenders() failed with error: $_');
} catch (err) {
logger.warning('getSenders() failed with error: $err');
}

for (final e in senders) {
try {
await pc.removeTrack(e);
} catch (_) {
logger.warning('removeTrack() failed with error: $_');
} catch (err) {
logger.warning('removeTrack() failed with error: $err');
}
}

@@ -261,8 +261,8 @@
try {
final result = await pc.getRemoteDescription();
return result;
} catch (_) {
logger.warning('pc.getRemoteDescription failed with error: $_');
} catch (err) {
logger.warning('pc.getRemoteDescription failed with error: $err');
}
return null;
}
51 changes: 32 additions & 19 deletions lib/src/hardware/hardware.dart
@@ -76,14 +76,18 @@ class Hardware {

MediaDevice? selectedVideoInput;

bool? _speakerOn;
bool? get speakerOn => _preferSpeakerOutput;

bool? get speakerOn => _speakerOn;

bool _preferSpeakerOutput = false;
bool _preferSpeakerOutput = true;

bool get preferSpeakerOutput => _preferSpeakerOutput;

bool _forceSpeakerOutput = false;

/// if true, will force speaker output even if headphones or bluetooth is connected
/// only supported on iOS for now
bool get forceSpeakerOutput => _forceSpeakerOutput && _preferSpeakerOutput;

Future<List<MediaDevice>> enumerateDevices({String? type}) async {
var infos = await rtc.navigator.mediaDevices.enumerateDevices();
var devices = infos
@@ -126,33 +130,42 @@
await rtc.Helper.selectAudioInput(device.deviceId);
}

Future<void> setPreferSpeakerOutput(bool enable) async {
if (lkPlatformIs(PlatformType.iOS)) {
if (_preferSpeakerOutput != enable) {
@Deprecated('use setSpeakerphoneOn')
Future<void> setPreferSpeakerOutput(bool enable) => setSpeakerphoneOn(enable);

bool get canSwitchSpeakerphone => lkPlatformIsMobile();

/// [enable] set speakerphone on or off, by default wired/bluetooth headsets will still
/// be prioritized even if set to true.
/// [forceSpeakerOutput] if true, will force speaker output even if headphones
/// or bluetooth is connected, only supported on iOS for now
Future<void> setSpeakerphoneOn(bool enable,
{bool forceSpeakerOutput = false}) async {
if (canSwitchSpeakerphone) {
_preferSpeakerOutput = enable;
_forceSpeakerOutput = forceSpeakerOutput;
if (lkPlatformIs(PlatformType.iOS)) {
NativeAudioConfiguration? config;
if (lkPlatformIs(PlatformType.iOS)) {
// Only iOS for now...
config = await onConfigureNativeAudio.call(audioTrackState);
if (_preferSpeakerOutput && _forceSpeakerOutput) {
config = config.copyWith(
appleAudioCategoryOptions: {
AppleAudioCategoryOption.defaultToSpeaker,
},
);
}
logger.fine('configuring for ${audioTrackState} using ${config}...');
try {
await Native.configureAudio(config);
} catch (error) {
logger.warning('failed to configure ${error}');
}
}
} else {
await rtc.Helper.setSpeakerphoneOn(enable);
}
_preferSpeakerOutput = enable;
} else {
logger.warning('setPreferSpeakerOutput only support on iOS');
}
}

bool get canSwitchSpeakerphone => lkPlatformIsMobile();

Future<void> setSpeakerphoneOn(bool enable) async {
if (canSwitchSpeakerphone) {
_speakerOn = enable;
await rtc.Helper.setSpeakerphoneOn(enable);
} else {
logger.warning('setSpeakerphoneOn only support on iOS/Android');
}
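Note that the public `forceSpeakerOutput` getter is gated on `preferSpeakerOutput`, so switching the speakerphone off also clears the effective force flag. A short sketch of the resulting state, assuming a mobile platform where `canSwitchSpeakerphone` is true and that `Hardware` is exported from the package entry point:

```dart
import 'package:livekit_client/livekit_client.dart';

// Illustrative only; mirrors the getters introduced in hardware.dart above.
Future<void> demoForceFlag() async {
  final hw = Hardware.instance;

  await hw.setSpeakerphoneOn(true, forceSpeakerOutput: true);
  assert(hw.preferSpeakerOutput && hw.forceSpeakerOutput);

  // Disabling the speakerphone also clears the effective force flag, since
  // forceSpeakerOutput returns _forceSpeakerOutput && _preferSpeakerOutput.
  await hw.setSpeakerphoneOn(false);
  assert(!hw.preferSpeakerOutput && !hw.forceSpeakerOutput);
}
```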
5 changes: 0 additions & 5 deletions lib/src/support/native.dart
@@ -31,11 +31,6 @@ class Native {
static Future<bool> configureAudio(
NativeAudioConfiguration configuration) async {
try {
if (bypassVoiceProcessing) {
/// skip configuring audio if bypassVoiceProcessing
/// is enabled
return false;
}
final result = await channel.invokeMethod<bool>(
'configureNativeAudio',
configuration.toMap(),
55 changes: 47 additions & 8 deletions lib/src/support/native_audio.dart
@@ -86,15 +86,50 @@ class NativeAudioConfiguration {
final AppleAudioCategory? appleAudioCategory;
final Set<AppleAudioCategoryOption>? appleAudioCategoryOptions;
final AppleAudioMode? appleAudioMode;
final bool? preferSpeakerOutput;

NativeAudioConfiguration({
// for iOS / Mac
this.appleAudioCategory,
this.appleAudioCategoryOptions,
this.appleAudioMode,
// Android options
// ...
});
static final soloAmbient = NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.soloAmbient,
appleAudioCategoryOptions: {},
appleAudioMode: AppleAudioMode.default_,
);

static final playback = NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.playback,
appleAudioCategoryOptions: {AppleAudioCategoryOption.mixWithOthers},
appleAudioMode: AppleAudioMode.spokenAudio,
);

static final playAndRecordSpeaker = NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.playAndRecord,
appleAudioCategoryOptions: {
AppleAudioCategoryOption.allowBluetooth,
AppleAudioCategoryOption.allowBluetoothA2DP,
AppleAudioCategoryOption.allowAirPlay,
},
appleAudioMode: AppleAudioMode.videoChat,
);

static final playAndRecordReceiver = NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.playAndRecord,
appleAudioCategoryOptions: {
AppleAudioCategoryOption.allowBluetooth,
AppleAudioCategoryOption.allowBluetoothA2DP,
AppleAudioCategoryOption.allowAirPlay,
},
appleAudioMode: AppleAudioMode.voiceChat,
);

NativeAudioConfiguration(
{
// for iOS / Mac
this.appleAudioCategory,
this.appleAudioCategoryOptions,
this.appleAudioMode,
this.preferSpeakerOutput
// Android options
// ...
});

Map<String, dynamic> toMap() => <String, dynamic>{
if (appleAudioCategory != null)
@@ -104,17 +139,21 @@ class NativeAudioConfiguration {
appleAudioCategoryOptions!.map((e) => e.toStringValue()).toList(),
if (appleAudioMode != null)
'appleAudioMode': appleAudioMode!.toStringValue(),
if (preferSpeakerOutput != null)
'preferSpeakerOutput': preferSpeakerOutput,
};

NativeAudioConfiguration copyWith({
AppleAudioCategory? appleAudioCategory,
Set<AppleAudioCategoryOption>? appleAudioCategoryOptions,
AppleAudioMode? appleAudioMode,
bool? preferSpeakerOutput,
}) =>
NativeAudioConfiguration(
appleAudioCategory: appleAudioCategory ?? this.appleAudioCategory,
appleAudioCategoryOptions:
appleAudioCategoryOptions ?? this.appleAudioCategoryOptions,
appleAudioMode: appleAudioMode ?? this.appleAudioMode,
preferSpeakerOutput: preferSpeakerOutput ?? this.preferSpeakerOutput,
);
}
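The static presets added here replace the inline `NativeAudioConfiguration(...)` constructions that previously lived in `audio_management.dart`, and `preferSpeakerOutput` rides along in `toMap()` so the Swift plugin can override the output port. A small sketch of adjusting a preset the way the forced-speaker path does; illustrative only, and the import is an assumption:

```dart
import 'package:livekit_client/livekit_client.dart';

// Start from the speaker-oriented play-and-record preset and force the
// built-in speaker, mirroring what Hardware.setSpeakerphoneOn does when
// forceSpeakerOutput is set. Note copyWith replaces the whole options set.
final forced = NativeAudioConfiguration.playAndRecordSpeaker.copyWith(
  appleAudioCategoryOptions: {
    AppleAudioCategoryOption.defaultToSpeaker,
  },
  preferSpeakerOutput: true,
);

// toMap() is what gets sent over the 'configureNativeAudio' method channel;
// the iOS side reads 'preferSpeakerOutput' and overrides the audio port.
final payload = forced.toMap();
```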
54 changes: 15 additions & 39 deletions lib/src/track/audio_management.dart
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc;
import 'package:synchronized/synchronized.dart' as sync;

import '../hardware/hardware.dart';
@@ -114,6 +113,14 @@ Future<void> _onAudioTrackCountDidChange() async {
if (lkPlatformIs(PlatformType.iOS)) {
// Only iOS for now...
config = await onConfigureNativeAudio.call(_audioTrackState);

if (Hardware.instance.forceSpeakerOutput) {
config = config.copyWith(
appleAudioCategoryOptions: {
AppleAudioCategoryOption.defaultToSpeaker,
},
);
}
}

if (config != null) {
@@ -124,13 +131,6 @@
logger.warning('failed to configure ${error}');
}
}

if (lkPlatformIs(PlatformType.iOS)) {
if (Hardware.instance.speakerOn != null &&
Hardware.instance.canSwitchSpeakerphone) {
await rtc.Helper.setSpeakerphoneOn(Hardware.instance.speakerOn!);
}
}
}
}

@@ -148,38 +148,14 @@ AudioTrackState _computeAudioTrackState() {

Future<NativeAudioConfiguration> defaultNativeAudioConfigurationFunc(
AudioTrackState state) async {
//
if (state == AudioTrackState.remoteOnly &&
if (state == AudioTrackState.none) {
return NativeAudioConfiguration.soloAmbient;
} else if (state == AudioTrackState.remoteOnly &&
Hardware.instance.preferSpeakerOutput) {
return NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.playback,
appleAudioCategoryOptions: {
AppleAudioCategoryOption.mixWithOthers,
},
appleAudioMode: AppleAudioMode.spokenAudio,
);
} else if ([
AudioTrackState.localOnly,
AudioTrackState.localAndRemote,
].contains(state) ||
(state == AudioTrackState.remoteOnly &&
!Hardware.instance.preferSpeakerOutput)) {
return NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.playAndRecord,
appleAudioCategoryOptions: {
AppleAudioCategoryOption.allowBluetooth,
AppleAudioCategoryOption.allowBluetoothA2DP,
AppleAudioCategoryOption.allowAirPlay,
},
appleAudioMode: Hardware.instance.preferSpeakerOutput
? AppleAudioMode.videoChat
: AppleAudioMode.voiceChat,
);
return NativeAudioConfiguration.playback;
}

return NativeAudioConfiguration(
appleAudioCategory: AppleAudioCategory.soloAmbient,
appleAudioCategoryOptions: {},
appleAudioMode: AppleAudioMode.default_,
);
return Hardware.instance.preferSpeakerOutput
? NativeAudioConfiguration.playAndRecordSpeaker
: NativeAudioConfiguration.playAndRecordReceiver;
}
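With the presets in place, the default configuration function reduces to a small state-to-preset mapping. Restated below purely for reference (same logic as the diff above, not new behavior); the imports are assumptions about the package's public exports:

```dart
import 'package:livekit_client/livekit_client.dart';

// Same selection logic as defaultNativeAudioConfigurationFunc after this PR.
NativeAudioConfiguration configForState(AudioTrackState state) {
  // No audio tracks at all: keep the session in soloAmbient.
  if (state == AudioTrackState.none) {
    return NativeAudioConfiguration.soloAmbient;
  }
  // Remote-only audio with the speaker preferred: playback / spokenAudio.
  if (state == AudioTrackState.remoteOnly &&
      Hardware.instance.preferSpeakerOutput) {
    return NativeAudioConfiguration.playback;
  }
  // Anything involving a local track, or remote-only without the speaker
  // preference: playAndRecord, with videoChat vs. voiceChat chosen by the
  // speaker preference.
  return Hardware.instance.preferSpeakerOutput
      ? NativeAudioConfiguration.playAndRecordSpeaker
      : NativeAudioConfiguration.playAndRecordReceiver;
}
```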
2 changes: 2 additions & 0 deletions lib/src/track/track.dart
@@ -133,6 +133,8 @@ abstract class Track extends DisposableChangeNotifier

logger.fine('$objectId.stop()');

await mediaStreamTrack.stop();

_active = false;
return true;
}
16 changes: 9 additions & 7 deletions lib/src/widgets/video_track_renderer.dart
@@ -70,11 +70,14 @@ class _VideoTrackRendererState extends State<VideoTrackRenderer> {
late GlobalKey _internalKey;

Future<rtc.VideoRenderer> _initializeRenderer() async {
if (widget.renderMode == VideoRenderMode.platformView) {
if (lkPlatformIs(PlatformType.iOS) &&
widget.renderMode == VideoRenderMode.platformView) {
return Null as Future<rtc.VideoRenderer>;
}
_renderer ??= rtc.RTCVideoRenderer();
await _renderer!.initialize();
if (_renderer == null) {
_renderer = rtc.RTCVideoRenderer();
await _renderer!.initialize();
}
await _attach();
return _renderer!;
}
@@ -181,8 +184,7 @@

Widget _videoRendererView() {
if (lkPlatformIs(PlatformType.iOS) &&
[VideoRenderMode.auto, VideoRenderMode.platformView]
.contains(widget.renderMode)) {
widget.renderMode == VideoRenderMode.platformView) {
return rtc.RTCVideoPlatFormView(
mirror: _shouldMirror(),
objectFit: widget.fit,
@@ -205,8 +207,8 @@
future: _initializeRenderer(),
builder: (context, snapshot) {
if ((snapshot.hasData && _renderer != null) ||
[VideoRenderMode.auto, VideoRenderMode.platformView]
.contains(widget.renderMode)) {
(lkPlatformIs(PlatformType.iOS) &&
widget.renderMode == VideoRenderMode.platformView)) {
return Builder(
key: _internalKey,
builder: (ctx) {
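After this change, `VideoRenderMode.platformView` is only honored on iOS; on other platforms, and for `VideoRenderMode.auto` everywhere, the texture-based `RTCVideoView` path is used. A hedged usage sketch; the `VideoTrackRenderer` constructor shape is assumed from the widget fields referenced in this diff, not confirmed by it:

```dart
import 'package:flutter/widgets.dart';
import 'package:livekit_client/livekit_client.dart';

// Illustrative only: request the native platform view. With this PR the
// request only takes effect on iOS; elsewhere the texture renderer is used.
Widget buildVideo(VideoTrack track) {
  return VideoTrackRenderer(
    track,
    renderMode: VideoRenderMode.platformView,
  );
}
```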
4 changes: 4 additions & 0 deletions shared_swift/LiveKitPlugin.swift
@@ -198,6 +198,10 @@ public class LiveKitPlugin: NSObject, FlutterPlugin {
// options: configuration.categoryOptions)
// print("[LiveKit] AVAudioSession Configure success")

// preferSpeakerOutput
if let preferSpeakerOutput = args["preferSpeakerOutput"] as? Bool {
try rtcSession.overrideOutputAudioPort(preferSpeakerOutput ? .speaker : .none)
}
result(true)
} catch let error {
print("[LiveKit] Configure audio error: ", error)