diff --git a/packages/video_player/video_player/CHANGELOG.md b/packages/video_player/video_player/CHANGELOG.md index d01c0ec1d9a..6f84fb7c061 100644 --- a/packages/video_player/video_player/CHANGELOG.md +++ b/packages/video_player/video_player/CHANGELOG.md @@ -1,5 +1,6 @@ -## NEXT +## 2.11.0 +* Adds `getAudioTracks()` and `selectAudioTrack()` methods to retrieve and select available audio tracks. * Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. ## 2.10.0 diff --git a/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist b/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist index 7c569640062..1dc6cf7652b 100644 --- a/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist +++ b/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 12.0 + 13.0 diff --git a/packages/video_player/video_player/example/ios/Podfile b/packages/video_player/video_player/example/ios/Podfile index 01d4aa611bb..17adeb14132 100644 --- a/packages/video_player/video_player/example/ios/Podfile +++ b/packages/video_player/video_player/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '12.0' +# platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj index 2ab10fb9081..1b87f424cbc 100644 --- a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj @@ -140,6 +140,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 40E43985C26639614BC3B419 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -221,6 +222,23 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 40E43985C26639614BC3B419 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -335,7 +353,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -414,7 +432,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT 
= iphoneos; @@ -465,7 +483,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; diff --git a/packages/video_player/video_player/example/lib/audio_tracks_demo.dart b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart new file mode 100644 index 00000000000..f613518b17a --- /dev/null +++ b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart @@ -0,0 +1,377 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import 'dart:collection'; + +import 'package:flutter/material.dart'; +import 'package:video_player/video_player.dart'; + +/// A demo page that showcases audio track functionality. +class AudioTracksDemo extends StatefulWidget { + /// Creates an AudioTracksDemo widget. + const AudioTracksDemo({super.key}); + + @override + State createState() => _AudioTracksDemoState(); +} + +class _AudioTracksDemoState extends State { + VideoPlayerController? _controller; + List _audioTracks = []; + bool _isLoading = false; + String? _error; + + // Track previous state to detect relevant changes + bool _wasPlaying = false; + bool _wasInitialized = false; + + // Sample video URLs with multiple audio tracks + static const List _sampleVideos = [ + 'https://flutter.github.io/assets-for-api-docs/assets/videos/butterfly.mp4', + 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8', + ]; + + int _selectedVideoIndex = 0; + + @override + void initState() { + super.initState(); + _initializeVideo(); + } + + Future _initializeVideo() async { + setState(() { + _isLoading = true; + _error = null; + }); + + try { + await _controller?.dispose(); + + final VideoPlayerController controller = VideoPlayerController.networkUrl( + Uri.parse(_sampleVideos[_selectedVideoIndex]), + ); + _controller = controller; + + await controller.initialize(); + + // Add listener for video player state changes + _controller!.addListener(_onVideoPlayerValueChanged); + + // Initialize tracking variables + _wasPlaying = _controller!.value.isPlaying; + _wasInitialized = _controller!.value.isInitialized; + + // Get audio tracks after initialization + await _loadAudioTracks(); + if (!mounted) { + return; + } + setState(() { + _isLoading = false; + }); + } catch (e) { + if (!mounted) { + return; + } + setState(() { + _error = 'Failed to initialize video: $e'; + _isLoading = false; + }); + } + } + + Future _loadAudioTracks() async { + final VideoPlayerController? controller = _controller; + if (controller == null || !controller.value.isInitialized) { + return; + } + + try { + final List tracks = await _controller!.getAudioTracks(); + if (!mounted) { + return; + } + setState(() { + _audioTracks = tracks; + }); + } catch (e) { + if (!mounted) { + return; + } + setState(() { + _error = 'Failed to load audio tracks: $e'; + }); + } + } + + Future _selectAudioTrack(String trackId) async { + final VideoPlayerController? 
controller = _controller; + if (controller == null) { + return; + } + + try { + await controller.selectAudioTrack(trackId); + + // Reload tracks to update selection status + await _loadAudioTracks(); + + if (!mounted) { + return; + } + ScaffoldMessenger.of( + context, + ).showSnackBar(SnackBar(content: Text('Selected audio track: $trackId'))); + } catch (e) { + if (!mounted) { + return; + } + ScaffoldMessenger.of(context).showSnackBar( + SnackBar(content: Text('Failed to select audio track: $e')), + ); + } + } + + void _onVideoPlayerValueChanged() { + if (!mounted || _controller == null) { + return; + } + + final VideoPlayerValue currentValue = _controller!.value; + bool shouldUpdate = false; + + // Check for relevant state changes that affect UI + if (currentValue.isPlaying != _wasPlaying) { + _wasPlaying = currentValue.isPlaying; + shouldUpdate = true; + } + + if (currentValue.isInitialized != _wasInitialized) { + _wasInitialized = currentValue.isInitialized; + shouldUpdate = true; + } + + // Only call setState if there are relevant changes + if (shouldUpdate) { + setState(() {}); + } + } + + @override + void dispose() { + _controller?.removeListener(_onVideoPlayerValueChanged); + _controller?.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Audio Tracks Demo'), + backgroundColor: Theme.of(context).colorScheme.inversePrimary, + ), + body: Column( + children: [ + // Video selection dropdown + Padding( + padding: const EdgeInsets.all(16.0), + child: DropdownMenu( + initialSelection: _selectedVideoIndex, + label: const Text('Select Video'), + inputDecorationTheme: const InputDecorationTheme( + border: OutlineInputBorder(), + ), + dropdownMenuEntries: + _sampleVideos.indexed.map(((int, String) record) { + final (int index, _) = record; + return DropdownMenuEntry( + value: index, + label: 'Video ${index + 1}', + ); + }).toList(), + onSelected: (int? value) { + if (value != null && value != _selectedVideoIndex) { + setState(() { + _selectedVideoIndex = value; + }); + _initializeVideo(); + } + }, + ), + ), + + // Video player + Expanded( + flex: 2, + child: ColoredBox(color: Colors.black, child: _buildVideoPlayer()), + ), + + // Audio tracks list + Expanded(flex: 3, child: _buildAudioTracksList()), + ], + ), + floatingActionButton: FloatingActionButton( + onPressed: _loadAudioTracks, + tooltip: 'Refresh Audio Tracks', + child: const Icon(Icons.refresh), + ), + ); + } + + Widget _buildVideoPlayer() { + if (_isLoading) { + return const Center(child: CircularProgressIndicator()); + } + + if (_error != null) { + return Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon(Icons.error, size: 48, color: Colors.red[300]), + const SizedBox(height: 16), + Text( + _error!, + style: const TextStyle(color: Colors.white), + textAlign: TextAlign.center, + ), + const SizedBox(height: 16), + ElevatedButton( + onPressed: _initializeVideo, + child: const Text('Retry'), + ), + ], + ), + ); + } + + final VideoPlayerController? controller = _controller; + if (controller?.value.isInitialized ?? false) { + return Stack( + alignment: Alignment.center, + children: [ + AspectRatio( + aspectRatio: controller!.value.aspectRatio, + child: VideoPlayer(controller), + ), + _buildPlayPauseButton(), + ], + ); + } + + return const Center( + child: Text('No video loaded', style: TextStyle(color: Colors.white)), + ); + } + + Widget _buildPlayPauseButton() { + final VideoPlayerController? 
controller = _controller; + if (controller == null) { + return const SizedBox.shrink(); + } + + return Container( + decoration: BoxDecoration( + color: Colors.black54, + borderRadius: BorderRadius.circular(30), + ), + child: IconButton( + iconSize: 48, + color: Colors.white, + onPressed: () { + if (controller.value.isPlaying) { + controller.pause(); + } else { + controller.play(); + } + }, + icon: Icon(controller.value.isPlaying ? Icons.pause : Icons.play_arrow), + ), + ); + } + + Widget _buildAudioTracksList() { + return Container( + padding: const EdgeInsets.all(16.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + const Icon(Icons.audiotrack), + const SizedBox(width: 8), + Text( + 'Audio Tracks (${_audioTracks.length})', + style: Theme.of(context).textTheme.headlineSmall, + ), + ], + ), + const SizedBox(height: 16), + + if (_audioTracks.isEmpty) + const Expanded( + child: Center( + child: Text( + 'No audio tracks available.\nTry loading a video with multiple audio tracks.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 16, color: Colors.grey), + ), + ), + ) + else + Expanded( + child: ListView.builder( + itemCount: _audioTracks.length, + itemBuilder: (BuildContext context, int index) { + final VideoAudioTrack track = _audioTracks[index]; + return _buildAudioTrackTile(track); + }, + ), + ), + ], + ), + ); + } + + Widget _buildAudioTrackTile(VideoAudioTrack track) { + return Card( + margin: const EdgeInsets.only(bottom: 8.0), + child: ListTile( + leading: CircleAvatar( + backgroundColor: track.isSelected ? Colors.green : Colors.grey, + child: Icon( + track.isSelected ? Icons.check : Icons.audiotrack, + color: Colors.white, + ), + ), + title: Text( + track.label.isNotEmpty ? track.label : 'Track ${track.id}', + style: TextStyle( + fontWeight: track.isSelected ? FontWeight.bold : FontWeight.normal, + ), + ), + subtitle: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text('ID: ${track.id}'), + Text('Language: ${track.language}'), + if (track.codec != null) Text('Codec: ${track.codec}'), + if (track.bitrate != null) Text('Bitrate: ${track.bitrate} bps'), + if (track.sampleRate != null) + Text('Sample Rate: ${track.sampleRate} Hz'), + if (track.channelCount != null) + Text('Channels: ${track.channelCount}'), + ], + ), + trailing: + track.isSelected + ? const Icon(Icons.radio_button_checked, color: Colors.green) + : const Icon(Icons.radio_button_unchecked), + onTap: track.isSelected ? 
null : () => _selectAudioTrack(track.id), + ), + ); + } +} diff --git a/packages/video_player/video_player/example/lib/main.dart b/packages/video_player/video_player/example/lib/main.dart index 1382af5d87d..bdd0cd0bd48 100644 --- a/packages/video_player/video_player/example/lib/main.dart +++ b/packages/video_player/video_player/example/lib/main.dart @@ -11,6 +11,8 @@ library; import 'package:flutter/material.dart'; import 'package:video_player/video_player.dart'; +import 'audio_tracks_demo.dart'; + void main() { runApp(MaterialApp(home: _App())); } @@ -37,6 +39,19 @@ class _App extends StatelessWidget { ); }, ), + IconButton( + key: const ValueKey('audio_tracks_demo'), + icon: const Icon(Icons.audiotrack), + tooltip: 'Audio Tracks Demo', + onPressed: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => const AudioTracksDemo(), + ), + ); + }, + ), ], bottom: const TabBar( isScrollable: true, diff --git a/packages/video_player/video_player/example/pubspec.yaml b/packages/video_player/video_player/example/pubspec.yaml index 6c990c8b34f..f531ae25ceb 100644 --- a/packages/video_player/video_player/example/pubspec.yaml +++ b/packages/video_player/video_player/example/pubspec.yaml @@ -35,3 +35,11 @@ flutter: - assets/bumble_bee_captions.srt - assets/bumble_bee_captions.vtt - assets/Audio.mp3 +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_android: + { path: ../../../../packages/video_player/video_player_android } + video_player_avfoundation: + { path: ../../../../packages/video_player/video_player_avfoundation } + video_player_web: { path: ../../../../packages/video_player/video_player_web } diff --git a/packages/video_player/video_player/lib/video_player.dart b/packages/video_player/video_player/lib/video_player.dart index 8f8ebf5d8dc..a0592722409 100644 --- a/packages/video_player/video_player/lib/video_player.dart +++ b/packages/video_player/video_player/lib/video_player.dart @@ -4,11 +4,12 @@ import 'dart:async'; import 'dart:io'; - import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:video_player_platform_interface/video_player_platform_interface.dart'; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + as platform_interface; import 'src/closed_caption_file.dart'; @@ -24,6 +25,112 @@ export 'package:video_player_platform_interface/video_player_platform_interface. export 'src/closed_caption_file.dart'; +/// Represents an audio track in a video with its metadata. +@immutable +class VideoAudioTrack { + /// Constructs an instance of [VideoAudioTrack]. + const VideoAudioTrack({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + /// Unique identifier for the audio track. + final String id; + + /// Human-readable label for the track. + final String label; + + /// Language code of the audio track (e.g., 'en', 'es', 'und'). + final String language; + + /// Whether this track is currently selected. + final bool isSelected; + + /// Bitrate of the audio track in bits per second. + /// May be null if not available from the platform. + final int? bitrate; + + /// Sample rate of the audio track in Hz. + /// May be null if not available from the platform. 
+ final int? sampleRate; + + /// Number of audio channels. + /// May be null if not available from the platform. + final int? channelCount; + + /// Audio codec used (e.g., 'aac', 'mp3', 'ac3'). + /// May be null if not available from the platform. + final String? codec; + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is VideoAudioTrack && + runtimeType == other.runtimeType && + id == other.id && + label == other.label && + language == other.language && + isSelected == other.isSelected && + bitrate == other.bitrate && + sampleRate == other.sampleRate && + channelCount == other.channelCount && + codec == other.codec; + } + + @override + int get hashCode => Object.hash( + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ); + + @override + String toString() => + 'VideoAudioTrack(' + 'id: $id, ' + 'label: $label, ' + 'language: $language, ' + 'isSelected: $isSelected, ' + 'bitrate: $bitrate, ' + 'sampleRate: $sampleRate, ' + 'channelCount: $channelCount, ' + 'codec: $codec)'; +} + +/// Converts a platform interface [VideoAudioTrack] to the public API type. + /// + /// This internal method is used to decouple the public API from the + /// platform interface implementation. + /// + /// Normalizes null values from the platform to provide a consistent API: + /// - null label becomes 'Unknown' + /// - null language becomes 'und' (undefined) + VideoAudioTrack _convertPlatformAudioTrack( + platform_interface.VideoAudioTrack platformTrack, + ) { + return VideoAudioTrack( + id: platformTrack.id, + label: platformTrack.label ?? 'Unknown', + language: platformTrack.language ?? 'und', + isSelected: platformTrack.isSelected, + bitrate: platformTrack.bitrate, + sampleRate: platformTrack.sampleRate, + channelCount: platformTrack.channelCount, + codec: platformTrack.codec, + ); +} + VideoPlayerPlatform? _lastVideoPlayerPlatform; VideoPlayerPlatform get _videoPlayerPlatform { @@ -819,6 +926,63 @@ class VideoPlayerController extends ValueNotifier<VideoPlayerValue> { } } + /// Gets the available audio tracks for the video. + /// + /// Returns a list of [VideoAudioTrack] objects containing metadata about + /// each available audio track. The list may be empty if no audio tracks + /// are available or if the video is not initialized. + /// + /// Throws an exception if the video player is disposed. + Future<List<VideoAudioTrack>> getAudioTracks() async { + if (_isDisposed) { + throw Exception('VideoPlayerController is disposed'); + } + if (!value.isInitialized) { + return <VideoAudioTrack>[]; + } + final List<platform_interface.VideoAudioTrack> platformTracks = + await _videoPlayerPlatform.getAudioTracks(_playerId); + return platformTracks.map(_convertPlatformAudioTrack).toList(); + } + + /// Selects which audio track is chosen for playback from its [trackId]. + /// + /// The [trackId] should match the ID of one of the tracks returned by + /// [getAudioTracks]. If the track ID is not found or invalid, the + /// platform may ignore the request or throw an exception. + /// + /// Throws an exception if the video player is disposed or not initialized. + Future<void> selectAudioTrack(String trackId) async { + if (_isDisposedOrNotInitialized) { + throw Exception('VideoPlayerController is disposed or not initialized'); + } + // The platform implementation (e.g., Android) will wait for the track + // selection to complete by listening to platform-specific events. + await _videoPlayerPlatform.selectAudioTrack(_playerId, trackId); + } + + /// Returns whether audio track selection is supported on this platform. 
+ /// + /// This method allows developers to query at runtime whether the current + /// platform supports audio track selection functionality. This is useful + /// for platforms like web where audio track selection may not be available. + /// + /// Returns `true` if [getAudioTracks] and [selectAudioTrack] are supported, + /// `false` otherwise. + /// + /// Example usage: + /// ```dart + /// if (controller.isAudioTrackSupportAvailable()) { + /// final tracks = await controller.getAudioTracks(); + /// // Show audio track selection UI + /// } else { + /// // Hide audio track selection UI or show unsupported message + /// } + /// ``` + bool isAudioTrackSupportAvailable() { + return _videoPlayerPlatform.isAudioTrackSupportAvailable(); + } + bool get _isDisposedOrNotInitialized => _isDisposed || !value.isInitialized; } diff --git a/packages/video_player/video_player/pubspec.yaml b/packages/video_player/video_player/pubspec.yaml index c8863f632ff..bd52bde7000 100644 --- a/packages/video_player/video_player/pubspec.yaml +++ b/packages/video_player/video_player/pubspec.yaml @@ -27,7 +27,7 @@ dependencies: html: ^0.15.0 video_player_android: ^2.8.1 video_player_avfoundation: ^2.7.0 - video_player_platform_interface: ^6.3.0 + video_player_platform_interface: ^6.6.0 video_player_web: ^2.1.0 dev_dependencies: @@ -38,3 +38,11 @@ dev_dependencies: topics: - video - video-player +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_android: + { path: ../../../packages/video_player/video_player_android } + video_player_avfoundation: + { path: ../../../packages/video_player/video_player_avfoundation } + video_player_web: { path: ../../../packages/video_player/video_player_web } diff --git a/packages/video_player/video_player/test/video_player_test.dart b/packages/video_player/video_player/test/video_player_test.dart index 8ded8dd56e6..e2cb4fe3922 100644 --- a/packages/video_player/video_player/test/video_player_test.dart +++ b/packages/video_player/video_player/test/video_player_test.dart @@ -10,7 +10,11 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:video_player/video_player.dart'; -import 'package:video_player_platform_interface/video_player_platform_interface.dart'; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + hide VideoAudioTrack; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + as platform_interface + show VideoAudioTrack; const String _localhost = 'https://127.0.0.1'; final Uri _localhostUri = Uri.parse(_localhost); @@ -84,6 +88,49 @@ class FakeController extends ValueNotifier Future setClosedCaptionFile( Future? 
closedCaptionFile, ) async {} + + @override + Future> getAudioTracks() async { + return [ + const VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ), + const VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ), + const VideoAudioTrack( + id: 'track_3', + label: 'French', + language: 'fr', + isSelected: false, + bitrate: 96000, + ), + ]; + } + + @override + Future selectAudioTrack(String trackId) async { + // Store the selected track ID for verification in tests + selectedAudioTrackId = trackId; + } + + @override + bool isAudioTrackSupportAvailable() { + // Return true for testing purposes + return true; + } + + String? selectedAudioTrackId; } Future _loadClosedCaption() async => @@ -769,6 +816,197 @@ void main() { }); }); + group('audio tracks', () { + test('getAudioTracks returns list of tracks', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + final List tracks = await controller.getAudioTracks(); + + expect(tracks.length, 3); + expect(tracks[0].id, 'track_1'); + expect(tracks[0].label, 'English'); + expect(tracks[0].language, 'en'); + expect(tracks[0].isSelected, true); + expect(tracks[0].bitrate, null); + expect(tracks[0].sampleRate, null); + expect(tracks[0].channelCount, null); + expect(tracks[0].codec, null); + + expect(tracks[1].id, 'track_2'); + expect(tracks[1].label, 'Spanish'); + expect(tracks[1].language, 'es'); + expect(tracks[1].isSelected, false); + expect(tracks[1].bitrate, 128000); + expect(tracks[1].sampleRate, 44100); + expect(tracks[1].channelCount, 2); + expect(tracks[1].codec, 'aac'); + + expect(tracks[2].id, 'track_3'); + expect(tracks[2].label, 'French'); + expect(tracks[2].language, 'fr'); + expect(tracks[2].isSelected, false); + expect(tracks[2].bitrate, 96000); + expect(tracks[2].sampleRate, null); + expect(tracks[2].channelCount, null); + expect(tracks[2].codec, null); + }); + + test('getAudioTracks before initialization returns empty list', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + final List tracks = await controller.getAudioTracks(); + expect(tracks, isEmpty); + }); + + test('selectAudioTrack works with valid track ID', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + await controller.selectAudioTrack('track_2'); + + // Verify the platform recorded the selection + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_2', + ); + }); + + test('selectAudioTrack before initialization throws', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + expect( + () => controller.selectAudioTrack('track_1'), + throwsA(isA()), + ); + }); + + test('selectAudioTrack with empty track ID', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + await controller.selectAudioTrack(''); + + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + '', + ); + }); + + test('multiple track selections update 
correctly', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + + await controller.selectAudioTrack('track_1'); + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_1', + ); + + await controller.selectAudioTrack('track_3'); + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_3', + ); + }); + }); + + group('VideoAudioTrack', () { + test('equality works correctly', () { + const VideoAudioTrack track1 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track2 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track3 = VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + ); + + expect(track1, equals(track2)); + expect(track1, isNot(equals(track3))); + }); + + test('hashCode works correctly', () { + const VideoAudioTrack track1 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track2 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + expect(track1.hashCode, equals(track2.hashCode)); + }); + + test('toString works correctly', () { + const VideoAudioTrack track = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ); + + final String trackString = track.toString(); + expect(trackString, contains('track_1')); + expect(trackString, contains('English')); + expect(trackString, contains('en')); + expect(trackString, contains('true')); + expect(trackString, contains('128000')); + expect(trackString, contains('44100')); + expect(trackString, contains('2')); + expect(trackString, contains('aac')); + }); + + test('optional fields can be null', () { + const VideoAudioTrack track = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + expect(track.bitrate, null); + expect(track.sampleRate, null); + expect(track.channelCount, null); + expect(track.codec, null); + }); + }); + group('caption', () { test('works when position updates', () async { final VideoPlayerController controller = @@ -1587,4 +1825,50 @@ class FakeVideoPlayerPlatform extends VideoPlayerPlatform { calls.add('setWebOptions'); webOptions[playerId] = options; } + + @override + Future> getAudioTracks( + int playerId, + ) async { + calls.add('getAudioTracks'); + return [ + const platform_interface.VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ), + const platform_interface.VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ), + const platform_interface.VideoAudioTrack( + id: 'track_3', + label: 'French', + language: 'fr', + isSelected: false, + bitrate: 96000, + ), + ]; + } + + @override + Future selectAudioTrack(int playerId, String trackId) async { + calls.add('selectAudioTrack'); + selectedAudioTrackIds[playerId] = trackId; + } + + @override + bool isAudioTrackSupportAvailable() { + calls.add('isAudioTrackSupportAvailable'); + return true; // Return true for testing purposes + } + + final Map selectedAudioTrackIds = 
{}; } diff --git a/packages/video_player/video_player_android/CHANGELOG.md b/packages/video_player/video_player_android/CHANGELOG.md index a6decfda03b..570df98784b 100644 --- a/packages/video_player/video_player_android/CHANGELOG.md +++ b/packages/video_player/video_player_android/CHANGELOG.md @@ -1,3 +1,7 @@ +## 2.9.0 + +* Implements `getAudioTracks()` and `selectAudioTrack()` methods for Android using ExoPlayer. + ## 2.8.17 * Moves video event processing logic to Dart, and fixes an issue where buffer diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/ExoPlayerEventListener.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/ExoPlayerEventListener.java index 5b5203b39e7..33988786a78 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/ExoPlayerEventListener.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/ExoPlayerEventListener.java @@ -5,8 +5,11 @@ package io.flutter.plugins.videoplayer; import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.media3.common.C; import androidx.media3.common.PlaybackException; import androidx.media3.common.Player; +import androidx.media3.common.Tracks; import androidx.media3.exoplayer.ExoPlayer; public abstract class ExoPlayerEventListener implements Player.Listener { @@ -88,4 +91,34 @@ public void onPlayerError(@NonNull final PlaybackException error) { public void onIsPlayingChanged(boolean isPlaying) { events.onIsPlayingStateUpdate(isPlaying); } + + @Override + public void onTracksChanged(@NonNull Tracks tracks) { + // Find the currently selected audio track and notify + String selectedTrackId = findSelectedAudioTrackId(tracks); + events.onAudioTrackChanged(selectedTrackId); + } + + /** + * Finds the ID of the currently selected audio track. 
+ * + * @param tracks The current tracks + * @return The track ID in format "groupIndex_trackIndex", or null if no audio track is selected + */ + @Nullable + private String findSelectedAudioTrackId(@NonNull Tracks tracks) { + int groupIndex = 0; + for (Tracks.Group group : tracks.getGroups()) { + if (group.getType() == C.TRACK_TYPE_AUDIO && group.isSelected()) { + // Find the selected track within this group + for (int i = 0; i < group.length; i++) { + if (group.isTrackSelected(i)) { + return groupIndex + "_" + i; + } + } + } + groupIndex++; + } + return null; + } } diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java index d297dad31cc..ca6d185a989 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java @@ -7,14 +7,23 @@ import static androidx.media3.common.Player.REPEAT_MODE_ALL; import static androidx.media3.common.Player.REPEAT_MODE_OFF; +import android.util.Log; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.media3.common.AudioAttributes; import androidx.media3.common.C; +import androidx.media3.common.Format; import androidx.media3.common.MediaItem; import androidx.media3.common.PlaybackParameters; +import androidx.media3.common.TrackGroup; +import androidx.media3.common.TrackSelectionOverride; +import androidx.media3.common.Tracks; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; +import androidx.media3.exoplayer.trackselection.DefaultTrackSelector; import io.flutter.view.TextureRegistry.SurfaceProducer; +import java.util.ArrayList; +import java.util.List; /** * A class responsible for managing video playback using {@link ExoPlayer}. @@ -26,6 +35,7 @@ public abstract class VideoPlayer implements VideoPlayerInstanceApi { @Nullable protected final SurfaceProducer surfaceProducer; @Nullable private DisposeHandler disposeHandler; @NonNull protected ExoPlayer exoPlayer; + @UnstableApi @Nullable protected DefaultTrackSelector trackSelector; /** A closure-compatible signature since {@link java.util.function.Supplier} is API level 24. 
*/ public interface ExoPlayerProvider { @@ -43,6 +53,7 @@ public interface DisposeHandler { void onDispose(); } + @UnstableApi public VideoPlayer( @NonNull VideoPlayerCallbacks events, @NonNull MediaItem mediaItem, @@ -52,6 +63,12 @@ public VideoPlayer( this.videoPlayerEvents = events; this.surfaceProducer = surfaceProducer; exoPlayer = exoPlayerProvider.get(); + + // Try to get the track selector from the ExoPlayer if it was built with one + if (exoPlayer.getTrackSelector() instanceof DefaultTrackSelector) { + trackSelector = (DefaultTrackSelector) exoPlayer.getTrackSelector(); + } + exoPlayer.setMediaItem(mediaItem); exoPlayer.prepare(); exoPlayer.addListener(createExoPlayerEventListener(exoPlayer, surfaceProducer)); @@ -122,6 +139,112 @@ public ExoPlayer getExoPlayer() { return exoPlayer; } + @UnstableApi + @Override + public @NonNull NativeAudioTrackData getAudioTracks() { + List audioTracks = new ArrayList<>(); + + // Get the current tracks from ExoPlayer + Tracks tracks = exoPlayer.getCurrentTracks(); + + // Iterate through all track groups + for (int groupIndex = 0; groupIndex < tracks.getGroups().size(); groupIndex++) { + Tracks.Group group = tracks.getGroups().get(groupIndex); + + // Only process audio tracks + if (group.getType() == C.TRACK_TYPE_AUDIO) { + for (int trackIndex = 0; trackIndex < group.length; trackIndex++) { + Format format = group.getTrackFormat(trackIndex); + boolean isSelected = group.isTrackSelected(trackIndex); + + // Create audio track data with metadata + ExoPlayerAudioTrackData audioTrack = + new ExoPlayerAudioTrackData( + (long) groupIndex, + (long) trackIndex, + format.label, + format.language, + isSelected, + format.bitrate != Format.NO_VALUE ? (long) format.bitrate : null, + format.sampleRate != Format.NO_VALUE ? (long) format.sampleRate : null, + format.channelCount != Format.NO_VALUE ? (long) format.channelCount : null, + format.codecs != null ? 
format.codecs : null); + + audioTracks.add(audioTrack); + } + } + } + return new NativeAudioTrackData(audioTracks); + } + + @UnstableApi + @Override + public void selectAudioTrack(long groupIndex, long trackIndex) { + if (trackSelector == null) { + Log.w("VideoPlayer", "Cannot select audio track: track selector is null"); + return; + } + + try { + + // Get current tracks + Tracks tracks = exoPlayer.getCurrentTracks(); + + if (groupIndex >= tracks.getGroups().size()) { + Log.w( + "VideoPlayer", + "Cannot select audio track: groupIndex " + + groupIndex + + " is out of bounds (available groups: " + + tracks.getGroups().size() + + ")"); + return; + } + + Tracks.Group group = tracks.getGroups().get((int) groupIndex); + + // Verify it's an audio track and the track index is valid + if (group.getType() != C.TRACK_TYPE_AUDIO || (int) trackIndex >= group.length) { + if (group.getType() != C.TRACK_TYPE_AUDIO) { + Log.w( + "VideoPlayer", + "Cannot select audio track: group at index " + + groupIndex + + " is not an audio track (type: " + + group.getType() + + ")"); + } else { + Log.w( + "VideoPlayer", + "Cannot select audio track: trackIndex " + + trackIndex + + " is out of bounds (available tracks in group: " + + group.length + + ")"); + } + return; + } + + // Get the track group and create a selection override + TrackGroup trackGroup = group.getMediaTrackGroup(); + TrackSelectionOverride override = new TrackSelectionOverride(trackGroup, (int) trackIndex); + + // Apply the track selection override + trackSelector.setParameters( + trackSelector.buildUponParameters().setOverrideForType(override).build()); + + } catch (ArrayIndexOutOfBoundsException e) { + Log.w( + "VideoPlayer", + "Cannot select audio track: invalid indices (groupIndex: " + + groupIndex + + ", trackIndex: " + + trackIndex + + "). 
" + + e.getMessage()); + } + } + public void dispose() { if (disposeHandler != null) { disposeHandler.onDispose(); diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerCallbacks.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerCallbacks.java index 379f73e2091..4cac902319e 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerCallbacks.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerCallbacks.java @@ -24,4 +24,6 @@ public interface VideoPlayerCallbacks { void onError(@NonNull String code, @Nullable String message, @Nullable Object details); void onIsPlayingStateUpdate(boolean isPlaying); + + void onAudioTrackChanged(@Nullable String selectedTrackId); } diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerEventCallbacks.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerEventCallbacks.java index 782f1cc2ce8..a471ec960e6 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerEventCallbacks.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerEventCallbacks.java @@ -63,4 +63,9 @@ public void onError(@NonNull String code, @Nullable String message, @Nullable Ob public void onIsPlayingStateUpdate(boolean isPlaying) { eventSink.success(new IsPlayingStateEvent(isPlaying)); } + + @Override + public void onAudioTrackChanged(@Nullable String selectedTrackId) { + eventSink.success(new AudioTrackChangedEvent(selectedTrackId)); + } } diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java index 34b7533bd38..355e82d6fb0 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java @@ -9,6 +9,7 @@ import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import androidx.media3.common.MediaItem; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; import io.flutter.plugins.videoplayer.ExoPlayerEventListener; import io.flutter.plugins.videoplayer.VideoAsset; @@ -22,6 +23,7 @@ * displaying the video in the app. */ public class PlatformViewVideoPlayer extends VideoPlayer { + @UnstableApi @VisibleForTesting public PlatformViewVideoPlayer( @NonNull VideoPlayerCallbacks events, @@ -40,6 +42,7 @@ public PlatformViewVideoPlayer( * @param options options for playback. * @return a video player instance. 
*/ + @UnstableApi @NonNull public static PlatformViewVideoPlayer create( @NonNull Context context, @@ -51,8 +54,11 @@ public static PlatformViewVideoPlayer create( asset.getMediaItem(), options, () -> { + androidx.media3.exoplayer.trackselection.DefaultTrackSelector trackSelector = + new androidx.media3.exoplayer.trackselection.DefaultTrackSelector(context); ExoPlayer.Builder builder = new ExoPlayer.Builder(context) + .setTrackSelector(trackSelector) .setMediaSourceFactory(asset.getMediaSourceFactory(context)); return builder.build(); }); diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java index 57ed030f564..4f0999248f2 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java @@ -11,6 +11,7 @@ import androidx.annotation.RestrictTo; import androidx.annotation.VisibleForTesting; import androidx.media3.common.MediaItem; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; import io.flutter.plugins.videoplayer.ExoPlayerEventListener; import io.flutter.plugins.videoplayer.VideoAsset; @@ -39,6 +40,7 @@ public final class TextureVideoPlayer extends VideoPlayer implements SurfaceProd * @param options options for playback. * @return a video player instance. */ + @UnstableApi @NonNull public static TextureVideoPlayer create( @NonNull Context context, @@ -52,13 +54,17 @@ public static TextureVideoPlayer create( asset.getMediaItem(), options, () -> { + androidx.media3.exoplayer.trackselection.DefaultTrackSelector trackSelector = + new androidx.media3.exoplayer.trackselection.DefaultTrackSelector(context); ExoPlayer.Builder builder = new ExoPlayer.Builder(context) + .setTrackSelector(trackSelector) .setMediaSourceFactory(asset.getMediaSourceFactory(context)); return builder.build(); }); } + @UnstableApi @VisibleForTesting public TextureVideoPlayer( @NonNull VideoPlayerCallbacks events, diff --git a/packages/video_player/video_player_android/android/src/main/kotlin/io/flutter/plugins/videoplayer/Messages.kt b/packages/video_player/video_player_android/android/src/main/kotlin/io/flutter/plugins/videoplayer/Messages.kt index 800026ab4d8..75bb515d245 100644 --- a/packages/video_player/video_player_android/android/src/main/kotlin/io/flutter/plugins/videoplayer/Messages.kt +++ b/packages/video_player/video_player_android/android/src/main/kotlin/io/flutter/plugins/videoplayer/Messages.kt @@ -55,7 +55,7 @@ private object MessagesPigeonUtils { } if (a is Map<*, *> && b is Map<*, *>) { return a.size == b.size && - a.all { (b as Map).contains(it.key) && deepEquals(it.value, b[it.key]) } + a.all { (b as Map).containsKey(it.key) && deepEquals(it.value, b[it.key]) } } return a == b } @@ -225,6 +225,44 @@ data class IsPlayingStateEvent(val isPlaying: Boolean) : PlatformVideoEvent() { override fun hashCode(): Int = toList().hashCode() } +/** + * Sent when audio tracks change. + * + * This includes when the selected audio track changes after calling selectAudioTrack. Corresponds + * to ExoPlayer's onTracksChanged. + * + * Generated class from Pigeon that represents data sent in messages. 
+ */ +data class AudioTrackChangedEvent( + /** The ID of the newly selected audio track, if any. */ + val selectedTrackId: String? = null +) : PlatformVideoEvent() { + companion object { + fun fromList(pigeonVar_list: List): AudioTrackChangedEvent { + val selectedTrackId = pigeonVar_list[0] as String? + return AudioTrackChangedEvent(selectedTrackId) + } + } + + fun toList(): List { + return listOf( + selectedTrackId, + ) + } + + override fun equals(other: Any?): Boolean { + if (other !is AudioTrackChangedEvent) { + return false + } + if (this === other) { + return true + } + return MessagesPigeonUtils.deepEquals(toList(), other.toList()) + } + + override fun hashCode(): Int = toList().hashCode() +} + /** * Information passed to the platform view creation. * @@ -326,6 +364,199 @@ data class TexturePlayerIds(val playerId: Long, val textureId: Long) { override fun hashCode(): Int = toList().hashCode() } +/** Generated class from Pigeon that represents data sent in messages. */ +data class PlaybackState( + /** The current playback position, in milliseconds. */ + val playPosition: Long, + /** The current buffer position, in milliseconds. */ + val bufferPosition: Long +) { + companion object { + fun fromList(pigeonVar_list: List): PlaybackState { + val playPosition = pigeonVar_list[0] as Long + val bufferPosition = pigeonVar_list[1] as Long + return PlaybackState(playPosition, bufferPosition) + } + } + + fun toList(): List { + return listOf( + playPosition, + bufferPosition, + ) + } + + override fun equals(other: Any?): Boolean { + if (other !is PlaybackState) { + return false + } + if (this === other) { + return true + } + return MessagesPigeonUtils.deepEquals(toList(), other.toList()) + } + + override fun hashCode(): Int = toList().hashCode() +} + +/** + * Represents an audio track in a video. + * + * Generated class from Pigeon that represents data sent in messages. + */ +data class AudioTrackMessage( + val id: String, + val label: String, + val language: String, + val isSelected: Boolean, + val bitrate: Long? = null, + val sampleRate: Long? = null, + val channelCount: Long? = null, + val codec: String? = null +) { + companion object { + fun fromList(pigeonVar_list: List): AudioTrackMessage { + val id = pigeonVar_list[0] as String + val label = pigeonVar_list[1] as String + val language = pigeonVar_list[2] as String + val isSelected = pigeonVar_list[3] as Boolean + val bitrate = pigeonVar_list[4] as Long? + val sampleRate = pigeonVar_list[5] as Long? + val channelCount = pigeonVar_list[6] as Long? + val codec = pigeonVar_list[7] as String? + return AudioTrackMessage( + id, label, language, isSelected, bitrate, sampleRate, channelCount, codec) + } + } + + fun toList(): List { + return listOf( + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ) + } + + override fun equals(other: Any?): Boolean { + if (other !is AudioTrackMessage) { + return false + } + if (this === other) { + return true + } + return MessagesPigeonUtils.deepEquals(toList(), other.toList()) + } + + override fun hashCode(): Int = toList().hashCode() +} + +/** + * Raw audio track data from ExoPlayer Format objects. + * + * Generated class from Pigeon that represents data sent in messages. + */ +data class ExoPlayerAudioTrackData( + val groupIndex: Long, + val trackIndex: Long, + val label: String? = null, + val language: String? = null, + val isSelected: Boolean, + val bitrate: Long? = null, + val sampleRate: Long? = null, + val channelCount: Long? = null, + val codec: String? 
= null +) { + companion object { + fun fromList(pigeonVar_list: List): ExoPlayerAudioTrackData { + val groupIndex = pigeonVar_list[0] as Long + val trackIndex = pigeonVar_list[1] as Long + val label = pigeonVar_list[2] as String? + val language = pigeonVar_list[3] as String? + val isSelected = pigeonVar_list[4] as Boolean + val bitrate = pigeonVar_list[5] as Long? + val sampleRate = pigeonVar_list[6] as Long? + val channelCount = pigeonVar_list[7] as Long? + val codec = pigeonVar_list[8] as String? + return ExoPlayerAudioTrackData( + groupIndex, + trackIndex, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec) + } + } + + fun toList(): List { + return listOf( + groupIndex, + trackIndex, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ) + } + + override fun equals(other: Any?): Boolean { + if (other !is ExoPlayerAudioTrackData) { + return false + } + if (this === other) { + return true + } + return MessagesPigeonUtils.deepEquals(toList(), other.toList()) + } + + override fun hashCode(): Int = toList().hashCode() +} + +/** + * Container for raw audio track data from Android ExoPlayer. + * + * Generated class from Pigeon that represents data sent in messages. + */ +data class NativeAudioTrackData( + /** ExoPlayer-based tracks */ + val exoPlayerTracks: List? = null +) { + companion object { + fun fromList(pigeonVar_list: List): NativeAudioTrackData { + val exoPlayerTracks = pigeonVar_list[0] as List? + return NativeAudioTrackData(exoPlayerTracks) + } + } + + fun toList(): List { + return listOf( + exoPlayerTracks, + ) + } + + override fun equals(other: Any?): Boolean { + if (other !is NativeAudioTrackData) { + return false + } + if (this === other) { + return true + } + return MessagesPigeonUtils.deepEquals(toList(), other.toList()) + } + + override fun hashCode(): Int = toList().hashCode() +} + private open class MessagesPigeonCodec : StandardMessageCodec() { override fun readValueOfType(type: Byte, buffer: ByteBuffer): Any? { return when (type) { @@ -345,16 +576,31 @@ private open class MessagesPigeonCodec : StandardMessageCodec() { return (readValue(buffer) as? List)?.let { IsPlayingStateEvent.fromList(it) } } 134.toByte() -> { + return (readValue(buffer) as? List)?.let { AudioTrackChangedEvent.fromList(it) } + } + 135.toByte() -> { return (readValue(buffer) as? List)?.let { PlatformVideoViewCreationParams.fromList(it) } } - 135.toByte() -> { + 136.toByte() -> { return (readValue(buffer) as? List)?.let { CreationOptions.fromList(it) } } - 136.toByte() -> { + 137.toByte() -> { return (readValue(buffer) as? List)?.let { TexturePlayerIds.fromList(it) } } + 138.toByte() -> { + return (readValue(buffer) as? List)?.let { PlaybackState.fromList(it) } + } + 139.toByte() -> { + return (readValue(buffer) as? List)?.let { AudioTrackMessage.fromList(it) } + } + 140.toByte() -> { + return (readValue(buffer) as? List)?.let { ExoPlayerAudioTrackData.fromList(it) } + } + 141.toByte() -> { + return (readValue(buffer) as? 
List)?.let { NativeAudioTrackData.fromList(it) } + } else -> super.readValueOfType(type, buffer) } } @@ -381,18 +627,38 @@ private open class MessagesPigeonCodec : StandardMessageCodec() { stream.write(133) writeValue(stream, value.toList()) } - is PlatformVideoViewCreationParams -> { + is AudioTrackChangedEvent -> { stream.write(134) writeValue(stream, value.toList()) } - is CreationOptions -> { + is PlatformVideoViewCreationParams -> { stream.write(135) writeValue(stream, value.toList()) } - is TexturePlayerIds -> { + is CreationOptions -> { stream.write(136) writeValue(stream, value.toList()) } + is TexturePlayerIds -> { + stream.write(137) + writeValue(stream, value.toList()) + } + is PlaybackState -> { + stream.write(138) + writeValue(stream, value.toList()) + } + is AudioTrackMessage -> { + stream.write(139) + writeValue(stream, value.toList()) + } + is ExoPlayerAudioTrackData -> { + stream.write(140) + writeValue(stream, value.toList()) + } + is NativeAudioTrackData -> { + stream.write(141) + writeValue(stream, value.toList()) + } else -> super.writeValue(stream, value) } } @@ -584,6 +850,10 @@ interface VideoPlayerInstanceApi { fun getCurrentPosition(): Long /** Returns the current buffer position, in milliseconds. */ fun getBufferedPosition(): Long + /** Gets the available audio tracks for the video. */ + fun getAudioTracks(): NativeAudioTrackData + /** Selects which audio track is chosen for playback from its [groupIndex] and [trackIndex] */ + fun selectAudioTrack(groupIndex: Long, trackIndex: Long) companion object { /** The codec used by VideoPlayerInstanceApi. */ @@ -774,6 +1044,50 @@ interface VideoPlayerInstanceApi { channel.setMessageHandler(null) } } + run { + val channel = + BasicMessageChannel( + binaryMessenger, + "dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.getAudioTracks$separatedMessageChannelSuffix", + codec) + if (api != null) { + channel.setMessageHandler { _, reply -> + val wrapped: List = + try { + listOf(api.getAudioTracks()) + } catch (exception: Throwable) { + MessagesPigeonUtils.wrapError(exception) + } + reply.reply(wrapped) + } + } else { + channel.setMessageHandler(null) + } + } + run { + val channel = + BasicMessageChannel( + binaryMessenger, + "dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.selectAudioTrack$separatedMessageChannelSuffix", + codec) + if (api != null) { + channel.setMessageHandler { message, reply -> + val args = message as List + val groupIndexArg = args[0] as Long + val trackIndexArg = args[1] as Long + val wrapped: List = + try { + api.selectAudioTrack(groupIndexArg, trackIndexArg) + listOf(null) + } catch (exception: Throwable) { + MessagesPigeonUtils.wrapError(exception) + } + reply.reply(wrapped) + } + } else { + channel.setMessageHandler(null) + } + } } } } diff --git a/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java b/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java new file mode 100644 index 00000000000..0152c39fc67 --- /dev/null +++ b/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java @@ -0,0 +1,370 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +package io.flutter.plugins.videoplayer; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import androidx.media3.common.C; +import androidx.media3.common.Format; +import androidx.media3.common.MediaItem; +import androidx.media3.common.Tracks; +import androidx.media3.exoplayer.ExoPlayer; +import com.google.common.collect.ImmutableList; +import io.flutter.view.TextureRegistry; +import java.lang.reflect.Field; +import java.util.List; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.robolectric.RobolectricTestRunner; + +@RunWith(RobolectricTestRunner.class) +public class AudioTracksTest { + + @Mock private ExoPlayer mockExoPlayer; + @Mock private VideoPlayerCallbacks mockVideoPlayerCallbacks; + @Mock private TextureRegistry.SurfaceProducer mockSurfaceProducer; + @Mock private MediaItem mockMediaItem; + @Mock private VideoPlayerOptions mockVideoPlayerOptions; + @Mock private Tracks mockTracks; + @Mock private Tracks.Group mockAudioGroup1; + @Mock private Tracks.Group mockAudioGroup2; + @Mock private Tracks.Group mockVideoGroup; + + private VideoPlayer videoPlayer; + + @Before + public void setUp() { + MockitoAnnotations.openMocks(this); + + // Create a concrete VideoPlayer implementation for testing + videoPlayer = + new VideoPlayer( + mockVideoPlayerCallbacks, + mockMediaItem, + mockVideoPlayerOptions, + mockSurfaceProducer, + () -> mockExoPlayer) { + @Override + protected ExoPlayerEventListener createExoPlayerEventListener( + ExoPlayer exoPlayer, TextureRegistry.SurfaceProducer surfaceProducer) { + return mock(ExoPlayerEventListener.class); + } + }; + } + + // Helper method to set the length field on a mocked Tracks.Group + private void setGroupLength(Tracks.Group group, int length) { + try { + Field lengthField = group.getClass().getDeclaredField("length"); + lengthField.setAccessible(true); + lengthField.setInt(group, length); + } catch (Exception e) { + // If reflection fails, we'll handle it in the test + throw new RuntimeException("Failed to set length field", e); + } + } + + @Test + public void testGetAudioTracks_withMultipleAudioTracks() { + // Create mock formats for audio tracks + Format audioFormat1 = + new Format.Builder() + .setId("audio_track_1") + .setLabel("English") + .setLanguage("en") + .setAverageBitrate(128000) + .setSampleRate(48000) + .setChannelCount(2) + .setCodecs("mp4a.40.2") + .build(); + + Format audioFormat2 = + new Format.Builder() + .setId("audio_track_2") + .setLabel("Español") + .setLanguage("es") + .setAverageBitrate(96000) + .setSampleRate(44100) + .setChannelCount(2) + .setCodecs("mp4a.40.2") + .build(); + + // Mock audio groups and set length field + setGroupLength(mockAudioGroup1, 1); + setGroupLength(mockAudioGroup2, 1); + + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat1); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + + when(mockAudioGroup2.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup2.getTrackFormat(0)).thenReturn(audioFormat2); + when(mockAudioGroup2.isTrackSelected(0)).thenReturn(false); + + when(mockVideoGroup.getType()).thenReturn(C.TRACK_TYPE_VIDEO); + + // Mock tracks + ImmutableList groups = + ImmutableList.of(mockAudioGroup1, mockAudioGroup2, mockVideoGroup); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test 
the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(2, result.size()); + + // Verify first track + ExoPlayerAudioTrackData track1 = result.get(0); + assertEquals(0L, track1.getGroupIndex()); + assertEquals(0L, track1.getTrackIndex()); + assertEquals("English", track1.getLabel()); + assertEquals("en", track1.getLanguage()); + assertTrue(track1.isSelected()); + assertEquals(Long.valueOf(128000), track1.getBitrate()); + assertEquals(Long.valueOf(48000), track1.getSampleRate()); + assertEquals(Long.valueOf(2), track1.getChannelCount()); + assertEquals("mp4a.40.2", track1.getCodec()); + + // Verify second track + ExoPlayerAudioTrackData track2 = result.get(1); + assertEquals(1L, track2.getGroupIndex()); + assertEquals(0L, track2.getTrackIndex()); + assertEquals("Español", track2.getLabel()); + assertEquals("es", track2.getLanguage()); + assertFalse(track2.isSelected()); + assertEquals(Long.valueOf(96000), track2.getBitrate()); + assertEquals(Long.valueOf(44100), track2.getSampleRate()); + assertEquals(Long.valueOf(2), track2.getChannelCount()); + assertEquals("mp4a.40.2", track2.getCodec()); + } + + @Test + public void testGetAudioTracks_withNoAudioTracks() { + // Mock video group only (no audio tracks) + when(mockVideoGroup.getType()).thenReturn(C.TRACK_TYPE_VIDEO); + + ImmutableList groups = ImmutableList.of(mockVideoGroup); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(0, result.size()); + } + + @Test + public void testGetAudioTracks_withNullValues() { + // Create format with null/missing values + Format audioFormat = + new Format.Builder() + .setId("audio_track_null") + .setLabel(null) // Null label + .setLanguage(null) // Null language + .setAverageBitrate(Format.NO_VALUE) // No bitrate + .setSampleRate(Format.NO_VALUE) // No sample rate + .setChannelCount(Format.NO_VALUE) // No channel count + .setCodecs(null) // Null codec + .build(); + + // Mock audio group and set length field + setGroupLength(mockAudioGroup1, 1); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(1, result.size()); + + ExoPlayerAudioTrackData track = result.get(0); + assertEquals(0L, track.getGroupIndex()); + assertEquals(0L, track.getTrackIndex()); + assertNull(track.getLabel()); // Null values should be preserved + assertNull(track.getLanguage()); // Null values should be preserved + assertFalse(track.isSelected()); + assertNull(track.getBitrate()); + assertNull(track.getSampleRate()); + assertNull(track.getChannelCount()); + assertNull(track.getCodec()); + } + + @Test + public void testGetAudioTracks_withMultipleTracksInSameGroup() { + // Create format for group with multiple tracks + Format audioFormat1 
= + new Format.Builder() + .setId("audio_track_1") + .setLabel("Track 1") + .setLanguage("en") + .setAverageBitrate(128000) + .build(); + + Format audioFormat2 = + new Format.Builder() + .setId("audio_track_2") + .setLabel("Track 2") + .setLanguage("en") + .setAverageBitrate(192000) + .build(); + + // Mock audio group with multiple tracks + setGroupLength(mockAudioGroup1, 2); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat1); + when(mockAudioGroup1.getTrackFormat(1)).thenReturn(audioFormat2); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + when(mockAudioGroup1.isTrackSelected(1)).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(2, result.size()); + + // Verify track indices are correct + ExoPlayerAudioTrackData track1 = result.get(0); + ExoPlayerAudioTrackData track2 = result.get(1); + assertEquals(0L, track1.getGroupIndex()); + assertEquals(0L, track1.getTrackIndex()); + assertEquals(0L, track2.getGroupIndex()); + assertEquals(1L, track2.getTrackIndex()); + // Tracks have same group but different track indices + assertEquals(track1.getGroupIndex(), track2.getGroupIndex()); + assertNotEquals(track1.getTrackIndex(), track2.getTrackIndex()); + } + + @Test + public void testGetAudioTracks_withDifferentCodecs() { + // Test various codec formats + Format aacFormat = new Format.Builder().setCodecs("mp4a.40.2").setLabel("AAC Track").build(); + + Format ac3Format = new Format.Builder().setCodecs("ac-3").setLabel("AC3 Track").build(); + + Format eac3Format = new Format.Builder().setCodecs("ec-3").setLabel("EAC3 Track").build(); + + // Mock audio group with different codecs + setGroupLength(mockAudioGroup1, 3); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(aacFormat); + when(mockAudioGroup1.getTrackFormat(1)).thenReturn(ac3Format); + when(mockAudioGroup1.getTrackFormat(2)).thenReturn(eac3Format); + when(mockAudioGroup1.isTrackSelected(anyInt())).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(3, result.size()); + + assertEquals("mp4a.40.2", result.get(0).getCodec()); + assertEquals("ac-3", result.get(1).getCodec()); + assertEquals("ec-3", result.get(2).getCodec()); + } + + @Test + public void testGetAudioTracks_withHighBitrateValues() { + // Test with high bitrate values + Format highBitrateFormat = + new Format.Builder() + .setId("high_bitrate_track") + .setLabel("High Quality") + .setAverageBitrate(1536000) // 1.5 Mbps + .setSampleRate(96000) // 96 kHz + .setChannelCount(8) // 7.1 surround + .build(); + + // Mock audio group with high bitrate format + setGroupLength(mockAudioGroup1, 1); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(highBitrateFormat); + 
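+    // isTrackSelected on the group is what the returned ExoPlayerAudioTrackData is expected to
+    // report via isSelected().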
when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(1, result.size()); + + ExoPlayerAudioTrackData track = result.get(0); + assertEquals(Long.valueOf(1536000), track.getBitrate()); + assertEquals(Long.valueOf(96000), track.getSampleRate()); + assertEquals(Long.valueOf(8), track.getChannelCount()); + } + + @Test + public void testGetAudioTracks_performanceWithManyTracks() { + // Test performance with many audio tracks + int numGroups = 50; + List groups = new java.util.ArrayList<>(); + + for (int i = 0; i < numGroups; i++) { + Format format = + new Format.Builder().setId("track_" + i).setLabel("Track " + i).setLanguage("en").build(); + + Tracks.Group mockGroup = mock(Tracks.Group.class); + setGroupLength(mockGroup, 1); + when(mockGroup.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockGroup.getTrackFormat(0)).thenReturn(format); + when(mockGroup.isTrackSelected(0)).thenReturn(i == 0); // Only first track selected + groups.add(mockGroup); + } + + when(mockTracks.getGroups()).thenReturn(ImmutableList.copyOf(groups)); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Measure performance + long startTime = System.currentTimeMillis(); + NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + long endTime = System.currentTimeMillis(); + + // Verify results + assertNotNull(result); + assertEquals(numGroups, result.size()); + + // Should complete within reasonable time (1 second for 50 tracks) + assertTrue( + "getAudioTracks took too long: " + (endTime - startTime) + "ms", + (endTime - startTime) < 1000); + } +} diff --git a/packages/video_player/video_player_android/example/pubspec.yaml b/packages/video_player/video_player_android/example/pubspec.yaml index 4afc63d4990..07c5b497d5d 100644 --- a/packages/video_player/video_player_android/example/pubspec.yaml +++ b/packages/video_player/video_player_android/example/pubspec.yaml @@ -18,7 +18,7 @@ dependencies: # The example app is bundled with the plugin so we use a path dependency on # the parent directory to use the current plugin's version. path: ../ - video_player_platform_interface: ^6.3.0 + video_player_platform_interface: ^6.6.0 dev_dependencies: espresso: ^0.4.0 diff --git a/packages/video_player/video_player_android/lib/src/android_video_player.dart b/packages/video_player/video_player_android/lib/src/android_video_player.dart index f65b83b8a84..fbcb38fdc31 100644 --- a/packages/video_player/video_player_android/lib/src/android_video_player.dart +++ b/packages/video_player/video_player_android/lib/src/android_video_player.dart @@ -225,6 +225,47 @@ class AndroidVideoPlayer extends VideoPlayerPlatform { return _api.setMixWithOthers(mixWithOthers); } + @override + Future> getAudioTracks(int playerId) async { + final NativeAudioTrackData nativeData = await _playerWith( + id: playerId, + ).getAudioTracks(); + final List tracks = []; + + // Convert ExoPlayer tracks to VideoAudioTrack + if (nativeData.exoPlayerTracks != null) { + for (final ExoPlayerAudioTrackData track in nativeData.exoPlayerTracks!) 
{ + // Construct a string ID from groupIndex and trackIndex for compatibility + final String trackId = '${track.groupIndex}_${track.trackIndex}'; + tracks.add( + VideoAudioTrack( + id: trackId, + label: track.label, + language: track.language, + isSelected: track.isSelected, + bitrate: track.bitrate, + sampleRate: track.sampleRate, + channelCount: track.channelCount, + codec: track.codec, + ), + ); + } + } + + return tracks; + } + + @override + Future selectAudioTrack(int playerId, String trackId) { + return _playerWith(id: playerId).selectAudioTrack(trackId); + } + + @override + bool isAudioTrackSupportAvailable() { + // Android with ExoPlayer supports audio track selection + return true; + } + _PlayerInstance _playerWith({required int id}) { final _PlayerInstance? player = _players[id]; return player ?? (throw StateError('No active player with ID $id.')); @@ -272,6 +313,7 @@ class _PlayerInstance { Timer? _bufferPollingTimer; int _lastBufferPosition = -1; bool _isBuffering = false; + Completer? _audioTrackSelectionCompleter; final VideoPlayerViewState viewState; @@ -307,6 +349,41 @@ class _PlayerInstance { return _eventStreamController.stream; } + Future getAudioTracks() { + return _api.getAudioTracks(); + } + + Future selectAudioTrack(String trackId) async { + // Parse the trackId to get groupIndex and trackIndex + final List parts = trackId.split('_'); + if (parts.length != 2) { + throw ArgumentError( + 'Invalid trackId format: "$trackId". Expected format: "groupIndex_trackIndex"', + ); + } + + final int groupIndex = int.parse(parts[0]); + final int trackIndex = int.parse(parts[1]); + + // Create a completer to wait for the track selection to complete + _audioTrackSelectionCompleter = Completer(); + + try { + await _api.selectAudioTrack(groupIndex, trackIndex); + + // Wait for the onTracksChanged event from ExoPlayer with a timeout + await _audioTrackSelectionCompleter!.future.timeout( + const Duration(seconds: 5), + onTimeout: () { + // If we timeout, just continue - the track may still have been selected + // This is a fallback in case the event doesn't arrive for some reason + }, + ); + } finally { + _audioTrackSelectionCompleter = null; + } + } + Future dispose() async { _isDisposed = true; _bufferPollingTimer?.cancel(); @@ -403,6 +480,13 @@ class _PlayerInstance { if (event.state != PlatformPlaybackState.buffering) { _setBuffering(false); } + case AudioTrackChangedEvent _: + // Complete the audio track selection completer if it exists + // This signals that the track selection has completed + if (_audioTrackSelectionCompleter != null && + !_audioTrackSelectionCompleter!.isCompleted) { + _audioTrackSelectionCompleter!.complete(); + } } } diff --git a/packages/video_player/video_player_android/lib/src/messages.g.dart b/packages/video_player/video_player_android/lib/src/messages.g.dart index 5674729aeb1..75bce2b2cb1 100644 --- a/packages/video_player/video_player_android/lib/src/messages.g.dart +++ b/packages/video_player/video_player_android/lib/src/messages.g.dart @@ -178,6 +178,46 @@ class IsPlayingStateEvent extends PlatformVideoEvent { int get hashCode => Object.hashAll(_toList()); } +/// Sent when audio tracks change. +/// +/// This includes when the selected audio track changes after calling selectAudioTrack. +/// Corresponds to ExoPlayer's onTracksChanged. +class AudioTrackChangedEvent extends PlatformVideoEvent { + AudioTrackChangedEvent({this.selectedTrackId}); + + /// The ID of the newly selected audio track, if any. + String? 
selectedTrackId; + + List _toList() { + return [selectedTrackId]; + } + + Object encode() { + return _toList(); + } + + static AudioTrackChangedEvent decode(Object result) { + result as List; + return AudioTrackChangedEvent(selectedTrackId: result[0] as String?); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AudioTrackChangedEvent || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + /// Information passed to the platform view creation. class PlatformVideoViewCreationParams { PlatformVideoViewCreationParams({required this.playerId}); @@ -307,6 +347,247 @@ class TexturePlayerIds { int get hashCode => Object.hashAll(_toList()); } +class PlaybackState { + PlaybackState({required this.playPosition, required this.bufferPosition}); + + /// The current playback position, in milliseconds. + int playPosition; + + /// The current buffer position, in milliseconds. + int bufferPosition; + + List _toList() { + return [playPosition, bufferPosition]; + } + + Object encode() { + return _toList(); + } + + static PlaybackState decode(Object result) { + result as List; + return PlaybackState( + playPosition: result[0]! as int, + bufferPosition: result[1]! as int, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! PlaybackState || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + + String label; + + String language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AudioTrackMessage decode(Object result) { + result as List; + return AudioTrackMessage( + id: result[0]! as String, + label: result[1]! as String, + language: result[2]! as String, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AudioTrackMessage || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from ExoPlayer Format objects. 
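+///
+/// The [groupIndex] and [trackIndex] identify a track within ExoPlayer's track groups; the Dart
+/// layer in `AndroidVideoPlayer` joins them into a `'groupIndex_trackIndex'` string ID.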
+class ExoPlayerAudioTrackData { + ExoPlayerAudioTrackData({ + required this.groupIndex, + required this.trackIndex, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int groupIndex; + + int trackIndex; + + String? label; + + String? language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + groupIndex, + trackIndex, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static ExoPlayerAudioTrackData decode(Object result) { + result as List; + return ExoPlayerAudioTrackData( + groupIndex: result[0]! as int, + trackIndex: result[1]! as int, + label: result[2] as String?, + language: result[3] as String?, + isSelected: result[4]! as bool, + bitrate: result[5] as int?, + sampleRate: result[6] as int?, + channelCount: result[7] as int?, + codec: result[8] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! ExoPlayerAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Container for raw audio track data from Android ExoPlayer. +class NativeAudioTrackData { + NativeAudioTrackData({this.exoPlayerTracks}); + + /// ExoPlayer-based tracks + List? exoPlayerTracks; + + List _toList() { + return [exoPlayerTracks]; + } + + Object encode() { + return _toList(); + } + + static NativeAudioTrackData decode(Object result) { + result as List; + return NativeAudioTrackData( + exoPlayerTracks: (result[0] as List?) + ?.cast(), + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! 
NativeAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -329,15 +610,30 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is IsPlayingStateEvent) { buffer.putUint8(133); writeValue(buffer, value.encode()); - } else if (value is PlatformVideoViewCreationParams) { + } else if (value is AudioTrackChangedEvent) { buffer.putUint8(134); writeValue(buffer, value.encode()); - } else if (value is CreationOptions) { + } else if (value is PlatformVideoViewCreationParams) { buffer.putUint8(135); writeValue(buffer, value.encode()); - } else if (value is TexturePlayerIds) { + } else if (value is CreationOptions) { buffer.putUint8(136); writeValue(buffer, value.encode()); + } else if (value is TexturePlayerIds) { + buffer.putUint8(137); + writeValue(buffer, value.encode()); + } else if (value is PlaybackState) { + buffer.putUint8(138); + writeValue(buffer, value.encode()); + } else if (value is AudioTrackMessage) { + buffer.putUint8(139); + writeValue(buffer, value.encode()); + } else if (value is ExoPlayerAudioTrackData) { + buffer.putUint8(140); + writeValue(buffer, value.encode()); + } else if (value is NativeAudioTrackData) { + buffer.putUint8(141); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -359,11 +655,21 @@ class _PigeonCodec extends StandardMessageCodec { case 133: return IsPlayingStateEvent.decode(readValue(buffer)!); case 134: - return PlatformVideoViewCreationParams.decode(readValue(buffer)!); + return AudioTrackChangedEvent.decode(readValue(buffer)!); case 135: - return CreationOptions.decode(readValue(buffer)!); + return PlatformVideoViewCreationParams.decode(readValue(buffer)!); case 136: + return CreationOptions.decode(readValue(buffer)!); + case 137: return TexturePlayerIds.decode(readValue(buffer)!); + case 138: + return PlaybackState.decode(readValue(buffer)!); + case 139: + return AudioTrackMessage.decode(readValue(buffer)!); + case 140: + return ExoPlayerAudioTrackData.decode(readValue(buffer)!); + case 141: + return NativeAudioTrackData.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -809,6 +1115,65 @@ class VideoPlayerInstanceApi { return (pigeonVar_replyList[0] as int?)!; } } + + /// Gets the available audio tracks for the video. + Future getAudioTracks() async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.getAudioTracks$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as NativeAudioTrackData?)!; + } + } + + /// Selects which audio track is chosen for playback from its [groupIndex] and [trackIndex] + Future selectAudioTrack(int groupIndex, int trackIndex) async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.selectAudioTrack$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [groupIndex, trackIndex], + ); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } Stream videoEvents({String instanceName = ''}) { diff --git a/packages/video_player/video_player_android/pigeons/messages.dart b/packages/video_player/video_player_android/pigeons/messages.dart index 6fee5973760..8666b074969 100644 --- a/packages/video_player/video_player_android/pigeons/messages.dart +++ b/packages/video_player/video_player_android/pigeons/messages.dart @@ -51,6 +51,15 @@ class IsPlayingStateEvent extends PlatformVideoEvent { late final bool isPlaying; } +/// Sent when audio tracks change. +/// +/// This includes when the selected audio track changes after calling selectAudioTrack. +/// Corresponds to ExoPlayer's onTracksChanged. +class AudioTrackChangedEvent extends PlatformVideoEvent { + /// The ID of the newly selected audio track, if any. + late final String? selectedTrackId; +} + /// Information passed to the platform view creation. class PlatformVideoViewCreationParams { const PlatformVideoViewCreationParams({required this.playerId}); @@ -73,6 +82,72 @@ class TexturePlayerIds { final int textureId; } +class PlaybackState { + PlaybackState({required this.playPosition, required this.bufferPosition}); + + /// The current playback position, in milliseconds. + final int playPosition; + + /// The current buffer position, in milliseconds. + final int bufferPosition; +} + +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + String label; + String language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from ExoPlayer Format objects. +class ExoPlayerAudioTrackData { + ExoPlayerAudioTrackData({ + required this.groupIndex, + required this.trackIndex, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int groupIndex; + int trackIndex; + String? label; + String? language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? 
codec; +} + +/// Container for raw audio track data from Android ExoPlayer. +class NativeAudioTrackData { + NativeAudioTrackData({this.exoPlayerTracks}); + + /// ExoPlayer-based tracks + List? exoPlayerTracks; +} + @HostApi() abstract class AndroidVideoPlayerApi { void initialize(); @@ -111,6 +186,12 @@ abstract class VideoPlayerInstanceApi { /// Returns the current buffer position, in milliseconds. int getBufferedPosition(); + + /// Gets the available audio tracks for the video. + NativeAudioTrackData getAudioTracks(); + + /// Selects which audio track is chosen for playback from its [groupIndex] and [trackIndex] + void selectAudioTrack(int groupIndex, int trackIndex); } @EventChannelApi() diff --git a/packages/video_player/video_player_android/pubspec.yaml b/packages/video_player/video_player_android/pubspec.yaml index 8c996569854..c3c7f4648be 100644 --- a/packages/video_player/video_player_android/pubspec.yaml +++ b/packages/video_player/video_player_android/pubspec.yaml @@ -20,7 +20,7 @@ flutter: dependencies: flutter: sdk: flutter - video_player_platform_interface: ^6.3.0 + video_player_platform_interface: ^6.6.0 dev_dependencies: build_runner: ^2.3.3 diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index 5af78861509..a7f235e9beb 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,3 +1,8 @@ +## 2.9.0 + +* Implements `getAudioTracks()` and `selectAudioTrack()` methods. +* Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. + ## 2.8.5 * Updates minimum supported version to iOS 13 and macOS 10.15. diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m index fc3716e427e..7923e5e3a8f 100644 --- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m @@ -1024,4 +1024,362 @@ - (nonnull AVPlayerItem *)playerItemWithURL:(NSURL *)url { return [AVPlayerItem playerItemWithAsset:[AVURLAsset URLAssetWithURL:url options:nil]]; } +#pragma mark - Audio Track Tests + +- (void)testGetAudioTracksWithRegularAssetTracks { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock asset tracks + id mockTrack1 = OCMClassMock([AVAssetTrack class]); + id mockTrack2 = OCMClassMock([AVAssetTrack class]); + + // Configure track 1 + OCMStub([mockTrack1 trackID]).andReturn(1); + OCMStub([mockTrack1 languageCode]).andReturn(@"en"); + OCMStub([mockTrack1 estimatedDataRate]).andReturn(128000.0f); + + // Configure track 2 + OCMStub([mockTrack2 trackID]).andReturn(2); + OCMStub([mockTrack2 
languageCode]).andReturn(@"es"); + OCMStub([mockTrack2 estimatedDataRate]).andReturn(96000.0f); + + // Mock empty format descriptions to avoid Core Media crashes in test environment + OCMStub([mockTrack1 formatDescriptions]).andReturn(@[]); + OCMStub([mockTrack2 formatDescriptions]).andReturn(@[]); + + // Mock the asset to return our tracks + NSArray *mockTracks = @[ mockTrack1, mockTrack2 ]; + OCMStub([mockAsset tracksWithMediaType:AVMediaTypeAudio]).andReturn(mockTracks); + + // Mock no media selection group (regular asset) + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + XCTAssertEqual(result.assetTracks.count, 2); + + // Verify first track + FVPAssetAudioTrackData *track1 = result.assetTracks[0]; + XCTAssertEqual(track1.trackId, 1); + XCTAssertEqualObjects(track1.language, @"en"); + XCTAssertTrue(track1.isSelected); // First track should be selected + XCTAssertEqualObjects(track1.bitrate, @128000); + + // Verify second track + FVPAssetAudioTrackData *track2 = result.assetTracks[1]; + XCTAssertEqual(track2.trackId, 2); + XCTAssertEqualObjects(track2.language, @"es"); + XCTAssertFalse(track2.isSelected); // Second track should not be selected + XCTAssertEqualObjects(track2.bitrate, @96000); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithMediaSelectionOptions { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group and options + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + id mockOption1 = OCMClassMock([AVMediaSelectionOption class]); + id mockOption2 = OCMClassMock([AVMediaSelectionOption class]); + + // Configure option 1 + OCMStub([mockOption1 displayName]).andReturn(@"English"); + id mockLocale1 = OCMClassMock([NSLocale class]); + OCMStub([mockLocale1 languageCode]).andReturn(@"en"); + OCMStub([mockOption1 locale]).andReturn(mockLocale1); + + // Configure option 2 + OCMStub([mockOption2 displayName]).andReturn(@"Español"); + id mockLocale2 = OCMClassMock([NSLocale class]); + OCMStub([mockLocale2 languageCode]).andReturn(@"es"); + OCMStub([mockOption2 locale]).andReturn(mockLocale2); + + // Mock metadata for option 1 + id mockMetadataItem = OCMClassMock([AVMetadataItem class]); + OCMStub([mockMetadataItem commonKey]).andReturn(AVMetadataCommonKeyTitle); + OCMStub([mockMetadataItem stringValue]).andReturn(@"English Audio Track"); + OCMStub([mockOption1 commonMetadata]).andReturn(@[ mockMetadataItem ]); + + // Configure media selection group + NSArray *options = @[ mockOption1, mockOption2 ]; + 
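+  // A non-empty options array drives getAudioTracks: down the media-selection (HLS) path, so
+  // assetTracks is expected to stay nil while mediaSelectionTracks is populated.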
OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(options); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(2); + + // Mock the asset to return media selection group + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + + // Mock current selection for both iOS 11+ and older versions + id mockCurrentMediaSelection = OCMClassMock([AVMediaSelection class]); + OCMStub([mockPlayerItem currentMediaSelection]).andReturn(mockCurrentMediaSelection); + OCMStub( + [mockCurrentMediaSelection selectedMediaOptionInMediaSelectionGroup:mockMediaSelectionGroup]) + .andReturn(mockOption1); + + // Also mock the deprecated method for iOS < 11 + OCMStub([mockPlayerItem selectedMediaOptionInMediaSelectionGroup:mockMediaSelectionGroup]) + .andReturn(mockOption1); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + XCTAssertNotNil(result.mediaSelectionTracks); + XCTAssertEqual(result.mediaSelectionTracks.count, 2); + + // Verify first option + FVPMediaSelectionAudioTrackData *option1Data = result.mediaSelectionTracks[0]; + XCTAssertEqual(option1Data.index, 0); + XCTAssertEqualObjects(option1Data.displayName, @"English"); + XCTAssertEqualObjects(option1Data.languageCode, @"en"); + XCTAssertTrue(option1Data.isSelected); + XCTAssertEqualObjects(option1Data.commonMetadataTitle, @"English Audio Track"); + + // Verify second option + FVPMediaSelectionAudioTrackData *option2Data = result.mediaSelectionTracks[1]; + XCTAssertEqual(option2Data.index, 1); + XCTAssertEqualObjects(option2Data.displayName, @"Español"); + XCTAssertEqualObjects(option2Data.languageCode, @"es"); + XCTAssertFalse(option2Data.isSelected); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNoCurrentItem { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Mock player with no current item + OCMStub([mockPlayer currentItem]).andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNoAsset { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] 
initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Mock player item with no asset + OCMStub([mockPlayerItem asset]).andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksCodecDetection { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock asset track with format description + id mockTrack = OCMClassMock([AVAssetTrack class]); + OCMStub([mockTrack trackID]).andReturn(1); + OCMStub([mockTrack languageCode]).andReturn(@"en"); + + // Mock empty format descriptions to avoid Core Media crashes in test environment + OCMStub([mockTrack formatDescriptions]).andReturn(@[]); + + // Mock the asset + OCMStub([mockAsset tracksWithMediaType:AVMediaTypeAudio]).andReturn(@[ mockTrack ]); + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertEqual(result.assetTracks.count, 1); + + FVPAssetAudioTrackData *track = result.assetTracks[0]; + XCTAssertEqual(track.trackId, 1); + XCTAssertEqualObjects(track.language, @"en"); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithEmptyMediaSelectionOptions { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group with no options + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(@[]); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(0); + + // Mock the asset + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + 
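+  // An audio group with zero options should make getAudioTracks: skip the media-selection path
+  // and fall back to asset tracks, hence the empty asset-track stub below.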
OCMStub([mockAsset tracksWithMediaType:AVMediaTypeAudio]).andReturn(@[]); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results - should fall back to asset tracks + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + XCTAssertEqual(result.assetTracks.count, 0); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNilMediaSelectionOption { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group with nil option + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + NSArray *options = @[ [NSNull null] ]; // Simulate nil option + OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(options); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(1); + + // Mock the asset + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results - should handle nil option gracefully + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.mediaSelectionTracks); + XCTAssertEqual(result.mediaSelectionTracks.count, 0); // Should skip nil options + + [player disposeWithError:&error]; +} + @end diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m index ea1084b9dd8..f35ce5215eb 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m @@ -73,6 +73,8 @@ static void FVPRemoveKeyValueObservers(NSObject *observer, @implementation FVPVideoPlayer { // Whether or not player and player item listeners have ever been registered. 
BOOL _listenersRegistered; + // Cached media selection options for audio tracks (HLS streams) + NSArray *_cachedAudioSelectionOptions; } - (instancetype)initWithPlayerItem:(AVPlayerItem *)item @@ -152,6 +154,9 @@ - (void)disposeWithError:(FlutterError *_Nullable *_Nonnull)error { FVPRemoveKeyValueObservers(self, FVPGetPlayerObservations(), self.player); } + // Clear cached audio selection options + _cachedAudioSelectionOptions = nil; + [self.player replaceCurrentItemWithPlayerItem:nil]; if (_onDisposed) { @@ -466,6 +471,216 @@ - (void)setPlaybackSpeed:(double)speed error:(FlutterError *_Nullable *_Nonnull) [self updatePlayingState]; } +- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + if (!currentItem || !currentItem.asset) { + return [FVPNativeAudioTrackData makeWithAssetTracks:nil mediaSelectionTracks:nil]; + } + + AVAsset *asset = currentItem.asset; + + // First, try to get tracks from media selection (for HLS streams) + AVMediaSelectionGroup *audioGroup = + [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + if (audioGroup && audioGroup.options.count > 0) { + // Cache the options array for later use in selectAudioTrack + _cachedAudioSelectionOptions = audioGroup.options; + + NSMutableArray *mediaSelectionTracks = + [[NSMutableArray alloc] init]; + AVMediaSelectionOption *currentSelection = nil; + if (@available(iOS 11.0, macOS 10.13, *)) { + AVMediaSelection *mediaSelection = currentItem.currentMediaSelection; + currentSelection = [mediaSelection selectedMediaOptionInMediaSelectionGroup:audioGroup]; + } else { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + currentSelection = [currentItem selectedMediaOptionInMediaSelectionGroup:audioGroup]; +#pragma clang diagnostic pop + } + + for (NSInteger i = 0; i < audioGroup.options.count; i++) { + AVMediaSelectionOption *option = audioGroup.options[i]; + + // Skip nil options + if (!option || [option isKindOfClass:[NSNull class]]) { + continue; + } + + NSString *displayName = option.displayName; + + NSString *languageCode = nil; + if (option.locale) { + languageCode = option.locale.languageCode; + } + + NSString *commonMetadataTitle = nil; + NSArray *titleItems = + [AVMetadataItem metadataItemsFromArray:option.commonMetadata + withKey:AVMetadataCommonKeyTitle + keySpace:AVMetadataKeySpaceCommon]; + if (titleItems.count > 0 && titleItems.firstObject.stringValue) { + commonMetadataTitle = titleItems.firstObject.stringValue; + } + + BOOL isSelected = (currentSelection == option) || [currentSelection isEqual:option]; + + FVPMediaSelectionAudioTrackData *trackData = + [FVPMediaSelectionAudioTrackData makeWithIndex:i + displayName:displayName + languageCode:languageCode + isSelected:isSelected + commonMetadataTitle:commonMetadataTitle]; + + [mediaSelectionTracks addObject:trackData]; + } + + // Always return media selection tracks when there's a media selection group + // even if all options were nil/invalid (empty array) + return [FVPNativeAudioTrackData makeWithAssetTracks:nil + mediaSelectionTracks:mediaSelectionTracks]; + } + + // If no media selection group or empty, try to get tracks from AVAsset (for regular video files) + NSArray *assetAudioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + NSMutableArray *assetTracks = [[NSMutableArray alloc] init]; + + for (NSInteger i = 0; i < assetAudioTracks.count; i++) { + AVAssetTrack *track = assetAudioTracks[i]; + + // 
Extract metadata from the track + NSString *language = nil; + NSString *label = nil; + + // Try to get language from track + if ([track.languageCode length] > 0) { + language = track.languageCode; + } + + // Try to get label from metadata + for (AVMetadataItem *item in track.commonMetadata) { + if ([item.commonKey isEqualToString:AVMetadataCommonKeyTitle] && item.stringValue) { + label = item.stringValue; + break; + } + } + + // Extract format information + NSNumber *bitrate = nil; + NSNumber *sampleRate = nil; + NSNumber *channelCount = nil; + NSString *codec = nil; + + // Attempt format description parsing + if (track.formatDescriptions.count > 0) { + @try { + id formatDescObj = track.formatDescriptions[0]; + + // Validate that we have a valid format description object + if (formatDescObj && [formatDescObj respondsToSelector:@selector(self)]) { + NSString *className = NSStringFromClass([formatDescObj class]); + + // Only process objects that are clearly Core Media format descriptions + if ([className hasPrefix:@"CMAudioFormatDescription"] || + [className hasPrefix:@"CMVideoFormatDescription"] || + [className hasPrefix:@"CMFormatDescription"]) { + CMFormatDescriptionRef formatDesc = (__bridge CMFormatDescriptionRef)formatDescObj; + + // Validate the format description reference before using Core Media APIs + if (formatDesc && CFGetTypeID(formatDesc) == CMFormatDescriptionGetTypeID()) { + // Get audio stream basic description + const AudioStreamBasicDescription *audioDesc = + CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc); + if (audioDesc) { + if (audioDesc->mSampleRate > 0) { + sampleRate = @((NSInteger)audioDesc->mSampleRate); + } + if (audioDesc->mChannelsPerFrame > 0) { + channelCount = @(audioDesc->mChannelsPerFrame); + } + } + + // Try to get codec information + FourCharCode codecType = CMFormatDescriptionGetMediaSubType(formatDesc); + switch (codecType) { + case kAudioFormatMPEG4AAC: + codec = @"aac"; + break; + case kAudioFormatAC3: + codec = @"ac3"; + break; + case kAudioFormatEnhancedAC3: + codec = @"eac3"; + break; + case kAudioFormatMPEGLayer3: + codec = @"mp3"; + break; + default: + codec = nil; + break; + } + } + } + } + } @catch (NSException *exception) { + // Handle any exceptions from format description parsing gracefully + // This ensures the method continues to work even with mock objects or invalid data + // In tests, this allows the method to return track data with nil format fields + } + } + + // Estimate bitrate from track + if (track.estimatedDataRate > 0) { + bitrate = @((NSInteger)track.estimatedDataRate); + } + + // For now, assume the first track is selected (we don't have easy access to current selection + // for asset tracks) + BOOL isSelected = (i == 0); + + FVPAssetAudioTrackData *trackData = [FVPAssetAudioTrackData makeWithTrackId:track.trackID + label:label + language:language + isSelected:isSelected + bitrate:bitrate + sampleRate:sampleRate + channelCount:channelCount + codec:codec]; + + [assetTracks addObject:trackData]; + } + + // Return asset tracks (even if empty), media selection tracks should be nil + return [FVPNativeAudioTrackData makeWithAssetTracks:assetTracks mediaSelectionTracks:nil]; +} + +- (void)selectAudioTrackWithType:(nonnull NSString *)trackType + trackId:(NSInteger)trackId + error:(FlutterError *_Nullable __autoreleasing *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + if (!currentItem || !currentItem.asset) { + return; + } + + AVAsset *asset = currentItem.asset; + + // Check if this is a media 
selection track (for HLS streams) + if ([trackType isEqualToString:@"mediaSelection"]) { + // Validate that we have cached options and the trackId (index) is valid + if (_cachedAudioSelectionOptions && trackId >= 0 && + trackId < (NSInteger)_cachedAudioSelectionOptions.count) { + AVMediaSelectionOption *option = _cachedAudioSelectionOptions[trackId]; + AVMediaSelectionGroup *audioGroup = + [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + if (audioGroup) { + [currentItem selectMediaOption:option inMediaSelectionGroup:audioGroup]; + } + } + } + // For asset tracks, we don't have a direct way to select them in AVFoundation + // This would require more complex track selection logic that's not commonly used +} + #pragma mark - Private - (int64_t)duration { diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h index becb97700e9..59934546c28 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h @@ -16,6 +16,10 @@ NS_ASSUME_NONNULL_BEGIN @class FVPPlatformVideoViewCreationParams; @class FVPCreationOptions; @class FVPTexturePlayerIds; +@class FVPAudioTrackMessage; +@class FVPAssetAudioTrackData; +@class FVPMediaSelectionAudioTrackData; +@class FVPNativeAudioTrackData; /// Information passed to the platform view creation. @interface FVPPlatformVideoViewCreationParams : NSObject @@ -42,6 +46,78 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, assign) NSInteger textureId; @end +/// Represents an audio track in a video. +@interface FVPAudioTrackMessage : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, copy) NSString *id; +@property(nonatomic, copy) NSString *label; +@property(nonatomic, copy) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVAssetTrack (for regular assets). +@interface FVPAssetAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. 
+- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, assign) NSInteger trackId; +@property(nonatomic, copy, nullable) NSString *label; +@property(nonatomic, copy, nullable) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +@interface FVPMediaSelectionAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle; +@property(nonatomic, assign) NSInteger index; +@property(nonatomic, copy, nullable) NSString *displayName; +@property(nonatomic, copy, nullable) NSString *languageCode; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, copy, nullable) NSString *commonMetadataTitle; +@end + +/// Container for raw audio track data from native platforms. +@interface FVPNativeAudioTrackData : NSObject ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks; +/// Asset-based tracks (for regular video files) +@property(nonatomic, copy, nullable) NSArray *assetTracks; +/// Media selection-based tracks (for HLS streams) +@property(nonatomic, copy, nullable) + NSArray *mediaSelectionTracks; +@end + /// The codec used by all APIs. NSObject *FVPGetMessagesCodec(void); @@ -78,6 +154,11 @@ extern void SetUpFVPAVFoundationVideoPlayerApiWithSuffix( - (void)seekTo:(NSInteger)position completion:(void (^)(FlutterError *_Nullable))completion; - (void)pauseWithError:(FlutterError *_Nullable *_Nonnull)error; - (void)disposeWithError:(FlutterError *_Nullable *_Nonnull)error; +/// @return `nil` only when `error != nil`. 
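+/// The result carries either `assetTracks` (regular video files) or `mediaSelectionTracks`
+/// (HLS streams); the list for the unused source is `nil`.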
+- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error; +- (void)selectAudioTrackWithType:(NSString *)trackType + trackId:(NSInteger)trackId + error:(FlutterError *_Nullable *_Nonnull)error; @end extern void SetUpFVPVideoPlayerInstanceApi(id binaryMessenger, diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m index 5caf390b96a..b71764b5261 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m @@ -48,6 +48,30 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list; - (NSArray *)toList; @end +@interface FVPAudioTrackMessage () ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list; ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPAssetAudioTrackData () ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPMediaSelectionAudioTrackData () ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPNativeAudioTrackData () ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + @implementation FVPPlatformVideoViewCreationParams + (instancetype)makeWithPlayerId:(NSInteger)playerId { FVPPlatformVideoViewCreationParams *pigeonResult = @@ -120,6 +144,167 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list { } @end +@implementation FVPAudioTrackMessage ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = id; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = GetNullableObjectAtIndex(list, 0); + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPAudioTrackMessage fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.id ?: [NSNull null], + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPAssetAudioTrackData ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = trackId; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPAssetAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.trackId), + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPMediaSelectionAudioTrackData ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = index; + pigeonResult.displayName = displayName; + pigeonResult.languageCode = languageCode; + pigeonResult.isSelected = isSelected; + pigeonResult.commonMetadataTitle = commonMetadataTitle; + return pigeonResult; +} ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.displayName = GetNullableObjectAtIndex(list, 1); + pigeonResult.languageCode = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.commonMetadataTitle = GetNullableObjectAtIndex(list, 4); + return pigeonResult; +} ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPMediaSelectionAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.index), + self.displayName ?: [NSNull null], + self.languageCode ?: [NSNull null], + @(self.isSelected), + self.commonMetadataTitle ?: [NSNull null], + ]; +} +@end + +@implementation FVPNativeAudioTrackData ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = assetTracks; + pigeonResult.mediaSelectionTracks = mediaSelectionTracks; + return pigeonResult; +} ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = GetNullableObjectAtIndex(list, 0); + pigeonResult.mediaSelectionTracks = GetNullableObjectAtIndex(list, 1); + return pigeonResult; +} ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPNativeAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.assetTracks ?: [NSNull null], + self.mediaSelectionTracks ?: [NSNull null], + ]; +} +@end + @interface FVPMessagesPigeonCodecReader : FlutterStandardReader @end @implementation FVPMessagesPigeonCodecReader @@ -131,6 +316,14 @@ - (nullable id)readValueOfType:(UInt8)type { return [FVPCreationOptions fromList:[self readValue]]; case 131: return [FVPTexturePlayerIds fromList:[self readValue]]; + case 132: + return [FVPAudioTrackMessage fromList:[self readValue]]; + case 133: + return [FVPAssetAudioTrackData fromList:[self readValue]]; + case 134: + return [FVPMediaSelectionAudioTrackData fromList:[self readValue]]; + case 135: + return [FVPNativeAudioTrackData fromList:[self readValue]]; default: return [super readValueOfType:type]; } @@ -150,6 +343,18 @@ - (void)writeValue:(id)value { } else if ([value isKindOfClass:[FVPTexturePlayerIds class]]) { [self writeByte:131]; [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAudioTrackMessage class]]) { + [self writeByte:132]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAssetAudioTrackData class]]) { + [self writeByte:133]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPMediaSelectionAudioTrackData class]]) { + [self writeByte:134]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPNativeAudioTrackData class]]) { + [self writeByte:135]; + [self writeValue:[value toList]]; } else { [super writeValue:value]; } @@ -502,4 +707,50 @@ void SetUpFVPVideoPlayerInstanceApiWithSuffix(id binaryM [channel setMessageHandler:nil]; } } + { + FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] + initWithName:[NSString stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.video_player_avfoundation." 
+                                             @"VideoPlayerInstanceApi.getAudioTracks",
+                                             messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+                  codec:FVPGetMessagesCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(getAudioTracks:)],
+                @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to @selector(getAudioTracks:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        FlutterError *error;
+        FVPNativeAudioTrackData *output = [api getAudioTracks:&error];
+        callback(wrapResult(output, error));
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
+  {
+    FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc]
+        initWithName:[NSString stringWithFormat:@"%@%@",
+                                                @"dev.flutter.pigeon.video_player_avfoundation."
+                                                @"VideoPlayerInstanceApi.selectAudioTrack",
+                                                messageChannelSuffix]
+        binaryMessenger:binaryMessenger
+               codec:FVPGetMessagesCodec()];
+    if (api) {
+      NSCAssert([api respondsToSelector:@selector(selectAudioTrackWithType:trackId:error:)],
+                @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to "
+                @"@selector(selectAudioTrackWithType:trackId:error:)",
+                api);
+      [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
+        NSArray *args = message;
+        NSString *arg_trackType = GetNullableObjectAtIndex(args, 0);
+        NSInteger arg_trackId = [GetNullableObjectAtIndex(args, 1) integerValue];
+        FlutterError *error;
+        [api selectAudioTrackWithType:arg_trackType trackId:arg_trackId error:&error];
+        callback(wrapResult(nil, error));
+      }];
+    } else {
+      [channel setMessageHandler:nil];
+    }
+  }
 }
diff --git a/packages/video_player/video_player_avfoundation/example/pubspec.yaml b/packages/video_player/video_player_avfoundation/example/pubspec.yaml
index cc176e75c3f..902bf087303 100644
--- a/packages/video_player/video_player_avfoundation/example/pubspec.yaml
+++ b/packages/video_player/video_player_avfoundation/example/pubspec.yaml
@@ -16,7 +16,7 @@ dependencies:
     # The example app is bundled with the plugin so we use a path dependency on
     # the parent directory to use the current plugin's version.
     path: ../
-  video_player_platform_interface: ^6.3.0
+  video_player_platform_interface: ^6.6.0
 
 dev_dependencies:
   flutter_test:
diff --git a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
index 4c1719578f6..7f8c1c8bb25 100644
--- a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
+++ b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
@@ -211,6 +211,77 @@ class AVFoundationVideoPlayer extends VideoPlayerPlatform {
     return _api.setMixWithOthers(mixWithOthers);
   }
 
+  @override
+  Future<List<VideoAudioTrack>> getAudioTracks(int playerId) async {
+    final NativeAudioTrackData nativeData = await _playerWith(
+      id: playerId,
+    ).getAudioTracks();
+    final List<VideoAudioTrack> tracks = <VideoAudioTrack>[];
+
+    // Convert asset tracks to VideoAudioTrack.
+    if (nativeData.assetTracks != null) {
+      for (final AssetAudioTrackData track in nativeData.assetTracks!) {
+        tracks.add(
+          VideoAudioTrack(
+            id: track.trackId.toString(),
+            label: track.label,
+            language: track.language,
+            isSelected: track.isSelected,
+            bitrate: track.bitrate,
+            sampleRate: track.sampleRate,
+            channelCount: track.channelCount,
+            codec: track.codec,
+          ),
+        );
+      }
+    }
+
+    // Convert media selection tracks to VideoAudioTrack (for HLS streams).
+    if (nativeData.mediaSelectionTracks != null) {
+      for (final MediaSelectionAudioTrackData track
+          in nativeData.mediaSelectionTracks!) {
+        final String trackId = 'media_selection_${track.index}';
+        final String? label = track.commonMetadataTitle ?? track.displayName;
+        tracks.add(
+          VideoAudioTrack(
+            id: trackId,
+            label: label,
+            language: track.languageCode,
+            isSelected: track.isSelected,
+          ),
+        );
+      }
+    }
+
+    return tracks;
+  }
+
+  @override
+  Future<void> selectAudioTrack(int playerId, String trackId) {
+    // Parse the trackId to determine the track type and extract the integer ID.
+    String trackType;
+    int numericTrackId;
+
+    if (trackId.startsWith('media_selection_')) {
+      trackType = 'mediaSelection';
+      numericTrackId = int.parse(trackId.substring('media_selection_'.length));
+    } else {
+      // Asset track: the trackId is just the integer as a string.
+      trackType = 'asset';
+      numericTrackId = int.parse(trackId);
+    }
+
+    return _playerWith(
+      id: playerId,
+    ).selectAudioTrack(trackType, numericTrackId);
+  }
+
+  @override
+  bool isAudioTrackSupportAvailable() {
+    // iOS/macOS with AVFoundation supports audio track selection.
+    return true;
+  }
+
   @override
   Widget buildView(int playerId) {
     return buildViewWithOptions(VideoViewOptions(playerId: playerId));
diff --git a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
index 82958bf9ece..dcdc75b6c2f 100644
--- a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
+++ b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
@@ -154,6 +154,267 @@ class TexturePlayerIds {
   int get hashCode => Object.hashAll(_toList());
 }
 
+/// Represents an audio track in a video.
+class AudioTrackMessage {
+  AudioTrackMessage({
+    required this.id,
+    required this.label,
+    required this.language,
+    required this.isSelected,
+    this.bitrate,
+    this.sampleRate,
+    this.channelCount,
+    this.codec,
+  });
+
+  String id;
+
+  String label;
+
+  String language;
+
+  bool isSelected;
+
+  int? bitrate;
+
+  int? sampleRate;
+
+  int? channelCount;
+
+  String? codec;
+
+  List<Object?> _toList() {
+    return <Object?>[
+      id,
+      label,
+      language,
+      isSelected,
+      bitrate,
+      sampleRate,
+      channelCount,
+      codec,
+    ];
+  }
+
+  Object encode() {
+    return _toList();
+  }
+
+  static AudioTrackMessage decode(Object result) {
+    result as List<Object?>;
+    return AudioTrackMessage(
+      id: result[0]! as String,
+      label: result[1]! as String,
+      language: result[2]! as String,
+      isSelected: result[3]! as bool,
+      bitrate: result[4] as int?,
+      sampleRate: result[5] as int?,
+      channelCount: result[6] as int?,
+      codec: result[7] as String?,
+    );
+  }
+
+  @override
+  // ignore: avoid_equals_and_hash_code_on_mutable_classes
+  bool operator ==(Object other) {
+    if (other is! AudioTrackMessage || other.runtimeType != runtimeType) {
+      return false;
+    }
+    if (identical(this, other)) {
+      return true;
+    }
+    return _deepEquals(encode(), other.encode());
+  }
+
+  @override
+  // ignore: avoid_equals_and_hash_code_on_mutable_classes
+  int get hashCode => Object.hashAll(_toList());
+}
+
+/// Raw audio track data from AVAssetTrack (for regular assets).
+class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + + String? label; + + String? language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + trackId, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AssetAudioTrackData decode(Object result) { + result as List; + return AssetAudioTrackData( + trackId: result[0]! as int, + label: result[1] as String?, + language: result[2] as String?, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AssetAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + + String? displayName; + + String? languageCode; + + bool isSelected; + + String? commonMetadataTitle; + + List _toList() { + return [ + index, + displayName, + languageCode, + isSelected, + commonMetadataTitle, + ]; + } + + Object encode() { + return _toList(); + } + + static MediaSelectionAudioTrackData decode(Object result) { + result as List; + return MediaSelectionAudioTrackData( + index: result[0]! as int, + displayName: result[1] as String?, + languageCode: result[2] as String?, + isSelected: result[3]! as bool, + commonMetadataTitle: result[4] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! MediaSelectionAudioTrackData || + other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? mediaSelectionTracks; + + List _toList() { + return [assetTracks, mediaSelectionTracks]; + } + + Object encode() { + return _toList(); + } + + static NativeAudioTrackData decode(Object result) { + result as List; + return NativeAudioTrackData( + assetTracks: (result[0] as List?)?.cast(), + mediaSelectionTracks: (result[1] as List?) + ?.cast(), + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! 
NativeAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -170,6 +431,18 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is TexturePlayerIds) { buffer.putUint8(131); writeValue(buffer, value.encode()); + } else if (value is AudioTrackMessage) { + buffer.putUint8(132); + writeValue(buffer, value.encode()); + } else if (value is AssetAudioTrackData) { + buffer.putUint8(133); + writeValue(buffer, value.encode()); + } else if (value is MediaSelectionAudioTrackData) { + buffer.putUint8(134); + writeValue(buffer, value.encode()); + } else if (value is NativeAudioTrackData) { + buffer.putUint8(135); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -184,6 +457,14 @@ class _PigeonCodec extends StandardMessageCodec { return CreationOptions.decode(readValue(buffer)!); case 131: return TexturePlayerIds.decode(readValue(buffer)!); + case 132: + return AudioTrackMessage.decode(readValue(buffer)!); + case 133: + return AssetAudioTrackData.decode(readValue(buffer)!); + case 134: + return MediaSelectionAudioTrackData.decode(readValue(buffer)!); + case 135: + return NativeAudioTrackData.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -582,4 +863,61 @@ class VideoPlayerInstanceApi { return; } } + + Future getAudioTracks() async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.getAudioTracks$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as NativeAudioTrackData?)!; + } + } + + Future selectAudioTrack(String trackType, int trackId) async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.selectAudioTrack$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [trackType, trackId], + ); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } diff --git a/packages/video_player/video_player_avfoundation/pigeons/messages.dart b/packages/video_player/video_player_avfoundation/pigeons/messages.dart index 6e872dec145..6f5137af409 100644 --- a/packages/video_player/video_player_avfoundation/pigeons/messages.dart +++ b/packages/video_player/video_player_avfoundation/pigeons/messages.dart @@ -39,6 +39,80 @@ class TexturePlayerIds { final int textureId; } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + String label; + String language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVAssetTrack (for regular assets). +class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + String? label; + String? language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + String? displayName; + String? languageCode; + bool isSelected; + String? commonMetadataTitle; +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? 
mediaSelectionTracks;
+}
+
 @HostApi()
 abstract class AVFoundationVideoPlayerApi {
   @ObjCSelector('initialize')
@@ -72,4 +146,8 @@ abstract class VideoPlayerInstanceApi {
   void seekTo(int position);
   void pause();
   void dispose();
+  @ObjCSelector('getAudioTracks')
+  NativeAudioTrackData getAudioTracks();
+  @ObjCSelector('selectAudioTrackWithType:trackId:')
+  void selectAudioTrack(String trackType, int trackId);
 }
diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml
index ee4bd6a25b5..7e6491669fb 100644
--- a/packages/video_player/video_player_avfoundation/pubspec.yaml
+++ b/packages/video_player/video_player_avfoundation/pubspec.yaml
@@ -24,7 +24,7 @@ flutter:
 dependencies:
   flutter:
     sdk: flutter
-  video_player_platform_interface: ^6.3.0
+  video_player_platform_interface: ^6.6.0
 
 dev_dependencies:
   build_runner: ^2.3.3
diff --git a/packages/video_player/video_player_web/example/pubspec.yaml b/packages/video_player/video_player_web/example/pubspec.yaml
index e3bce694990..40fbda18526 100644
--- a/packages/video_player/video_player_web/example/pubspec.yaml
+++ b/packages/video_player/video_player_web/example/pubspec.yaml
@@ -8,7 +8,7 @@ environment:
 dependencies:
   flutter:
     sdk: flutter
-  video_player_platform_interface: ^6.3.0
+  video_player_platform_interface: ^6.6.0
   video_player_web:
     path: ../
   web: ^1.0.0
diff --git a/packages/video_player/video_player_web/lib/video_player_web.dart b/packages/video_player/video_player_web/lib/video_player_web.dart
index a7061b6652a..1398c7dfd8d 100644
--- a/packages/video_player/video_player_web/lib/video_player_web.dart
+++ b/packages/video_player/video_player_web/lib/video_player_web.dart
@@ -170,4 +170,22 @@ class VideoPlayerPlugin extends VideoPlayerPlatform {
   /// Sets the audio mode to mix with other sources (ignored).
   @override
   Future<void> setMixWithOthers(bool mixWithOthers) => Future<void>.value();
+
+  @override
+  Future<List<VideoAudioTrack>> getAudioTracks(int playerId) async {
+    // Not currently supported on the web platform.
+    throw UnimplementedError('getAudioTracks() is not supported on web');
+  }
+
+  @override
+  Future<void> selectAudioTrack(int playerId, String trackId) async {
+    // Not currently supported on the web platform.
+    throw UnimplementedError('selectAudioTrack() is not supported on web');
+  }
+
+  @override
+  bool isAudioTrackSupportAvailable() {
+    // Not currently supported on the web platform.
+    return false;
+  }
 }
diff --git a/packages/video_player/video_player_web/pubspec.yaml b/packages/video_player/video_player_web/pubspec.yaml
index ca36ffe35ee..9b39c264332 100644
--- a/packages/video_player/video_player_web/pubspec.yaml
+++ b/packages/video_player/video_player_web/pubspec.yaml
@@ -21,7 +21,7 @@ dependencies:
     sdk: flutter
   flutter_web_plugins:
     sdk: flutter
-  video_player_platform_interface: ^6.4.0
+  video_player_platform_interface: ^6.6.0
   web: ">=0.5.1 <2.0.0"
 
 dev_dependencies:
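
A minimal usage sketch (not part of this diff) of the platform-interface surface added above, assuming a valid playerId from an already-created player; the helper name switchToFirstAlternateTrack is illustrative only.

import 'package:video_player_platform_interface/video_player_platform_interface.dart';

Future<void> switchToFirstAlternateTrack(int playerId) async {
  final VideoPlayerPlatform platform = VideoPlayerPlatform.instance;
  // The web implementation above reports no support and throws from
  // getAudioTracks(), so check availability first.
  if (!platform.isAudioTrackSupportAvailable()) {
    return;
  }
  final List<VideoAudioTrack> tracks = await platform.getAudioTracks(playerId);
  for (final VideoAudioTrack track in tracks) {
    // Ids produced by the AVFoundation implementation are either a numeric
    // AVAssetTrack id or a 'media_selection_<index>' string; either form is
    // accepted back by selectAudioTrack().
    if (!track.isSelected) {
      await platform.selectAudioTrack(playerId, track.id);
      break;
    }
  }
}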