diff --git a/packages/video_player/video_player/CHANGELOG.md b/packages/video_player/video_player/CHANGELOG.md index d01c0ec1d9a..bb45657f9ee 100644 --- a/packages/video_player/video_player/CHANGELOG.md +++ b/packages/video_player/video_player/CHANGELOG.md @@ -1,5 +1,6 @@ ## NEXT +* Adds `getAudioTracks()` and `selectAudioTrack()` methods to retrieve and select available audio tracks. * Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. ## 2.10.0 diff --git a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj index 2ab10fb9081..cb65513e549 100644 --- a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj @@ -140,6 +140,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 40E43985C26639614BC3B419 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -221,6 +222,23 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 40E43985C26639614BC3B419 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; diff --git a/packages/video_player/video_player/example/lib/audio_tracks_demo.dart b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart new file mode 100644 index 00000000000..152082e31c4 --- /dev/null +++ b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart @@ -0,0 +1,326 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import 'package:flutter/material.dart'; +import 'package:video_player/video_player.dart'; + +/// A demo page that showcases audio track functionality. +class AudioTracksDemo extends StatefulWidget { + /// Creates an AudioTracksDemo widget. + const AudioTracksDemo({super.key}); + + @override + State createState() => _AudioTracksDemoState(); +} + +class _AudioTracksDemoState extends State { + VideoPlayerController? _controller; + List _audioTracks = []; + bool _isLoading = false; + String? 
_error; + + // Sample video URLs with multiple audio tracks + final List _sampleVideos = [ + 'https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4', + 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8', + // Add HLS stream with multiple audio tracks if available + 'https://devstreaming-cdn.apple.com/videos/streaming/examples/img_bipbop_adv_example_fmp4/master.m3u8', + ]; + + int _selectedVideoIndex = 0; + + @override + void initState() { + super.initState(); + _initializeVideo(); + } + + Future _initializeVideo() async { + setState(() { + _isLoading = true; + _error = null; + }); + + try { + await _controller?.dispose(); + + _controller = VideoPlayerController.networkUrl( + Uri.parse(_sampleVideos[_selectedVideoIndex]), + ); + + await _controller!.initialize(); + + // Get audio tracks after initialization + await _loadAudioTracks(); + + setState(() { + _isLoading = false; + }); + } catch (e) { + setState(() { + _error = 'Failed to initialize video: $e'; + _isLoading = false; + }); + } + } + + Future _loadAudioTracks() async { + if (_controller == null || !_controller!.value.isInitialized) { + return; + } + + try { + final List tracks = await _controller!.getAudioTracks(); + setState(() { + _audioTracks = tracks; + }); + } catch (e) { + setState(() { + _error = 'Failed to load audio tracks: $e'; + }); + } + } + + Future _selectAudioTrack(String trackId) async { + if (_controller == null) { + return; + } + + try { + await _controller!.selectAudioTrack(trackId); + + // Add a small delay to allow ExoPlayer to process the track selection change + // This is needed because ExoPlayer's track selection update is asynchronous + await Future.delayed(const Duration(milliseconds: 100)); + + // Reload tracks to update selection status + await _loadAudioTracks(); + + if (!mounted) { + return; + } + ScaffoldMessenger.of( + context, + ).showSnackBar(SnackBar(content: Text('Selected audio track: $trackId'))); + } catch (e) { + if (!mounted) { + return; + } + ScaffoldMessenger.of(context).showSnackBar( + SnackBar(content: Text('Failed to select audio track: $e')), + ); + } + } + + @override + void dispose() { + _controller?.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Audio Tracks Demo'), + backgroundColor: Theme.of(context).colorScheme.inversePrimary, + ), + body: Column( + children: [ + // Video selection dropdown + Padding( + padding: const EdgeInsets.all(16.0), + child: DropdownButtonFormField( + value: _selectedVideoIndex, + decoration: const InputDecoration( + labelText: 'Select Video', + border: OutlineInputBorder(), + ), + items: + _sampleVideos.asMap().entries.map((MapEntry entry) { + return DropdownMenuItem( + value: entry.key, + child: Text('Video ${entry.key + 1}'), + ); + }).toList(), + onChanged: (int? 
value) { + if (value != null && value != _selectedVideoIndex) { + setState(() { + _selectedVideoIndex = value; + }); + _initializeVideo(); + } + }, + ), + ), + + // Video player + Expanded( + flex: 2, + child: ColoredBox(color: Colors.black, child: _buildVideoPlayer()), + ), + + // Audio tracks list + Expanded(flex: 3, child: _buildAudioTracksList()), + ], + ), + floatingActionButton: FloatingActionButton( + onPressed: _loadAudioTracks, + tooltip: 'Refresh Audio Tracks', + child: const Icon(Icons.refresh), + ), + ); + } + + Widget _buildVideoPlayer() { + if (_isLoading) { + return const Center(child: CircularProgressIndicator()); + } + + if (_error != null) { + return Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon(Icons.error, size: 48, color: Colors.red[300]), + const SizedBox(height: 16), + Text( + _error!, + style: const TextStyle(color: Colors.white), + textAlign: TextAlign.center, + ), + const SizedBox(height: 16), + ElevatedButton( + onPressed: _initializeVideo, + child: const Text('Retry'), + ), + ], + ), + ); + } + + if (_controller?.value.isInitialized ?? false) { + return Stack( + alignment: Alignment.center, + children: [ + AspectRatio( + aspectRatio: _controller!.value.aspectRatio, + child: VideoPlayer(_controller!), + ), + _buildPlayPauseButton(), + ], + ); + } + + return const Center( + child: Text('No video loaded', style: TextStyle(color: Colors.white)), + ); + } + + Widget _buildPlayPauseButton() { + return Container( + decoration: BoxDecoration( + color: Colors.black54, + borderRadius: BorderRadius.circular(30), + ), + child: IconButton( + iconSize: 48, + color: Colors.white, + onPressed: () { + if (_controller!.value.isPlaying) { + _controller!.pause(); + } else { + _controller!.play(); + } + setState(() {}); + }, + icon: Icon( + _controller!.value.isPlaying ? Icons.pause : Icons.play_arrow, + ), + ), + ); + } + + Widget _buildAudioTracksList() { + return Container( + padding: const EdgeInsets.all(16.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + const Icon(Icons.audiotrack), + const SizedBox(width: 8), + Text( + 'Audio Tracks (${_audioTracks.length})', + style: Theme.of(context).textTheme.headlineSmall, + ), + ], + ), + const SizedBox(height: 16), + + if (_audioTracks.isEmpty) + const Expanded( + child: Center( + child: Text( + 'No audio tracks available.\nTry loading a video with multiple audio tracks.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 16, color: Colors.grey), + ), + ), + ) + else + Expanded( + child: ListView.builder( + itemCount: _audioTracks.length, + itemBuilder: (BuildContext context, int index) { + final VideoAudioTrack track = _audioTracks[index]; + return _buildAudioTrackTile(track); + }, + ), + ), + ], + ), + ); + } + + Widget _buildAudioTrackTile(VideoAudioTrack track) { + return Card( + margin: const EdgeInsets.only(bottom: 8.0), + child: ListTile( + leading: CircleAvatar( + backgroundColor: track.isSelected ? Colors.green : Colors.grey, + child: Icon( + track.isSelected ? Icons.check : Icons.audiotrack, + color: Colors.white, + ), + ), + title: Text( + track.label.isNotEmpty ? track.label : 'Track ${track.id}', + style: TextStyle( + fontWeight: track.isSelected ? 
FontWeight.bold : FontWeight.normal, + ), + ), + subtitle: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text('ID: ${track.id}'), + Text('Language: ${track.language}'), + if (track.codec != null) Text('Codec: ${track.codec}'), + if (track.bitrate != null) Text('Bitrate: ${track.bitrate} bps'), + if (track.sampleRate != null) + Text('Sample Rate: ${track.sampleRate} Hz'), + if (track.channelCount != null) + Text('Channels: ${track.channelCount}'), + ], + ), + trailing: + track.isSelected + ? const Icon(Icons.radio_button_checked, color: Colors.green) + : const Icon(Icons.radio_button_unchecked), + onTap: track.isSelected ? null : () => _selectAudioTrack(track.id), + ), + ); + } +} diff --git a/packages/video_player/video_player/example/lib/main.dart b/packages/video_player/video_player/example/lib/main.dart index eb86d32ad61..21286f2fa7b 100644 --- a/packages/video_player/video_player/example/lib/main.dart +++ b/packages/video_player/video_player/example/lib/main.dart @@ -11,6 +11,8 @@ library; import 'package:flutter/material.dart'; import 'package:video_player/video_player.dart'; +import 'audio_tracks_demo.dart'; + void main() { runApp(MaterialApp(home: _App())); } @@ -37,6 +39,19 @@ class _App extends StatelessWidget { ); }, ), + IconButton( + key: const ValueKey('audio_tracks_demo'), + icon: const Icon(Icons.audiotrack), + tooltip: 'Audio Tracks Demo', + onPressed: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => const AudioTracksDemo(), + ), + ); + }, + ), ], bottom: const TabBar( isScrollable: true, diff --git a/packages/video_player/video_player/example/pubspec.yaml b/packages/video_player/video_player/example/pubspec.yaml index 6c990c8b34f..20580717a9f 100644 --- a/packages/video_player/video_player/example/pubspec.yaml +++ b/packages/video_player/video_player/example/pubspec.yaml @@ -35,3 +35,9 @@ flutter: - assets/bumble_bee_captions.srt - assets/bumble_bee_captions.vtt - assets/Audio.mp3 +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_android: {path: ../../../../packages/video_player/video_player_android} + video_player_avfoundation: {path: ../../../../packages/video_player/video_player_avfoundation} + video_player_platform_interface: {path: ../../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player/lib/video_player.dart b/packages/video_player/video_player/lib/video_player.dart index 46eff91f316..0b577664603 100644 --- a/packages/video_player/video_player/lib/video_player.dart +++ b/packages/video_player/video_player/lib/video_player.dart @@ -16,6 +16,7 @@ export 'package:video_player_platform_interface/video_player_platform_interface. show DataSourceType, DurationRange, + VideoAudioTrack, VideoFormat, VideoPlayerOptions, VideoPlayerWebOptions, @@ -819,6 +820,37 @@ class VideoPlayerController extends ValueNotifier { } } + /// Gets the available audio tracks for the video. + /// + /// Returns a list of [VideoAudioTrack] objects containing metadata about + /// each available audio track. The list may be empty if no audio tracks + /// are available or if the video is not initialized. + /// + /// Throws an exception if the video player is disposed. 
+ Future> getAudioTracks() async { + if (_isDisposed) { + throw Exception('VideoPlayerController is disposed'); + } + if (!value.isInitialized) { + return []; + } + return _videoPlayerPlatform.getAudioTracks(_playerId); + } + + /// Selects an audio track by its ID. + /// + /// The [trackId] should match the ID of one of the tracks returned by + /// [getAudioTracks]. If the track ID is not found or invalid, the + /// platform may ignore the request or throw an exception. + /// + /// Throws an exception if the video player is disposed or not initialized. + Future selectAudioTrack(String trackId) async { + if (_isDisposedOrNotInitialized) { + throw Exception('VideoPlayerController is disposed or not initialized'); + } + await _videoPlayerPlatform.selectAudioTrack(_playerId, trackId); + } + bool get _isDisposedOrNotInitialized => _isDisposed || !value.isInitialized; } diff --git a/packages/video_player/video_player/pubspec.yaml b/packages/video_player/video_player/pubspec.yaml index c8863f632ff..7569c8310ee 100644 --- a/packages/video_player/video_player/pubspec.yaml +++ b/packages/video_player/video_player/pubspec.yaml @@ -38,3 +38,9 @@ dev_dependencies: topics: - video - video-player +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_android: {path: ../../../packages/video_player/video_player_android} + video_player_avfoundation: {path: ../../../packages/video_player/video_player_avfoundation} + video_player_platform_interface: {path: ../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player/test/video_player_test.dart b/packages/video_player/video_player/test/video_player_test.dart index c4bd4a573bf..0dda8ab9321 100644 --- a/packages/video_player/video_player/test/video_player_test.dart +++ b/packages/video_player/video_player/test/video_player_test.dart @@ -84,6 +84,43 @@ class FakeController extends ValueNotifier Future setClosedCaptionFile( Future? closedCaptionFile, ) async {} + + @override + Future> getAudioTracks() async { + return [ + const VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ), + const VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ), + const VideoAudioTrack( + id: 'track_3', + label: 'French', + language: 'fr', + isSelected: false, + bitrate: 96000, + ), + ]; + } + + @override + Future selectAudioTrack(String trackId) async { + // Store the selected track ID for verification in tests + selectedAudioTrackId = trackId; + } + + String? 
selectedAudioTrackId; } Future _loadClosedCaption() async => @@ -769,6 +806,197 @@ void main() { }); }); + group('audio tracks', () { + test('getAudioTracks returns list of tracks', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + final List tracks = await controller.getAudioTracks(); + + expect(tracks.length, 3); + expect(tracks[0].id, 'track_1'); + expect(tracks[0].label, 'English'); + expect(tracks[0].language, 'en'); + expect(tracks[0].isSelected, true); + expect(tracks[0].bitrate, null); + expect(tracks[0].sampleRate, null); + expect(tracks[0].channelCount, null); + expect(tracks[0].codec, null); + + expect(tracks[1].id, 'track_2'); + expect(tracks[1].label, 'Spanish'); + expect(tracks[1].language, 'es'); + expect(tracks[1].isSelected, false); + expect(tracks[1].bitrate, 128000); + expect(tracks[1].sampleRate, 44100); + expect(tracks[1].channelCount, 2); + expect(tracks[1].codec, 'aac'); + + expect(tracks[2].id, 'track_3'); + expect(tracks[2].label, 'French'); + expect(tracks[2].language, 'fr'); + expect(tracks[2].isSelected, false); + expect(tracks[2].bitrate, 96000); + expect(tracks[2].sampleRate, null); + expect(tracks[2].channelCount, null); + expect(tracks[2].codec, null); + }); + + test('getAudioTracks before initialization returns empty list', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + final List tracks = await controller.getAudioTracks(); + expect(tracks, isEmpty); + }); + + test('selectAudioTrack works with valid track ID', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + await controller.selectAudioTrack('track_2'); + + // Verify the platform recorded the selection + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_2', + ); + }); + + test('selectAudioTrack before initialization throws', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + expect( + () => controller.selectAudioTrack('track_1'), + throwsA(isA()), + ); + }); + + test('selectAudioTrack with empty track ID', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + await controller.selectAudioTrack(''); + + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + '', + ); + }); + + test('multiple track selections update correctly', () async { + final VideoPlayerController controller = + VideoPlayerController.networkUrl(_localhostUri); + addTearDown(controller.dispose); + + await controller.initialize(); + + await controller.selectAudioTrack('track_1'); + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_1', + ); + + await controller.selectAudioTrack('track_3'); + expect( + fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId], + 'track_3', + ); + }); + }); + + group('VideoAudioTrack', () { + test('equality works correctly', () { + const VideoAudioTrack track1 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track2 = VideoAudioTrack( + id: 'track_1', + label: 'English', + 
language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track3 = VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + ); + + expect(track1, equals(track2)); + expect(track1, isNot(equals(track3))); + }); + + test('hashCode works correctly', () { + const VideoAudioTrack track1 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + const VideoAudioTrack track2 = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + expect(track1.hashCode, equals(track2.hashCode)); + }); + + test('toString works correctly', () { + const VideoAudioTrack track = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ); + + final String trackString = track.toString(); + expect(trackString, contains('track_1')); + expect(trackString, contains('English')); + expect(trackString, contains('en')); + expect(trackString, contains('true')); + expect(trackString, contains('128000')); + expect(trackString, contains('44100')); + expect(trackString, contains('2')); + expect(trackString, contains('aac')); + }); + + test('optional fields can be null', () { + const VideoAudioTrack track = VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ); + + expect(track.bitrate, null); + expect(track.sampleRate, null); + expect(track.channelCount, null); + expect(track.codec, null); + }); + }); + group('caption', () { test('works when position updates', () async { final VideoPlayerController controller = @@ -1587,4 +1815,42 @@ class FakeVideoPlayerPlatform extends VideoPlayerPlatform { calls.add('setWebOptions'); webOptions[playerId] = options; } + + @override + Future> getAudioTracks(int playerId) async { + calls.add('getAudioTracks'); + return [ + const VideoAudioTrack( + id: 'track_1', + label: 'English', + language: 'en', + isSelected: true, + ), + const VideoAudioTrack( + id: 'track_2', + label: 'Spanish', + language: 'es', + isSelected: false, + bitrate: 128000, + sampleRate: 44100, + channelCount: 2, + codec: 'aac', + ), + const VideoAudioTrack( + id: 'track_3', + label: 'French', + language: 'fr', + isSelected: false, + bitrate: 96000, + ), + ]; + } + + @override + Future selectAudioTrack(int playerId, String trackId) async { + calls.add('selectAudioTrack'); + selectedAudioTrackIds[playerId] = trackId; + } + + final Map selectedAudioTrackIds = {}; } diff --git a/packages/video_player/video_player_android/CHANGELOG.md b/packages/video_player/video_player_android/CHANGELOG.md index 52d6dca4366..934e66034b2 100644 --- a/packages/video_player/video_player_android/CHANGELOG.md +++ b/packages/video_player/video_player_android/CHANGELOG.md @@ -1,3 +1,7 @@ +## NEXT + +* Implements `getAudioTracks()` and `selectAudioTrack()` methods for Android using ExoPlayer. + ## 2.8.13 * Bumps com.android.tools.build:gradle to 8.12.1. 
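
As a quick orientation for review (not part of the patch itself): a minimal sketch of how an app could call the API this change adds, once a `VideoPlayerController` is initialized. The helper name below is illustrative only; the types and methods (`getAudioTracks`, `selectAudioTrack`, `VideoAudioTrack` and its fields) are the ones introduced in this diff.

```dart
import 'package:video_player/video_player.dart';

/// Lists the available audio tracks and switches to the first one that is
/// not currently selected. Assumes [controller] is already initialized.
Future<void> switchToAlternateAudioTrack(VideoPlayerController controller) async {
  final List<VideoAudioTrack> tracks = await controller.getAudioTracks();
  for (final VideoAudioTrack track in tracks) {
    // Each track exposes id, label, language and isSelected, plus optional
    // bitrate, sampleRate, channelCount and codec metadata.
    if (!track.isSelected) {
      await controller.selectAudioTrack(track.id);
      break;
    }
  }
}
```
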
diff --git a/packages/video_player/video_player_android/android/build.gradle b/packages/video_player/video_player_android/android/build.gradle index 899ad562a8b..5e79785c660 100644 --- a/packages/video_player/video_player_android/android/build.gradle +++ b/packages/video_player/video_player_android/android/build.gradle @@ -23,7 +23,7 @@ apply plugin: 'com.android.library' android { namespace 'io.flutter.plugins.videoplayer' - compileSdk = flutter.compileSdkVersion + compileSdk = 35 defaultConfig { minSdkVersion 21 diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/Messages.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/Messages.java index ffd89e6137e..b576d2f493a 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/Messages.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/Messages.java @@ -21,6 +21,7 @@ import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Objects; @@ -418,6 +419,551 @@ ArrayList toList() { } } + /** + * Represents an audio track in a video. + * + *
<p>
Generated class from Pigeon that represents data sent in messages. + */ + public static final class AudioTrackMessage { + private @NonNull String id; + + public @NonNull String getId() { + return id; + } + + public void setId(@NonNull String setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"id\" is null."); + } + this.id = setterArg; + } + + private @NonNull String label; + + public @NonNull String getLabel() { + return label; + } + + public void setLabel(@NonNull String setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"label\" is null."); + } + this.label = setterArg; + } + + private @NonNull String language; + + public @NonNull String getLanguage() { + return language; + } + + public void setLanguage(@NonNull String setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"language\" is null."); + } + this.language = setterArg; + } + + private @NonNull Boolean isSelected; + + public @NonNull Boolean getIsSelected() { + return isSelected; + } + + public void setIsSelected(@NonNull Boolean setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"isSelected\" is null."); + } + this.isSelected = setterArg; + } + + private @Nullable Long bitrate; + + public @Nullable Long getBitrate() { + return bitrate; + } + + public void setBitrate(@Nullable Long setterArg) { + this.bitrate = setterArg; + } + + private @Nullable Long sampleRate; + + public @Nullable Long getSampleRate() { + return sampleRate; + } + + public void setSampleRate(@Nullable Long setterArg) { + this.sampleRate = setterArg; + } + + private @Nullable Long channelCount; + + public @Nullable Long getChannelCount() { + return channelCount; + } + + public void setChannelCount(@Nullable Long setterArg) { + this.channelCount = setterArg; + } + + private @Nullable String codec; + + public @Nullable String getCodec() { + return codec; + } + + public void setCodec(@Nullable String setterArg) { + this.codec = setterArg; + } + + /** Constructor is non-public to enforce null safety; use Builder. 
*/ + AudioTrackMessage() {} + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AudioTrackMessage that = (AudioTrackMessage) o; + return id.equals(that.id) + && label.equals(that.label) + && language.equals(that.language) + && isSelected.equals(that.isSelected) + && Objects.equals(bitrate, that.bitrate) + && Objects.equals(sampleRate, that.sampleRate) + && Objects.equals(channelCount, that.channelCount) + && Objects.equals(codec, that.codec); + } + + @Override + public int hashCode() { + return Objects.hash( + id, label, language, isSelected, bitrate, sampleRate, channelCount, codec); + } + + public static final class Builder { + + private @Nullable String id; + + @CanIgnoreReturnValue + public @NonNull Builder setId(@NonNull String setterArg) { + this.id = setterArg; + return this; + } + + private @Nullable String label; + + @CanIgnoreReturnValue + public @NonNull Builder setLabel(@NonNull String setterArg) { + this.label = setterArg; + return this; + } + + private @Nullable String language; + + @CanIgnoreReturnValue + public @NonNull Builder setLanguage(@NonNull String setterArg) { + this.language = setterArg; + return this; + } + + private @Nullable Boolean isSelected; + + @CanIgnoreReturnValue + public @NonNull Builder setIsSelected(@NonNull Boolean setterArg) { + this.isSelected = setterArg; + return this; + } + + private @Nullable Long bitrate; + + @CanIgnoreReturnValue + public @NonNull Builder setBitrate(@Nullable Long setterArg) { + this.bitrate = setterArg; + return this; + } + + private @Nullable Long sampleRate; + + @CanIgnoreReturnValue + public @NonNull Builder setSampleRate(@Nullable Long setterArg) { + this.sampleRate = setterArg; + return this; + } + + private @Nullable Long channelCount; + + @CanIgnoreReturnValue + public @NonNull Builder setChannelCount(@Nullable Long setterArg) { + this.channelCount = setterArg; + return this; + } + + private @Nullable String codec; + + @CanIgnoreReturnValue + public @NonNull Builder setCodec(@Nullable String setterArg) { + this.codec = setterArg; + return this; + } + + public @NonNull AudioTrackMessage build() { + AudioTrackMessage pigeonReturn = new AudioTrackMessage(); + pigeonReturn.setId(id); + pigeonReturn.setLabel(label); + pigeonReturn.setLanguage(language); + pigeonReturn.setIsSelected(isSelected); + pigeonReturn.setBitrate(bitrate); + pigeonReturn.setSampleRate(sampleRate); + pigeonReturn.setChannelCount(channelCount); + pigeonReturn.setCodec(codec); + return pigeonReturn; + } + } + + @NonNull + ArrayList toList() { + ArrayList toListResult = new ArrayList<>(8); + toListResult.add(id); + toListResult.add(label); + toListResult.add(language); + toListResult.add(isSelected); + toListResult.add(bitrate); + toListResult.add(sampleRate); + toListResult.add(channelCount); + toListResult.add(codec); + return toListResult; + } + + static @NonNull AudioTrackMessage fromList(@NonNull ArrayList pigeonVar_list) { + AudioTrackMessage pigeonResult = new AudioTrackMessage(); + Object id = pigeonVar_list.get(0); + pigeonResult.setId((String) id); + Object label = pigeonVar_list.get(1); + pigeonResult.setLabel((String) label); + Object language = pigeonVar_list.get(2); + pigeonResult.setLanguage((String) language); + Object isSelected = pigeonVar_list.get(3); + pigeonResult.setIsSelected((Boolean) isSelected); + Object bitrate = pigeonVar_list.get(4); + pigeonResult.setBitrate((Long) bitrate); + Object sampleRate = pigeonVar_list.get(5); 
+ pigeonResult.setSampleRate((Long) sampleRate); + Object channelCount = pigeonVar_list.get(6); + pigeonResult.setChannelCount((Long) channelCount); + Object codec = pigeonVar_list.get(7); + pigeonResult.setCodec((String) codec); + return pigeonResult; + } + } + + /** + * Raw audio track data from ExoPlayer Format objects. + * + *
<p>
Generated class from Pigeon that represents data sent in messages. + */ + public static final class ExoPlayerAudioTrackData { + private @NonNull String trackId; + + public @NonNull String getTrackId() { + return trackId; + } + + public void setTrackId(@NonNull String setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"trackId\" is null."); + } + this.trackId = setterArg; + } + + private @Nullable String label; + + public @Nullable String getLabel() { + return label; + } + + public void setLabel(@Nullable String setterArg) { + this.label = setterArg; + } + + private @Nullable String language; + + public @Nullable String getLanguage() { + return language; + } + + public void setLanguage(@Nullable String setterArg) { + this.language = setterArg; + } + + private @NonNull Boolean isSelected; + + public @NonNull Boolean getIsSelected() { + return isSelected; + } + + public void setIsSelected(@NonNull Boolean setterArg) { + if (setterArg == null) { + throw new IllegalStateException("Nonnull field \"isSelected\" is null."); + } + this.isSelected = setterArg; + } + + private @Nullable Long bitrate; + + public @Nullable Long getBitrate() { + return bitrate; + } + + public void setBitrate(@Nullable Long setterArg) { + this.bitrate = setterArg; + } + + private @Nullable Long sampleRate; + + public @Nullable Long getSampleRate() { + return sampleRate; + } + + public void setSampleRate(@Nullable Long setterArg) { + this.sampleRate = setterArg; + } + + private @Nullable Long channelCount; + + public @Nullable Long getChannelCount() { + return channelCount; + } + + public void setChannelCount(@Nullable Long setterArg) { + this.channelCount = setterArg; + } + + private @Nullable String codec; + + public @Nullable String getCodec() { + return codec; + } + + public void setCodec(@Nullable String setterArg) { + this.codec = setterArg; + } + + /** Constructor is non-public to enforce null safety; use Builder. 
*/ + ExoPlayerAudioTrackData() {} + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ExoPlayerAudioTrackData that = (ExoPlayerAudioTrackData) o; + return trackId.equals(that.trackId) + && Objects.equals(label, that.label) + && Objects.equals(language, that.language) + && isSelected.equals(that.isSelected) + && Objects.equals(bitrate, that.bitrate) + && Objects.equals(sampleRate, that.sampleRate) + && Objects.equals(channelCount, that.channelCount) + && Objects.equals(codec, that.codec); + } + + @Override + public int hashCode() { + return Objects.hash( + trackId, label, language, isSelected, bitrate, sampleRate, channelCount, codec); + } + + public static final class Builder { + + private @Nullable String trackId; + + @CanIgnoreReturnValue + public @NonNull Builder setTrackId(@NonNull String setterArg) { + this.trackId = setterArg; + return this; + } + + private @Nullable String label; + + @CanIgnoreReturnValue + public @NonNull Builder setLabel(@Nullable String setterArg) { + this.label = setterArg; + return this; + } + + private @Nullable String language; + + @CanIgnoreReturnValue + public @NonNull Builder setLanguage(@Nullable String setterArg) { + this.language = setterArg; + return this; + } + + private @Nullable Boolean isSelected; + + @CanIgnoreReturnValue + public @NonNull Builder setIsSelected(@NonNull Boolean setterArg) { + this.isSelected = setterArg; + return this; + } + + private @Nullable Long bitrate; + + @CanIgnoreReturnValue + public @NonNull Builder setBitrate(@Nullable Long setterArg) { + this.bitrate = setterArg; + return this; + } + + private @Nullable Long sampleRate; + + @CanIgnoreReturnValue + public @NonNull Builder setSampleRate(@Nullable Long setterArg) { + this.sampleRate = setterArg; + return this; + } + + private @Nullable Long channelCount; + + @CanIgnoreReturnValue + public @NonNull Builder setChannelCount(@Nullable Long setterArg) { + this.channelCount = setterArg; + return this; + } + + private @Nullable String codec; + + @CanIgnoreReturnValue + public @NonNull Builder setCodec(@Nullable String setterArg) { + this.codec = setterArg; + return this; + } + + public @NonNull ExoPlayerAudioTrackData build() { + ExoPlayerAudioTrackData pigeonReturn = new ExoPlayerAudioTrackData(); + pigeonReturn.setTrackId(trackId); + pigeonReturn.setLabel(label); + pigeonReturn.setLanguage(language); + pigeonReturn.setIsSelected(isSelected); + pigeonReturn.setBitrate(bitrate); + pigeonReturn.setSampleRate(sampleRate); + pigeonReturn.setChannelCount(channelCount); + pigeonReturn.setCodec(codec); + return pigeonReturn; + } + } + + @NonNull + ArrayList toList() { + ArrayList toListResult = new ArrayList<>(8); + toListResult.add(trackId); + toListResult.add(label); + toListResult.add(language); + toListResult.add(isSelected); + toListResult.add(bitrate); + toListResult.add(sampleRate); + toListResult.add(channelCount); + toListResult.add(codec); + return toListResult; + } + + static @NonNull ExoPlayerAudioTrackData fromList(@NonNull ArrayList pigeonVar_list) { + ExoPlayerAudioTrackData pigeonResult = new ExoPlayerAudioTrackData(); + Object trackId = pigeonVar_list.get(0); + pigeonResult.setTrackId((String) trackId); + Object label = pigeonVar_list.get(1); + pigeonResult.setLabel((String) label); + Object language = pigeonVar_list.get(2); + pigeonResult.setLanguage((String) language); + Object isSelected = pigeonVar_list.get(3); + pigeonResult.setIsSelected((Boolean) 
isSelected); + Object bitrate = pigeonVar_list.get(4); + pigeonResult.setBitrate((Long) bitrate); + Object sampleRate = pigeonVar_list.get(5); + pigeonResult.setSampleRate((Long) sampleRate); + Object channelCount = pigeonVar_list.get(6); + pigeonResult.setChannelCount((Long) channelCount); + Object codec = pigeonVar_list.get(7); + pigeonResult.setCodec((String) codec); + return pigeonResult; + } + } + + /** + * Container for raw audio track data from Android ExoPlayer. + * + *
<p>
Generated class from Pigeon that represents data sent in messages. + */ + public static final class NativeAudioTrackData { + /** ExoPlayer-based tracks */ + private @Nullable List exoPlayerTracks; + + public @Nullable List getExoPlayerTracks() { + return exoPlayerTracks; + } + + public void setExoPlayerTracks(@Nullable List setterArg) { + this.exoPlayerTracks = setterArg; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NativeAudioTrackData that = (NativeAudioTrackData) o; + return Objects.equals(exoPlayerTracks, that.exoPlayerTracks); + } + + @Override + public int hashCode() { + return Objects.hash(exoPlayerTracks); + } + + public static final class Builder { + + private @Nullable List exoPlayerTracks; + + @CanIgnoreReturnValue + public @NonNull Builder setExoPlayerTracks( + @Nullable List setterArg) { + this.exoPlayerTracks = setterArg; + return this; + } + + public @NonNull NativeAudioTrackData build() { + NativeAudioTrackData pigeonReturn = new NativeAudioTrackData(); + pigeonReturn.setExoPlayerTracks(exoPlayerTracks); + return pigeonReturn; + } + } + + @NonNull + ArrayList toList() { + ArrayList toListResult = new ArrayList<>(1); + toListResult.add(exoPlayerTracks); + return toListResult; + } + + static @NonNull NativeAudioTrackData fromList(@NonNull ArrayList pigeonVar_list) { + NativeAudioTrackData pigeonResult = new NativeAudioTrackData(); + Object exoPlayerTracks = pigeonVar_list.get(0); + pigeonResult.setExoPlayerTracks((List) exoPlayerTracks); + return pigeonResult; + } + } + private static class PigeonCodec extends StandardMessageCodec { public static final PigeonCodec INSTANCE = new PigeonCodec(); @@ -442,6 +988,12 @@ protected Object readValueOfType(byte type, @NonNull ByteBuffer buffer) { return CreateMessage.fromList((ArrayList) readValue(buffer)); case (byte) 133: return PlaybackState.fromList((ArrayList) readValue(buffer)); + case (byte) 134: + return AudioTrackMessage.fromList((ArrayList) readValue(buffer)); + case (byte) 135: + return ExoPlayerAudioTrackData.fromList((ArrayList) readValue(buffer)); + case (byte) 136: + return NativeAudioTrackData.fromList((ArrayList) readValue(buffer)); default: return super.readValueOfType(type, buffer); } @@ -464,6 +1016,15 @@ protected void writeValue(@NonNull ByteArrayOutputStream stream, Object value) { } else if (value instanceof PlaybackState) { stream.write(133); writeValue(stream, ((PlaybackState) value).toList()); + } else if (value instanceof AudioTrackMessage) { + stream.write(134); + writeValue(stream, ((AudioTrackMessage) value).toList()); + } else if (value instanceof ExoPlayerAudioTrackData) { + stream.write(135); + writeValue(stream, ((ExoPlayerAudioTrackData) value).toList()); + } else if (value instanceof NativeAudioTrackData) { + stream.write(136); + writeValue(stream, ((NativeAudioTrackData) value).toList()); } else { super.writeValue(stream, value); } @@ -651,6 +1212,11 @@ public interface VideoPlayerInstanceApi { */ @NonNull PlaybackState getPlaybackState(); + /** Gets the available audio tracks for the video. */ + @NonNull + NativeAudioTrackData getAudioTracks(); + /** Selects an audio track by its ID. */ + void selectAudioTrack(@NonNull String trackId); /** The codec used by VideoPlayerInstanceApi. 
*/ static @NonNull MessageCodec getCodec() { @@ -839,6 +1405,54 @@ static void setUp( channel.setMessageHandler(null); } } + { + BasicMessageChannel channel = + new BasicMessageChannel<>( + binaryMessenger, + "dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.getAudioTracks" + + messageChannelSuffix, + getCodec()); + if (api != null) { + channel.setMessageHandler( + (message, reply) -> { + ArrayList wrapped = new ArrayList<>(); + try { + NativeAudioTrackData output = api.getAudioTracks(); + wrapped.add(0, output); + } catch (Throwable exception) { + wrapped = wrapError(exception); + } + reply.reply(wrapped); + }); + } else { + channel.setMessageHandler(null); + } + } + { + BasicMessageChannel channel = + new BasicMessageChannel<>( + binaryMessenger, + "dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.selectAudioTrack" + + messageChannelSuffix, + getCodec()); + if (api != null) { + channel.setMessageHandler( + (message, reply) -> { + ArrayList wrapped = new ArrayList<>(); + ArrayList args = (ArrayList) message; + String trackIdArg = (String) args.get(0); + try { + api.selectAudioTrack(trackIdArg); + wrapped.add(0, null); + } catch (Throwable exception) { + wrapped = wrapError(exception); + } + reply.reply(wrapped); + }); + } else { + channel.setMessageHandler(null); + } + } } } } diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java index 27dc9e95609..979ec26436e 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayer.java @@ -11,10 +11,18 @@ import androidx.annotation.Nullable; import androidx.media3.common.AudioAttributes; import androidx.media3.common.C; +import androidx.media3.common.Format; import androidx.media3.common.MediaItem; import androidx.media3.common.PlaybackParameters; +import androidx.media3.common.TrackGroup; +import androidx.media3.common.TrackSelectionOverride; +import androidx.media3.common.Tracks; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; +import androidx.media3.exoplayer.trackselection.DefaultTrackSelector; import io.flutter.view.TextureRegistry.SurfaceProducer; +import java.util.ArrayList; +import java.util.List; /** * A class responsible for managing video playback using {@link ExoPlayer}. @@ -26,6 +34,7 @@ public abstract class VideoPlayer implements Messages.VideoPlayerInstanceApi { @Nullable protected final SurfaceProducer surfaceProducer; @Nullable private DisposeHandler disposeHandler; @NonNull protected ExoPlayer exoPlayer; + @UnstableApi @Nullable protected DefaultTrackSelector trackSelector; /** A closure-compatible signature since {@link java.util.function.Supplier} is API level 24. 
*/ public interface ExoPlayerProvider { @@ -43,6 +52,7 @@ public interface DisposeHandler { void onDispose(); } + @UnstableApi public VideoPlayer( @NonNull VideoPlayerCallbacks events, @NonNull MediaItem mediaItem, @@ -52,6 +62,12 @@ public VideoPlayer( this.videoPlayerEvents = events; this.surfaceProducer = surfaceProducer; exoPlayer = exoPlayerProvider.get(); + + // Try to get the track selector from the ExoPlayer if it was built with one + if (exoPlayer.getTrackSelector() instanceof DefaultTrackSelector) { + trackSelector = (DefaultTrackSelector) exoPlayer.getTrackSelector(); + } + exoPlayer.setMediaItem(mediaItem); exoPlayer.prepare(); exoPlayer.addListener(createExoPlayerEventListener(exoPlayer, surfaceProducer)); @@ -120,6 +136,91 @@ public ExoPlayer getExoPlayer() { return exoPlayer; } + @UnstableApi + @Override + public @NonNull Messages.NativeAudioTrackData getAudioTracks() { + List audioTracks = new ArrayList<>(); + + // Get the current tracks from ExoPlayer + Tracks tracks = exoPlayer.getCurrentTracks(); + + // Iterate through all track groups + for (int groupIndex = 0; groupIndex < tracks.getGroups().size(); groupIndex++) { + Tracks.Group group = tracks.getGroups().get(groupIndex); + + // Only process audio tracks + if (group.getType() == C.TRACK_TYPE_AUDIO) { + for (int trackIndex = 0; trackIndex < group.length; trackIndex++) { + Format format = group.getTrackFormat(trackIndex); + boolean isSelected = group.isTrackSelected(trackIndex); + + // Create AudioTrackMessage with metadata + Messages.ExoPlayerAudioTrackData audioTrack = + new Messages.ExoPlayerAudioTrackData.Builder() + .setTrackId(groupIndex + "_" + trackIndex) + .setLabel(format.label != null ? format.label : "Audio Track " + (trackIndex + 1)) + .setLanguage(format.language != null ? format.language : "und") + .setIsSelected(isSelected) + .setBitrate(format.bitrate != Format.NO_VALUE ? (long) format.bitrate : null) + .setSampleRate( + format.sampleRate != Format.NO_VALUE ? (long) format.sampleRate : null) + .setChannelCount( + format.channelCount != Format.NO_VALUE ? (long) format.channelCount : null) + .setCodec(format.codecs != null ? 
format.codecs : null) + .build(); + + audioTracks.add(audioTrack); + } + } + } + + return new Messages.NativeAudioTrackData.Builder().setExoPlayerTracks(audioTracks).build(); + } + + @UnstableApi + @Override + public void selectAudioTrack(@NonNull String trackId) { + if (trackSelector == null) { + return; + } + + try { + // Parse the trackId (format: "groupIndex_trackIndex") + String[] parts = trackId.split("_"); + if (parts.length != 2) { + return; + } + + int groupIndex = Integer.parseInt(parts[0]); + int trackIndex = Integer.parseInt(parts[1]); + + // Get current tracks + Tracks tracks = exoPlayer.getCurrentTracks(); + + if (groupIndex >= tracks.getGroups().size()) { + return; + } + + Tracks.Group group = tracks.getGroups().get(groupIndex); + + // Verify it's an audio track and the track index is valid + if (group.getType() != C.TRACK_TYPE_AUDIO || trackIndex >= group.length) { + return; + } + + // Get the track group and create a selection override + TrackGroup trackGroup = group.getMediaTrackGroup(); + TrackSelectionOverride override = new TrackSelectionOverride(trackGroup, trackIndex); + + // Apply the track selection override + trackSelector.setParameters( + trackSelector.buildUponParameters().setOverrideForType(override).build()); + + } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { + // Invalid trackId format, ignore + } + } + public void dispose() { if (disposeHandler != null) { disposeHandler.onDispose(); diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerPlugin.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerPlugin.java index 672c297c5a0..898ac4e711a 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerPlugin.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/VideoPlayerPlugin.java @@ -90,6 +90,7 @@ public void initialize() { } @Override + @androidx.media3.common.util.UnstableApi public @NonNull Long create(@NonNull CreateMessage arg) { final @NonNull String uri = arg.getUri(); final VideoAsset videoAsset; diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java index f1da6cf5b5e..8af2510cab9 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/platformview/PlatformViewVideoPlayer.java @@ -9,6 +9,7 @@ import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import androidx.media3.common.MediaItem; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; import io.flutter.plugins.videoplayer.ExoPlayerEventListener; import io.flutter.plugins.videoplayer.VideoAsset; @@ -22,6 +23,7 @@ * displaying the video in the app. */ public class PlatformViewVideoPlayer extends VideoPlayer { + @UnstableApi @VisibleForTesting public PlatformViewVideoPlayer( @NonNull VideoPlayerCallbacks events, @@ -40,6 +42,7 @@ public PlatformViewVideoPlayer( * @param options options for playback. * @return a video player instance. 
*/ + @UnstableApi @NonNull public static PlatformViewVideoPlayer create( @NonNull Context context, @@ -51,8 +54,11 @@ public static PlatformViewVideoPlayer create( asset.getMediaItem(), options, () -> { + androidx.media3.exoplayer.trackselection.DefaultTrackSelector trackSelector = + new androidx.media3.exoplayer.trackselection.DefaultTrackSelector(context); ExoPlayer.Builder builder = new ExoPlayer.Builder(context) + .setTrackSelector(trackSelector) .setMediaSourceFactory(asset.getMediaSourceFactory(context)); return builder.build(); }); diff --git a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java index c9d18dccce4..684a95f008d 100644 --- a/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java +++ b/packages/video_player/video_player_android/android/src/main/java/io/flutter/plugins/videoplayer/texture/TextureVideoPlayer.java @@ -11,6 +11,7 @@ import androidx.annotation.RestrictTo; import androidx.annotation.VisibleForTesting; import androidx.media3.common.MediaItem; +import androidx.media3.common.util.UnstableApi; import androidx.media3.exoplayer.ExoPlayer; import io.flutter.plugins.videoplayer.ExoPlayerEventListener; import io.flutter.plugins.videoplayer.VideoAsset; @@ -39,6 +40,7 @@ public final class TextureVideoPlayer extends VideoPlayer implements SurfaceProd * @param options options for playback. * @return a video player instance. */ + @UnstableApi @NonNull public static TextureVideoPlayer create( @NonNull Context context, @@ -52,13 +54,17 @@ public static TextureVideoPlayer create( asset.getMediaItem(), options, () -> { + androidx.media3.exoplayer.trackselection.DefaultTrackSelector trackSelector = + new androidx.media3.exoplayer.trackselection.DefaultTrackSelector(context); ExoPlayer.Builder builder = new ExoPlayer.Builder(context) + .setTrackSelector(trackSelector) .setMediaSourceFactory(asset.getMediaSourceFactory(context)); return builder.build(); }); } + @UnstableApi @VisibleForTesting public TextureVideoPlayer( @NonNull VideoPlayerCallbacks events, diff --git a/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java b/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java new file mode 100644 index 00000000000..37fc6c94214 --- /dev/null +++ b/packages/video_player/video_player_android/android/src/test/java/io/flutter/plugins/videoplayer/AudioTracksTest.java @@ -0,0 +1,363 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +package io.flutter.plugins.videoplayer; + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import androidx.media3.common.C; +import androidx.media3.common.Format; +import androidx.media3.common.MediaItem; +import androidx.media3.common.Tracks; +import androidx.media3.exoplayer.ExoPlayer; +import com.google.common.collect.ImmutableList; +import io.flutter.view.TextureRegistry; +import java.lang.reflect.Field; +import java.util.List; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.robolectric.RobolectricTestRunner; + +@RunWith(RobolectricTestRunner.class) +public class AudioTracksTest { + + @Mock private ExoPlayer mockExoPlayer; + @Mock private VideoPlayerCallbacks mockVideoPlayerCallbacks; + @Mock private TextureRegistry.SurfaceProducer mockSurfaceProducer; + @Mock private MediaItem mockMediaItem; + @Mock private VideoPlayerOptions mockVideoPlayerOptions; + @Mock private Tracks mockTracks; + @Mock private Tracks.Group mockAudioGroup1; + @Mock private Tracks.Group mockAudioGroup2; + @Mock private Tracks.Group mockVideoGroup; + + private VideoPlayer videoPlayer; + + @Before + public void setUp() { + MockitoAnnotations.openMocks(this); + + // Create a concrete VideoPlayer implementation for testing + videoPlayer = + new VideoPlayer( + mockVideoPlayerCallbacks, + mockMediaItem, + mockVideoPlayerOptions, + mockSurfaceProducer, + () -> mockExoPlayer) { + @Override + protected ExoPlayerEventListener createExoPlayerEventListener( + ExoPlayer exoPlayer, TextureRegistry.SurfaceProducer surfaceProducer) { + return mock(ExoPlayerEventListener.class); + } + }; + } + + // Helper method to set the length field on a mocked Tracks.Group + private void setGroupLength(Tracks.Group group, int length) { + try { + Field lengthField = group.getClass().getDeclaredField("length"); + lengthField.setAccessible(true); + lengthField.setInt(group, length); + } catch (Exception e) { + // If reflection fails, we'll handle it in the test + throw new RuntimeException("Failed to set length field", e); + } + } + + @Test + public void testGetAudioTracks_withMultipleAudioTracks() { + // Create mock formats for audio tracks + Format audioFormat1 = + new Format.Builder() + .setId("audio_track_1") + .setLabel("English") + .setLanguage("en") + .setAverageBitrate(128000) + .setSampleRate(48000) + .setChannelCount(2) + .setCodecs("mp4a.40.2") + .build(); + + Format audioFormat2 = + new Format.Builder() + .setId("audio_track_2") + .setLabel("Español") + .setLanguage("es") + .setAverageBitrate(96000) + .setSampleRate(44100) + .setChannelCount(2) + .setCodecs("mp4a.40.2") + .build(); + + // Mock audio groups and set length field + setGroupLength(mockAudioGroup1, 1); + setGroupLength(mockAudioGroup2, 1); + + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat1); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + + when(mockAudioGroup2.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup2.getTrackFormat(0)).thenReturn(audioFormat2); + when(mockAudioGroup2.isTrackSelected(0)).thenReturn(false); + + when(mockVideoGroup.getType()).thenReturn(C.TRACK_TYPE_VIDEO); + + // Mock tracks + ImmutableList groups = + ImmutableList.of(mockAudioGroup1, mockAudioGroup2, mockVideoGroup); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test 
the method + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(2, result.size()); + + // Verify first track + Messages.ExoPlayerAudioTrackData track1 = result.get(0); + assertEquals("0_0", track1.getTrackId()); + assertEquals("English", track1.getLabel()); + assertEquals("en", track1.getLanguage()); + assertTrue(track1.getIsSelected()); + assertEquals(Long.valueOf(128000), track1.getBitrate()); + assertEquals(Long.valueOf(48000), track1.getSampleRate()); + assertEquals(Long.valueOf(2), track1.getChannelCount()); + assertEquals("mp4a.40.2", track1.getCodec()); + + // Verify second track + Messages.ExoPlayerAudioTrackData track2 = result.get(1); + assertEquals("1_0", track2.getTrackId()); + assertEquals("Español", track2.getLabel()); + assertEquals("es", track2.getLanguage()); + assertFalse(track2.getIsSelected()); + assertEquals(Long.valueOf(96000), track2.getBitrate()); + assertEquals(Long.valueOf(44100), track2.getSampleRate()); + assertEquals(Long.valueOf(2), track2.getChannelCount()); + assertEquals("mp4a.40.2", track2.getCodec()); + } + + @Test + public void testGetAudioTracks_withNoAudioTracks() { + // Mock video group only (no audio tracks) + when(mockVideoGroup.getType()).thenReturn(C.TRACK_TYPE_VIDEO); + + ImmutableList groups = ImmutableList.of(mockVideoGroup); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(0, result.size()); + } + + @Test + public void testGetAudioTracks_withNullValues() { + // Create format with null/missing values + Format audioFormat = + new Format.Builder() + .setId("audio_track_null") + .setLabel(null) // Null label + .setLanguage(null) // Null language + .setAverageBitrate(Format.NO_VALUE) // No bitrate + .setSampleRate(Format.NO_VALUE) // No sample rate + .setChannelCount(Format.NO_VALUE) // No channel count + .setCodecs(null) // Null codec + .build(); + + // Mock audio group and set length field + setGroupLength(mockAudioGroup1, 1); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(1, result.size()); + + Messages.ExoPlayerAudioTrackData track = result.get(0); + assertEquals("0_0", track.getTrackId()); + assertEquals("Audio Track 1", track.getLabel()); // Fallback label + assertEquals("und", track.getLanguage()); // Fallback language + assertFalse(track.getIsSelected()); + assertNull(track.getBitrate()); + assertNull(track.getSampleRate()); + assertNull(track.getChannelCount()); + assertNull(track.getCodec()); + } + + @Test + public void testGetAudioTracks_withMultipleTracksInSameGroup() { + // Create format for group with multiple tracks + Format audioFormat1 = + new Format.Builder() + .setId("audio_track_1") + .setLabel("Track 
1") + .setLanguage("en") + .setAverageBitrate(128000) + .build(); + + Format audioFormat2 = + new Format.Builder() + .setId("audio_track_2") + .setLabel("Track 2") + .setLanguage("en") + .setAverageBitrate(192000) + .build(); + + // Mock audio group with multiple tracks + setGroupLength(mockAudioGroup1, 2); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(audioFormat1); + when(mockAudioGroup1.getTrackFormat(1)).thenReturn(audioFormat2); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + when(mockAudioGroup1.isTrackSelected(1)).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(2, result.size()); + + // Verify track IDs are unique + Messages.ExoPlayerAudioTrackData track1 = result.get(0); + Messages.ExoPlayerAudioTrackData track2 = result.get(1); + assertEquals("0_0", track1.getTrackId()); + assertEquals("0_1", track2.getTrackId()); + assertNotEquals(track1.getTrackId(), track2.getTrackId()); + } + + @Test + public void testGetAudioTracks_withDifferentCodecs() { + // Test various codec formats + Format aacFormat = new Format.Builder().setCodecs("mp4a.40.2").setLabel("AAC Track").build(); + + Format ac3Format = new Format.Builder().setCodecs("ac-3").setLabel("AC3 Track").build(); + + Format eac3Format = new Format.Builder().setCodecs("ec-3").setLabel("EAC3 Track").build(); + + // Mock audio group with different codecs + setGroupLength(mockAudioGroup1, 3); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(aacFormat); + when(mockAudioGroup1.getTrackFormat(1)).thenReturn(ac3Format); + when(mockAudioGroup1.getTrackFormat(2)).thenReturn(eac3Format); + when(mockAudioGroup1.isTrackSelected(anyInt())).thenReturn(false); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(3, result.size()); + + assertEquals("mp4a.40.2", result.get(0).getCodec()); + assertEquals("ac-3", result.get(1).getCodec()); + assertEquals("ec-3", result.get(2).getCodec()); + } + + @Test + public void testGetAudioTracks_withHighBitrateValues() { + // Test with high bitrate values + Format highBitrateFormat = + new Format.Builder() + .setId("high_bitrate_track") + .setLabel("High Quality") + .setAverageBitrate(1536000) // 1.5 Mbps + .setSampleRate(96000) // 96 kHz + .setChannelCount(8) // 7.1 surround + .build(); + + // Mock audio group with high bitrate format + setGroupLength(mockAudioGroup1, 1); + when(mockAudioGroup1.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockAudioGroup1.getTrackFormat(0)).thenReturn(highBitrateFormat); + when(mockAudioGroup1.isTrackSelected(0)).thenReturn(true); + + ImmutableList groups = ImmutableList.of(mockAudioGroup1); + when(mockTracks.getGroups()).thenReturn(groups); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Test the method + 
Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + + // Verify results + assertNotNull(result); + assertEquals(1, result.size()); + + Messages.ExoPlayerAudioTrackData track = result.get(0); + assertEquals(Long.valueOf(1536000), track.getBitrate()); + assertEquals(Long.valueOf(96000), track.getSampleRate()); + assertEquals(Long.valueOf(8), track.getChannelCount()); + } + + @Test + public void testGetAudioTracks_performanceWithManyTracks() { + // Test performance with many audio tracks + int numGroups = 50; + List groups = new java.util.ArrayList<>(); + + for (int i = 0; i < numGroups; i++) { + Format format = + new Format.Builder().setId("track_" + i).setLabel("Track " + i).setLanguage("en").build(); + + Tracks.Group mockGroup = mock(Tracks.Group.class); + setGroupLength(mockGroup, 1); + when(mockGroup.getType()).thenReturn(C.TRACK_TYPE_AUDIO); + when(mockGroup.getTrackFormat(0)).thenReturn(format); + when(mockGroup.isTrackSelected(0)).thenReturn(i == 0); // Only first track selected + groups.add(mockGroup); + } + + when(mockTracks.getGroups()).thenReturn(ImmutableList.copyOf(groups)); + when(mockExoPlayer.getCurrentTracks()).thenReturn(mockTracks); + + // Measure performance + long startTime = System.currentTimeMillis(); + Messages.NativeAudioTrackData nativeData = videoPlayer.getAudioTracks(); + List result = nativeData.getExoPlayerTracks(); + long endTime = System.currentTimeMillis(); + + // Verify results + assertNotNull(result); + assertEquals(numGroups, result.size()); + + // Should complete within reasonable time (1 second for 50 tracks) + assertTrue( + "getAudioTracks took too long: " + (endTime - startTime) + "ms", + (endTime - startTime) < 1000); + } +} diff --git a/packages/video_player/video_player_android/example/pubspec.yaml b/packages/video_player/video_player_android/example/pubspec.yaml index 286f6b89e69..fb2e387c7ee 100644 --- a/packages/video_player/video_player_android/example/pubspec.yaml +++ b/packages/video_player/video_player_android/example/pubspec.yaml @@ -34,3 +34,7 @@ flutter: assets: - assets/flutter-mark-square-64.png - assets/Butterfly-209.mp4 +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player_android/lib/src/android_video_player.dart b/packages/video_player/video_player_android/lib/src/android_video_player.dart index a3f147c31de..7ea0ead7a0c 100644 --- a/packages/video_player/video_player_android/lib/src/android_video_player.dart +++ b/packages/video_player/video_player_android/lib/src/android_video_player.dart @@ -213,6 +213,38 @@ class AndroidVideoPlayer extends VideoPlayerPlatform { return _api.setMixWithOthers(mixWithOthers); } + @override + Future> getAudioTracks(int playerId) async { + final NativeAudioTrackData nativeData = + await _playerWith(id: playerId).getAudioTracks(); + final List tracks = []; + + // Convert ExoPlayer tracks to VideoAudioTrack + if (nativeData.exoPlayerTracks != null) { + for (final ExoPlayerAudioTrackData track in nativeData.exoPlayerTracks!) { + tracks.add( + VideoAudioTrack( + id: track.trackId, + label: track.label ?? 'Unknown', + language: track.language ?? 
'und', + isSelected: track.isSelected, + bitrate: track.bitrate, + sampleRate: track.sampleRate, + channelCount: track.channelCount, + codec: track.codec, + ), + ); + } + } + + return tracks; + } + + @override + Future selectAudioTrack(int playerId, String trackId) { + return _playerWith(id: playerId).selectAudioTrack(trackId); + } + _PlayerInstance _playerWith({required int id}) { final _PlayerInstance? player = _players[id]; return player ?? (throw StateError('No active player with ID $id.')); @@ -312,6 +344,14 @@ class _PlayerInstance { return _eventStreamController.stream; } + Future getAudioTracks() { + return _api.getAudioTracks(); + } + + Future selectAudioTrack(String trackId) { + return _api.selectAudioTrack(trackId); + } + Future dispose() async { await _eventSubscription.cancel(); } diff --git a/packages/video_player/video_player_android/lib/src/messages.g.dart b/packages/video_player/video_player_android/lib/src/messages.g.dart index e576b0336a4..ee793e71f2a 100644 --- a/packages/video_player/video_player_android/lib/src/messages.g.dart +++ b/packages/video_player/video_player_android/lib/src/messages.g.dart @@ -177,6 +177,200 @@ class PlaybackState { int get hashCode => Object.hashAll(_toList()); } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + + String label; + + String language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AudioTrackMessage decode(Object result) { + result as List; + return AudioTrackMessage( + id: result[0]! as String, + label: result[1]! as String, + language: result[2]! as String, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AudioTrackMessage || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from ExoPlayer Format objects. +class ExoPlayerAudioTrackData { + ExoPlayerAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String trackId; + + String? label; + + String? language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + trackId, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static ExoPlayerAudioTrackData decode(Object result) { + result as List; + return ExoPlayerAudioTrackData( + trackId: result[0]! as String, + label: result[1] as String?, + language: result[2] as String?, + isSelected: result[3]! 
as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! ExoPlayerAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Container for raw audio track data from Android ExoPlayer. +class NativeAudioTrackData { + NativeAudioTrackData({this.exoPlayerTracks}); + + /// ExoPlayer-based tracks + List? exoPlayerTracks; + + List _toList() { + return [exoPlayerTracks]; + } + + Object encode() { + return _toList(); + } + + static NativeAudioTrackData decode(Object result) { + result as List; + return NativeAudioTrackData( + exoPlayerTracks: + (result[0] as List?)?.cast(), + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! NativeAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -199,6 +393,15 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is PlaybackState) { buffer.putUint8(133); writeValue(buffer, value.encode()); + } else if (value is AudioTrackMessage) { + buffer.putUint8(134); + writeValue(buffer, value.encode()); + } else if (value is ExoPlayerAudioTrackData) { + buffer.putUint8(135); + writeValue(buffer, value.encode()); + } else if (value is NativeAudioTrackData) { + buffer.putUint8(136); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -219,6 +422,12 @@ class _PigeonCodec extends StandardMessageCodec { return CreateMessage.decode(readValue(buffer)!); case 133: return PlaybackState.decode(readValue(buffer)!); + case 134: + return AudioTrackMessage.decode(readValue(buffer)!); + case 135: + return ExoPlayerAudioTrackData.decode(readValue(buffer)!); + case 136: + return NativeAudioTrackData.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -598,4 +807,63 @@ class VideoPlayerInstanceApi { return (pigeonVar_replyList[0] as PlaybackState?)!; } } + + /// Gets the available audio tracks for the video. + Future getAudioTracks() async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.getAudioTracks$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
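
The generated message classes above serialize to positional `List<Object?>` values: `encode()` writes the eight fields in declaration order, `decode()` rebuilds them by index, and `operator ==` compares the encoded lists. A small round-trip sketch, assuming the generated file shown here is importable as `package:video_player_android/src/messages.g.dart` (the import path is an assumption based on the file's location in this diff).

```dart
import 'package:video_player_android/src/messages.g.dart';

void main() {
  final ExoPlayerAudioTrackData track = ExoPlayerAudioTrackData(
    trackId: '0_0',
    label: 'English',
    language: 'en',
    isSelected: true,
    bitrate: 128000,
    // sampleRate, channelCount, and codec stay null and encode as nulls.
  );

  // encode() produces a positional list; decode() rebuilds the value from it.
  final Object encoded = track.encode();
  final ExoPlayerAudioTrackData decoded =
      ExoPlayerAudioTrackData.decode(encoded);

  // Equality is defined over the encoded lists, so the round trip is lossless.
  assert(decoded == track);
}
```
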
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as NativeAudioTrackData?)!; + } + } + + /// Selects an audio track by its ID. + Future selectAudioTrack(String trackId) async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_android.VideoPlayerInstanceApi.selectAudioTrack$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [trackId], + ); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } diff --git a/packages/video_player/video_player_android/pigeons/messages.dart b/packages/video_player/video_player_android/pigeons/messages.dart index b2246ec6d33..bdf465b9bea 100644 --- a/packages/video_player/video_player_android/pigeons/messages.dart +++ b/packages/video_player/video_player_android/pigeons/messages.dart @@ -45,6 +45,60 @@ class PlaybackState { final int bufferPosition; } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + String label; + String language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from ExoPlayer Format objects. +class ExoPlayerAudioTrackData { + ExoPlayerAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String trackId; + String? label; + String? language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Container for raw audio track data from Android ExoPlayer. +class NativeAudioTrackData { + NativeAudioTrackData({this.exoPlayerTracks}); + + /// ExoPlayer-based tracks + List? exoPlayerTracks; +} + @HostApi() abstract class AndroidVideoPlayerApi { void initialize(); @@ -79,4 +133,10 @@ abstract class VideoPlayerInstanceApi { /// This is combined into a single call to minimize platform channel calls for /// state that needs to be polled frequently. PlaybackState getPlaybackState(); + + /// Gets the available audio tracks for the video. + NativeAudioTrackData getAudioTracks(); + + /// Selects an audio track by its ID. 
+ void selectAudioTrack(String trackId); } diff --git a/packages/video_player/video_player_android/pubspec.yaml b/packages/video_player/video_player_android/pubspec.yaml index 00129de08bb..51d7c853c2e 100644 --- a/packages/video_player/video_player_android/pubspec.yaml +++ b/packages/video_player/video_player_android/pubspec.yaml @@ -32,3 +32,7 @@ dev_dependencies: topics: - video - video-player +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index d358aae1f1c..2290901a5c7 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,5 +1,6 @@ ## NEXT +* Implements `getAudioTracks()` and `selectAudioTrack()` methods for iOS/macOS using AVFoundation. * Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. ## 2.8.4 diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m index 6e2afec3c96..56541bb5249 100644 --- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m @@ -1024,4 +1024,365 @@ - (nonnull AVPlayerItem *)playerItemWithURL:(NSURL *)url { return [AVPlayerItem playerItemWithAsset:[AVURLAsset URLAssetWithURL:url options:nil]]; } +#pragma mark - Audio Track Tests + +- (void)testGetAudioTracksWithRegularAssetTracks { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock asset tracks + id mockTrack1 = OCMClassMock([AVAssetTrack class]); + id mockTrack2 = OCMClassMock([AVAssetTrack class]); + + // Configure track 1 + OCMStub([mockTrack1 trackID]).andReturn(1); + OCMStub([mockTrack1 languageCode]).andReturn(@"en"); + OCMStub([mockTrack1 estimatedDataRate]).andReturn(128000.0f); + + // Configure track 2 + OCMStub([mockTrack2 trackID]).andReturn(2); + OCMStub([mockTrack2 languageCode]).andReturn(@"es"); + OCMStub([mockTrack2 estimatedDataRate]).andReturn(96000.0f); + + // Mock format descriptions for track 1 + id mockFormatDesc1 = OCMClassMock([NSObject class]); + AudioStreamBasicDescription asbd1 = {0}; + asbd1.mSampleRate = 48000.0; + asbd1.mChannelsPerFrame = 2; + + OCMStub([mockTrack1 formatDescriptions]).andReturn(@[ mockFormatDesc1 ]); + + // Mock the asset to return our tracks + NSArray *mockTracks = @[ mockTrack1, mockTrack2 ]; + OCMStub([mockAsset 
tracksWithMediaType:AVMediaTypeAudio]).andReturn(mockTracks); + + // Mock no media selection group (regular asset) + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + XCTAssertEqual(result.assetTracks.count, 2); + + // Verify first track + FVPAssetAudioTrackData *track1 = result.assetTracks[0]; + XCTAssertEqual(track1.trackId, 1); + XCTAssertEqualObjects(track1.language, @"en"); + XCTAssertTrue(track1.isSelected); // First track should be selected + XCTAssertEqualObjects(track1.bitrate, @128000); + + // Verify second track + FVPAssetAudioTrackData *track2 = result.assetTracks[1]; + XCTAssertEqual(track2.trackId, 2); + XCTAssertEqualObjects(track2.language, @"es"); + XCTAssertFalse(track2.isSelected); // Second track should not be selected + XCTAssertEqualObjects(track2.bitrate, @96000); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithMediaSelectionOptions { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group and options + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + id mockOption1 = OCMClassMock([AVMediaSelectionOption class]); + id mockOption2 = OCMClassMock([AVMediaSelectionOption class]); + + // Configure option 1 + OCMStub([mockOption1 displayName]).andReturn(@"English"); + id mockLocale1 = OCMClassMock([NSLocale class]); + OCMStub([mockLocale1 languageCode]).andReturn(@"en"); + OCMStub([mockOption1 locale]).andReturn(mockLocale1); + + // Configure option 2 + OCMStub([mockOption2 displayName]).andReturn(@"Español"); + id mockLocale2 = OCMClassMock([NSLocale class]); + OCMStub([mockLocale2 languageCode]).andReturn(@"es"); + OCMStub([mockOption2 locale]).andReturn(mockLocale2); + + // Mock metadata for option 1 + id mockMetadataItem = OCMClassMock([AVMetadataItem class]); + OCMStub([mockMetadataItem commonKey]).andReturn(AVMetadataCommonKeyTitle); + OCMStub([mockMetadataItem stringValue]).andReturn(@"English Audio Track"); + OCMStub([mockOption1 commonMetadata]).andReturn(@[ mockMetadataItem ]); + + // Configure media selection group + NSArray *options = @[ mockOption1, mockOption2 ]; + OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(options); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(2); + + // Mock the asset to return media selection group + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + + // Mock current selection for both iOS 11+ and 
older versions + id mockCurrentMediaSelection = OCMClassMock([AVMediaSelection class]); + OCMStub([mockPlayerItem currentMediaSelection]).andReturn(mockCurrentMediaSelection); + OCMStub([mockCurrentMediaSelection selectedMediaOptionInMediaSelectionGroup:mockMediaSelectionGroup]) + .andReturn(mockOption1); + + // Also mock the deprecated method for iOS < 11 + OCMStub([mockPlayerItem selectedMediaOptionInMediaSelectionGroup:mockMediaSelectionGroup]) + .andReturn(mockOption1); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + XCTAssertNotNil(result.mediaSelectionTracks); + XCTAssertEqual(result.mediaSelectionTracks.count, 2); + + // Verify first option + FVPMediaSelectionAudioTrackData *option1Data = result.mediaSelectionTracks[0]; + XCTAssertEqual(option1Data.index, 0); + XCTAssertEqualObjects(option1Data.displayName, @"English"); + XCTAssertEqualObjects(option1Data.languageCode, @"en"); + XCTAssertTrue(option1Data.isSelected); + XCTAssertEqualObjects(option1Data.commonMetadataTitle, @"English Audio Track"); + + // Verify second option + FVPMediaSelectionAudioTrackData *option2Data = result.mediaSelectionTracks[1]; + XCTAssertEqual(option2Data.index, 1); + XCTAssertEqualObjects(option2Data.displayName, @"Español"); + XCTAssertEqualObjects(option2Data.languageCode, @"es"); + XCTAssertFalse(option2Data.isSelected); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNoCurrentItem { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Mock player with no current item + OCMStub([mockPlayer currentItem]).andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNoAsset { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Mock player item with no asset + OCMStub([mockPlayerItem asset]).andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNil(result.assetTracks); + 
XCTAssertNil(result.mediaSelectionTracks); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksCodecDetection { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock asset track with format description + id mockTrack = OCMClassMock([AVAssetTrack class]); + OCMStub([mockTrack trackID]).andReturn(1); + OCMStub([mockTrack languageCode]).andReturn(@"en"); + + // Mock empty format descriptions to avoid Core Media crashes in test environment + OCMStub([mockTrack formatDescriptions]).andReturn(@[]); + + // Mock the asset + OCMStub([mockAsset tracksWithMediaType:AVMediaTypeAudio]).andReturn(@[ mockTrack ]); + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(nil); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertEqual(result.assetTracks.count, 1); + + FVPAssetAudioTrackData *track = result.assetTracks[0]; + XCTAssertEqual(track.trackId, 1); + XCTAssertEqualObjects(track.language, @"en"); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithEmptyMediaSelectionOptions { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group with no options + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(@[]); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(0); + + // Mock the asset + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + OCMStub([mockAsset tracksWithMediaType:AVMediaTypeAudio]).andReturn(@[]); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results - should fall back to asset tracks + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.assetTracks); + XCTAssertNil(result.mediaSelectionTracks); + 
XCTAssertEqual(result.assetTracks.count, 0); + + [player disposeWithError:&error]; +} + +- (void)testGetAudioTracksWithNilMediaSelectionOption { + // Create mocks + id mockPlayer = OCMClassMock([AVPlayer class]); + id mockPlayerItem = OCMClassMock([AVPlayerItem class]); + id mockAsset = OCMClassMock([AVAsset class]); + id mockAVFactory = OCMProtocolMock(@protocol(FVPAVFactory)); + id mockViewProvider = OCMProtocolMock(@protocol(FVPViewProvider)); + + // Set up basic mock relationships + OCMStub([mockPlayer currentItem]).andReturn(mockPlayerItem); + OCMStub([mockPlayerItem asset]).andReturn(mockAsset); + OCMStub([mockAVFactory playerWithPlayerItem:OCMOCK_ANY]).andReturn(mockPlayer); + + // Create player with mocks + FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:mockPlayerItem + avFactory:mockAVFactory + viewProvider:mockViewProvider]; + + // Create mock media selection group with nil option + id mockMediaSelectionGroup = OCMClassMock([AVMediaSelectionGroup class]); + NSArray *options = @[ [NSNull null] ]; // Simulate nil option + OCMStub([(AVMediaSelectionGroup *)mockMediaSelectionGroup options]).andReturn(options); + OCMStub([[(AVMediaSelectionGroup *)mockMediaSelectionGroup options] count]).andReturn(1); + + // Mock the asset + OCMStub([mockAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]) + .andReturn(mockMediaSelectionGroup); + + // Test the method + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + // Verify results - should handle nil option gracefully + XCTAssertNil(error); + XCTAssertNotNil(result); + XCTAssertNotNil(result.mediaSelectionTracks); + XCTAssertEqual(result.mediaSelectionTracks.count, 0); // Should skip nil options + + [player disposeWithError:&error]; +} + @end diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m index 84d2ba9b32c..e095a5beef9 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m @@ -466,6 +466,216 @@ - (void)setPlaybackSpeed:(double)speed error:(FlutterError *_Nullable *_Nonnull) [self updatePlayingState]; } +- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + if (!currentItem || !currentItem.asset) { + return [FVPNativeAudioTrackData makeWithAssetTracks:nil mediaSelectionTracks:nil]; + } + + AVAsset *asset = currentItem.asset; + + // First, try to get tracks from media selection (for HLS streams) + AVMediaSelectionGroup *audioGroup = + [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + if (audioGroup && audioGroup.options.count > 0) { + NSMutableArray *mediaSelectionTracks = + [[NSMutableArray alloc] init]; + AVMediaSelectionOption *currentSelection = nil; + if (@available(iOS 11.0, *)) { + AVMediaSelection *currentMediaSelection = currentItem.currentMediaSelection; + currentSelection = + [currentMediaSelection selectedMediaOptionInMediaSelectionGroup:audioGroup]; + } else { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + 
currentSelection = [currentItem selectedMediaOptionInMediaSelectionGroup:audioGroup]; +#pragma clang diagnostic pop + } + + for (NSInteger i = 0; i < audioGroup.options.count; i++) { + AVMediaSelectionOption *option = audioGroup.options[i]; + + // Skip nil options + if (!option || [option isKindOfClass:[NSNull class]]) { + continue; + } + + NSString *displayName = option.displayName; + if (!displayName || displayName.length == 0) { + displayName = [NSString stringWithFormat:@"Audio Track %ld", (long)(i + 1)]; + } + + NSString *languageCode = @"und"; + if (option.locale) { + languageCode = option.locale.languageCode ?: @"und"; + } + + NSString *commonMetadataTitle = nil; + for (AVMetadataItem *item in option.commonMetadata) { + if ([item.commonKey isEqualToString:AVMetadataCommonKeyTitle] && item.stringValue) { + commonMetadataTitle = item.stringValue; + break; + } + } + + BOOL isSelected = (currentSelection == option) || [currentSelection isEqual:option]; + + FVPMediaSelectionAudioTrackData *trackData = + [FVPMediaSelectionAudioTrackData makeWithIndex:i + displayName:displayName + languageCode:languageCode + isSelected:isSelected + commonMetadataTitle:commonMetadataTitle]; + + [mediaSelectionTracks addObject:trackData]; + } + + // Always return media selection tracks when there's a media selection group + // even if all options were nil/invalid (empty array) + return [FVPNativeAudioTrackData makeWithAssetTracks:nil + mediaSelectionTracks:mediaSelectionTracks]; + } + + // If no media selection group or empty, try to get tracks from AVAsset (for regular video files) + NSArray *assetAudioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + NSMutableArray *assetTracks = [[NSMutableArray alloc] init]; + + for (NSInteger i = 0; i < assetAudioTracks.count; i++) { + AVAssetTrack *track = assetAudioTracks[i]; + + // Extract metadata from the track + NSString *language = @"und"; + NSString *label = [NSString stringWithFormat:@"Audio Track %ld", (long)(i + 1)]; + + // Try to get language from track + NSString *trackLanguage = [track.languageCode length] > 0 ? 
track.languageCode : nil; + if (trackLanguage) { + language = trackLanguage; + } + + // Try to get label from metadata + for (AVMetadataItem *item in track.commonMetadata) { + if ([item.commonKey isEqualToString:AVMetadataCommonKeyTitle] && item.stringValue) { + label = item.stringValue; + break; + } + } + + // Extract format information + NSNumber *bitrate = nil; + NSNumber *sampleRate = nil; + NSNumber *channelCount = nil; + NSString *codec = nil; + + // Only attempt format description parsing in production (non-test) environments + // Skip entirely if we detect any mock objects or test environment indicators + NSString *trackClassName = NSStringFromClass([track class]); + BOOL isTestEnvironment = [trackClassName containsString:@"OCMockObject"] || + [trackClassName containsString:@"Mock"] || + NSClassFromString(@"XCTestCase") != nil; + + if (track.formatDescriptions.count > 0 && !isTestEnvironment) { + @try { + id formatDescObj = track.formatDescriptions[0]; + NSString *className = NSStringFromClass([formatDescObj class]); + + // Additional safety: only process objects that are clearly Core Media format descriptions + if (formatDescObj && ([className hasPrefix:@"CMAudioFormatDescription"] || + [className hasPrefix:@"CMVideoFormatDescription"] || + [className hasPrefix:@"CMFormatDescription"])) { + CMFormatDescriptionRef formatDesc = (__bridge CMFormatDescriptionRef)formatDescObj; + + // Get audio stream basic description + const AudioStreamBasicDescription *audioDesc = + CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc); + if (audioDesc) { + if (audioDesc->mSampleRate > 0) { + sampleRate = @((NSInteger)audioDesc->mSampleRate); + } + if (audioDesc->mChannelsPerFrame > 0) { + channelCount = @(audioDesc->mChannelsPerFrame); + } + } + + // Try to get codec information + FourCharCode codecType = CMFormatDescriptionGetMediaSubType(formatDesc); + switch (codecType) { + case kAudioFormatMPEG4AAC: + codec = @"aac"; + break; + case kAudioFormatAC3: + codec = @"ac3"; + break; + case kAudioFormatEnhancedAC3: + codec = @"eac3"; + break; + case kAudioFormatMPEGLayer3: + codec = @"mp3"; + break; + default: + codec = nil; + break; + } + } + } @catch (NSException *exception) { + // Silently handle any exceptions from format description parsing + // This can happen with mock objects in tests or invalid format descriptions + } + } + + // Estimate bitrate from track + if (track.estimatedDataRate > 0) { + bitrate = @((NSInteger)track.estimatedDataRate); + } + + // For now, assume the first track is selected (we don't have easy access to current selection + // for asset tracks) + BOOL isSelected = (i == 0); + + FVPAssetAudioTrackData *trackData = [FVPAssetAudioTrackData makeWithTrackId:track.trackID + label:label + language:language + isSelected:isSelected + bitrate:bitrate + sampleRate:sampleRate + channelCount:channelCount + codec:codec]; + + [assetTracks addObject:trackData]; + } + + // Return asset tracks (even if empty), media selection tracks should be nil + return [FVPNativeAudioTrackData makeWithAssetTracks:assetTracks mediaSelectionTracks:nil]; +} + +- (void)selectAudioTrack:(nonnull NSString *)trackId + error:(FlutterError *_Nullable __autoreleasing *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + if (!currentItem || !currentItem.asset) { + return; + } + + AVAsset *asset = currentItem.asset; + + // Check if this is a media selection track (for HLS streams) + if ([trackId hasPrefix:@"media_selection_"]) { + AVMediaSelectionGroup *audioGroup = + [asset 
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + if (audioGroup && audioGroup.options.count > 0) { + // Parse the track ID to get the index + NSString *indexString = [trackId substringFromIndex:[@"media_selection_" length]]; + NSInteger index = [indexString integerValue]; + + if (index >= 0 && index < audioGroup.options.count) { + AVMediaSelectionOption *option = audioGroup.options[index]; + [currentItem selectMediaOption:option inMediaSelectionGroup:audioGroup]; + } + } + } + // For asset tracks, we don't have a direct way to select them in AVFoundation + // This would require more complex track selection logic that's not commonly used +} + #pragma mark - Private - (int64_t)duration { diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h index 311e25dbab4..6b431d49295 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h @@ -16,6 +16,10 @@ NS_ASSUME_NONNULL_BEGIN @class FVPPlatformVideoViewCreationParams; @class FVPCreationOptions; @class FVPTexturePlayerIds; +@class FVPAudioTrackMessage; +@class FVPAssetAudioTrackData; +@class FVPMediaSelectionAudioTrackData; +@class FVPNativeAudioTrackData; /// Information passed to the platform view creation. @interface FVPPlatformVideoViewCreationParams : NSObject @@ -42,6 +46,78 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, assign) NSInteger textureId; @end +/// Represents an audio track in a video. +@interface FVPAudioTrackMessage : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, copy) NSString *id; +@property(nonatomic, copy) NSString *label; +@property(nonatomic, copy) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVAssetTrack (for regular assets). +@interface FVPAssetAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. 
+- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, assign) NSInteger trackId; +@property(nonatomic, copy, nullable) NSString *label; +@property(nonatomic, copy, nullable) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +@interface FVPMediaSelectionAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle; +@property(nonatomic, assign) NSInteger index; +@property(nonatomic, copy, nullable) NSString *displayName; +@property(nonatomic, copy, nullable) NSString *languageCode; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, copy, nullable) NSString *commonMetadataTitle; +@end + +/// Container for raw audio track data from native platforms. +@interface FVPNativeAudioTrackData : NSObject ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks; +/// Asset-based tracks (for regular video files) +@property(nonatomic, copy, nullable) NSArray *assetTracks; +/// Media selection-based tracks (for HLS streams) +@property(nonatomic, copy, nullable) + NSArray *mediaSelectionTracks; +@end + /// The codec used by all APIs. NSObject *FVPGetMessagesCodec(void); @@ -78,6 +154,9 @@ extern void SetUpFVPAVFoundationVideoPlayerApiWithSuffix( - (void)seekTo:(NSInteger)position completion:(void (^)(FlutterError *_Nullable))completion; - (void)pauseWithError:(FlutterError *_Nullable *_Nonnull)error; - (void)disposeWithError:(FlutterError *_Nullable *_Nonnull)error; +/// @return `nil` only when `error != nil`. 
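
As declared above, `FVPNativeAudioTrackData` carries either asset-based tracks (regular files) or media-selection tracks (HLS), and the `FVPVideoPlayer.m` implementation in this diff never populates both at once. A small Dart sketch over the corresponding generated Dart class (`NativeAudioTrackData` in the AVFoundation `messages.g.dart` later in this diff), assuming that file is importable as `package:video_player_avfoundation/src/messages.g.dart`; the helper names are hypothetical.

```dart
import 'package:video_player_avfoundation/src/messages.g.dart';

/// True when the data came from an HLS-style media selection group rather
/// than plain AVAssetTrack enumeration (per the FVPVideoPlayer.m logic).
bool isMediaSelectionSource(NativeAudioTrackData data) =>
    data.mediaSelectionTracks != null;

/// Total number of reported audio tracks regardless of the source kind.
int totalAudioTracks(NativeAudioTrackData data) =>
    (data.assetTracks?.length ?? 0) + (data.mediaSelectionTracks?.length ?? 0);
```
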
+- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error; +- (void)selectAudioTrack:(NSString *)trackId error:(FlutterError *_Nullable *_Nonnull)error; @end extern void SetUpFVPVideoPlayerInstanceApi(id binaryMessenger, diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m index 172807b1347..d43a64bcef2 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m @@ -48,6 +48,30 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list; - (NSArray *)toList; @end +@interface FVPAudioTrackMessage () ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list; ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPAssetAudioTrackData () ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPMediaSelectionAudioTrackData () ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPNativeAudioTrackData () ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + @implementation FVPPlatformVideoViewCreationParams + (instancetype)makeWithPlayerId:(NSInteger)playerId { FVPPlatformVideoViewCreationParams *pigeonResult = @@ -120,6 +144,167 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list { } @end +@implementation FVPAudioTrackMessage ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = id; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = GetNullableObjectAtIndex(list, 0); + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPAudioTrackMessage fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.id ?: [NSNull null], + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPAssetAudioTrackData ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = trackId; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPAssetAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.trackId), + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPMediaSelectionAudioTrackData ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = index; + pigeonResult.displayName = displayName; + pigeonResult.languageCode = languageCode; + pigeonResult.isSelected = isSelected; + pigeonResult.commonMetadataTitle = commonMetadataTitle; + return pigeonResult; +} ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.displayName = GetNullableObjectAtIndex(list, 1); + pigeonResult.languageCode = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.commonMetadataTitle = GetNullableObjectAtIndex(list, 4); + return pigeonResult; +} ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPMediaSelectionAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.index), + self.displayName ?: [NSNull null], + self.languageCode ?: [NSNull null], + @(self.isSelected), + self.commonMetadataTitle ?: [NSNull null], + ]; +} +@end + +@implementation FVPNativeAudioTrackData ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = assetTracks; + pigeonResult.mediaSelectionTracks = mediaSelectionTracks; + return pigeonResult; +} ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = GetNullableObjectAtIndex(list, 0); + pigeonResult.mediaSelectionTracks = GetNullableObjectAtIndex(list, 1); + return pigeonResult; +} ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPNativeAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.assetTracks ?: [NSNull null], + self.mediaSelectionTracks ?: [NSNull null], + ]; +} +@end + @interface FVPMessagesPigeonCodecReader : FlutterStandardReader @end @implementation FVPMessagesPigeonCodecReader @@ -131,6 +316,14 @@ - (nullable id)readValueOfType:(UInt8)type { return [FVPCreationOptions fromList:[self readValue]]; case 131: return [FVPTexturePlayerIds fromList:[self readValue]]; + case 132: + return [FVPAudioTrackMessage fromList:[self readValue]]; + case 133: + return [FVPAssetAudioTrackData fromList:[self readValue]]; + case 134: + return [FVPMediaSelectionAudioTrackData fromList:[self readValue]]; + case 135: + return [FVPNativeAudioTrackData fromList:[self readValue]]; default: return [super readValueOfType:type]; } @@ -150,6 +343,18 @@ - (void)writeValue:(id)value { } else if ([value isKindOfClass:[FVPTexturePlayerIds class]]) { [self writeByte:131]; [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAudioTrackMessage class]]) { + [self writeByte:132]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAssetAudioTrackData class]]) { + [self writeByte:133]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPMediaSelectionAudioTrackData class]]) { + [self writeByte:134]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPNativeAudioTrackData class]]) { + [self writeByte:135]; + [self writeValue:[value toList]]; } else { [super writeValue:value]; } @@ -502,4 +707,49 @@ void SetUpFVPVideoPlayerInstanceApiWithSuffix(id binaryM [channel setMessageHandler:nil]; } } + { + FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] + initWithName:[NSString stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.video_player_avfoundation." 
+ @"VideoPlayerInstanceApi.getAudioTracks", + messageChannelSuffix] + binaryMessenger:binaryMessenger + codec:FVPGetMessagesCodec()]; + if (api) { + NSCAssert([api respondsToSelector:@selector(getAudioTracks:)], + @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to @selector(getAudioTracks:)", + api); + [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { + FlutterError *error; + FVPNativeAudioTrackData *output = [api getAudioTracks:&error]; + callback(wrapResult(output, error)); + }]; + } else { + [channel setMessageHandler:nil]; + } + } + { + FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] + initWithName:[NSString stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.video_player_avfoundation." + @"VideoPlayerInstanceApi.selectAudioTrack", + messageChannelSuffix] + binaryMessenger:binaryMessenger + codec:FVPGetMessagesCodec()]; + if (api) { + NSCAssert([api respondsToSelector:@selector(selectAudioTrack:error:)], + @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to " + @"@selector(selectAudioTrack:error:)", + api); + [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { + NSArray *args = message; + NSString *arg_trackId = GetNullableObjectAtIndex(args, 0); + FlutterError *error; + [api selectAudioTrack:arg_trackId error:&error]; + callback(wrapResult(nil, error)); + }]; + } else { + [channel setMessageHandler:nil]; + } + } } diff --git a/packages/video_player/video_player_avfoundation/example/pubspec.yaml b/packages/video_player/video_player_avfoundation/example/pubspec.yaml index 8d52a355e9d..1f09101e4a1 100644 --- a/packages/video_player/video_player_avfoundation/example/pubspec.yaml +++ b/packages/video_player/video_player_avfoundation/example/pubspec.yaml @@ -31,3 +31,7 @@ flutter: assets: - assets/flutter-mark-square-64.png - assets/Butterfly-209.mp4 +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart index a9d3184b63e..c9b7e09eaa9 100644 --- a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart +++ b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart @@ -212,6 +212,56 @@ class AVFoundationVideoPlayer extends VideoPlayerPlatform { return _api.setMixWithOthers(mixWithOthers); } + @override + Future> getAudioTracks(int playerId) async { + final NativeAudioTrackData nativeData = + await _playerWith(id: playerId).getAudioTracks(); + final List tracks = []; + + // Convert asset tracks to VideoAudioTrack + if (nativeData.assetTracks != null) { + for (final AssetAudioTrackData track in nativeData.assetTracks!) { + tracks.add( + VideoAudioTrack( + id: track.trackId.toString(), + label: track.label ?? 'Unknown', + language: track.language ?? 
'und', + isSelected: track.isSelected, + bitrate: track.bitrate, + sampleRate: track.sampleRate, + channelCount: track.channelCount, + codec: track.codec, + ), + ); + } + } + + // Convert media selection tracks to VideoAudioTrack (for HLS streams) + if (nativeData.mediaSelectionTracks != null) { + for (final MediaSelectionAudioTrackData track + in nativeData.mediaSelectionTracks!) { + final String trackId = 'media_selection_${track.index}'; + final String label = + track.commonMetadataTitle ?? track.displayName ?? 'Unknown'; + tracks.add( + VideoAudioTrack( + id: trackId, + label: label, + language: track.languageCode ?? 'und', + isSelected: track.isSelected, + ), + ); + } + } + + return tracks; + } + + @override + Future selectAudioTrack(int playerId, String trackId) { + return _playerWith(id: playerId).selectAudioTrack(trackId); + } + @override Widget buildView(int playerId) { return buildViewWithOptions(VideoViewOptions(playerId: playerId)); diff --git a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart index 5fe36c52683..0b4b6df3bc9 100644 --- a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart +++ b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart @@ -154,6 +154,267 @@ class TexturePlayerIds { int get hashCode => Object.hashAll(_toList()); } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + + String label; + + String language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AudioTrackMessage decode(Object result) { + result as List; + return AudioTrackMessage( + id: result[0]! as String, + label: result[1]! as String, + language: result[2]! as String, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AudioTrackMessage || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from AVAssetTrack (for regular assets). +class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + + String? label; + + String? language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + trackId, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AssetAudioTrackData decode(Object result) { + result as List; + return AssetAudioTrackData( + trackId: result[0]! 
as int, + label: result[1] as String?, + language: result[2] as String?, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AssetAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + + String? displayName; + + String? languageCode; + + bool isSelected; + + String? commonMetadataTitle; + + List _toList() { + return [ + index, + displayName, + languageCode, + isSelected, + commonMetadataTitle, + ]; + } + + Object encode() { + return _toList(); + } + + static MediaSelectionAudioTrackData decode(Object result) { + result as List; + return MediaSelectionAudioTrackData( + index: result[0]! as int, + displayName: result[1] as String?, + languageCode: result[2] as String?, + isSelected: result[3]! as bool, + commonMetadataTitle: result[4] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! MediaSelectionAudioTrackData || + other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? mediaSelectionTracks; + + List _toList() { + return [assetTracks, mediaSelectionTracks]; + } + + Object encode() { + return _toList(); + } + + static NativeAudioTrackData decode(Object result) { + result as List; + return NativeAudioTrackData( + assetTracks: (result[0] as List?)?.cast(), + mediaSelectionTracks: + (result[1] as List?)?.cast(), + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! 
NativeAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -170,6 +431,18 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is TexturePlayerIds) { buffer.putUint8(131); writeValue(buffer, value.encode()); + } else if (value is AudioTrackMessage) { + buffer.putUint8(132); + writeValue(buffer, value.encode()); + } else if (value is AssetAudioTrackData) { + buffer.putUint8(133); + writeValue(buffer, value.encode()); + } else if (value is MediaSelectionAudioTrackData) { + buffer.putUint8(134); + writeValue(buffer, value.encode()); + } else if (value is NativeAudioTrackData) { + buffer.putUint8(135); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -184,6 +457,14 @@ class _PigeonCodec extends StandardMessageCodec { return CreationOptions.decode(readValue(buffer)!); case 131: return TexturePlayerIds.decode(readValue(buffer)!); + case 132: + return AudioTrackMessage.decode(readValue(buffer)!); + case 133: + return AssetAudioTrackData.decode(readValue(buffer)!); + case 134: + return MediaSelectionAudioTrackData.decode(readValue(buffer)!); + case 135: + return NativeAudioTrackData.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -580,4 +861,61 @@ class VideoPlayerInstanceApi { return; } } + + Future getAudioTracks() async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.getAudioTracks$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as NativeAudioTrackData?)!; + } + } + + Future selectAudioTrack(String trackId) async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.selectAudioTrack$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [trackId], + ); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } diff --git a/packages/video_player/video_player_avfoundation/pigeons/messages.dart b/packages/video_player/video_player_avfoundation/pigeons/messages.dart index 0fb40d59e80..076bf86a60c 100644 --- a/packages/video_player/video_player_avfoundation/pigeons/messages.dart +++ b/packages/video_player/video_player_avfoundation/pigeons/messages.dart @@ -39,6 +39,80 @@ class TexturePlayerIds { final int textureId; } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + String label; + String language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVAssetTrack (for regular assets). +class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + String? label; + String? language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + String? displayName; + String? languageCode; + bool isSelected; + String? commonMetadataTitle; +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? mediaSelectionTracks; +} + @HostApi() abstract class AVFoundationVideoPlayerApi { @ObjCSelector('initialize') @@ -72,4 +146,8 @@ abstract class VideoPlayerInstanceApi { void seekTo(int position); void pause(); void dispose(); + @ObjCSelector('getAudioTracks') + NativeAudioTrackData getAudioTracks(); + @ObjCSelector('selectAudioTrack:') + void selectAudioTrack(String trackId); } diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml index 8675038ba86..60280ea543b 100644 --- a/packages/video_player/video_player_avfoundation/pubspec.yaml +++ b/packages/video_player/video_player_avfoundation/pubspec.yaml @@ -36,3 +36,7 @@ dev_dependencies: topics: - video - video-player +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. 
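The Pigeon definitions above only carry raw per-track fields; the Dart implementation earlier in this diff (`avfoundation_video_player.dart`) flattens them into the platform-interface `VideoAudioTrack`, keying asset-based tracks by their numeric `trackId` and HLS media-selection options by a `media_selection_<index>` ID. Below is a minimal sketch of that mapping; the helper names are hypothetical and are not part of the plugin API — the real conversion is inlined in `getAudioTracks()`.

```dart
import 'package:video_player_platform_interface/video_player_platform_interface.dart';

/// Hypothetical helper mirroring the asset-track branch of getAudioTracks().
VideoAudioTrack trackFromAssetData({
  required int trackId,
  String? label,
  String? language,
  required bool isSelected,
  int? bitrate,
  int? sampleRate,
  int? channelCount,
  String? codec,
}) {
  return VideoAudioTrack(
    id: trackId.toString(), // asset tracks are keyed by their numeric track ID
    label: label ?? 'Unknown',
    language: language ?? 'und', // 'und' = undetermined
    isSelected: isSelected,
    bitrate: bitrate,
    sampleRate: sampleRate,
    channelCount: channelCount,
    codec: codec,
  );
}

/// Hypothetical helper mirroring the HLS media-selection branch.
VideoAudioTrack trackFromMediaSelectionData({
  required int index,
  String? displayName,
  String? commonMetadataTitle,
  String? languageCode,
  required bool isSelected,
}) {
  return VideoAudioTrack(
    id: 'media_selection_$index', // keeps HLS options distinct from asset-track IDs
    label: commonMetadataTitle ?? displayName ?? 'Unknown',
    language: languageCode ?? 'und',
    isSelected: isSelected,
  );
}
```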
+# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player_platform_interface/CHANGELOG.md b/packages/video_player/video_player_platform_interface/CHANGELOG.md index b6f5a93013f..e94f7bf0f81 100644 --- a/packages/video_player/video_player_platform_interface/CHANGELOG.md +++ b/packages/video_player/video_player_platform_interface/CHANGELOG.md @@ -1,5 +1,6 @@ ## NEXT +* Adds the `VideoAudioTrack` class and the `getAudioTracks()` and `selectAudioTrack()` methods to the platform interface for audio track management. * Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. ## 6.4.0 diff --git a/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart b/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart index 3b562d1ff43..32d4cc17e5e 100644 --- a/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart +++ b/packages/video_player/video_player_platform_interface/lib/video_player_platform_interface.dart @@ -121,6 +121,16 @@ abstract class VideoPlayerPlatform extends PlatformInterface { Future<void> setWebOptions(int playerId, VideoPlayerWebOptions options) { throw UnimplementedError('setWebOptions() has not been implemented.'); } + + /// Gets the available audio tracks for the video. + Future<List<VideoAudioTrack>> getAudioTracks(int playerId) { + throw UnimplementedError('getAudioTracks() has not been implemented.'); + } + + /// Selects an audio track by its ID. + Future<void> selectAudioTrack(int playerId, String trackId) { + throw UnimplementedError('selectAudioTrack() has not been implemented.'); + } } class _PlaceholderImplementation extends VideoPlayerPlatform {} @@ -529,3 +539,86 @@ class VideoCreationOptions { /// The type of view to be used for displaying the video player final VideoViewType viewType; } + +/// Represents an audio track in a video with its metadata. +@immutable +class VideoAudioTrack { + /// Constructs an instance of [VideoAudioTrack]. + const VideoAudioTrack({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + /// Unique identifier for the audio track. + final String id; + + /// Human-readable label for the track. + final String label; + + /// Language code of the audio track (e.g., 'en', 'es', 'und'). + final String language; + + /// Whether this track is currently selected. + final bool isSelected; + + /// Bitrate of the audio track in bits per second. + /// May be null if not available from the platform. + final int? bitrate; + + /// Sample rate of the audio track in Hz. + /// May be null if not available from the platform. + final int? sampleRate; + + /// Number of audio channels. + /// May be null if not available from the platform. + final int? channelCount; + + /// Audio codec used (e.g., 'aac', 'mp3', 'ac3'). + /// May be null if not available from the platform. + final String?
codec; + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is VideoAudioTrack && + runtimeType == other.runtimeType && + id == other.id && + label == other.label && + language == other.language && + isSelected == other.isSelected && + bitrate == other.bitrate && + sampleRate == other.sampleRate && + channelCount == other.channelCount && + codec == other.codec; + } + + @override + int get hashCode => Object.hash( + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ); + + @override + String toString() => + 'VideoAudioTrack(' + 'id: $id, ' + 'label: $label, ' + 'language: $language, ' + 'isSelected: $isSelected, ' + 'bitrate: $bitrate, ' + 'sampleRate: $sampleRate, ' + 'channelCount: $channelCount, ' + 'codec: $codec)'; +} diff --git a/packages/video_player/video_player_platform_interface/pubspec.yaml b/packages/video_player/video_player_platform_interface/pubspec.yaml index 647225dd5bb..2c53743addb 100644 --- a/packages/video_player/video_player_platform_interface/pubspec.yaml +++ b/packages/video_player/video_player_platform_interface/pubspec.yaml @@ -4,7 +4,7 @@ repository: https://github.com/flutter/packages/tree/main/packages/video_player/ issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22 # NOTE: We strongly prefer non-breaking changes, even at the expense of a # less-clean API. See https://flutter.dev/go/platform-interface-breaking-changes -version: 6.4.0 +version: 6.5.0 environment: sdk: ^3.7.0 diff --git a/packages/video_player/video_player_web/example/integration_test/pkg_web_tweaks.dart b/packages/video_player/video_player_web/example/integration_test/pkg_web_tweaks.dart index f2c2fffb82f..e1db949a29c 100644 --- a/packages/video_player/video_player_web/example/integration_test/pkg_web_tweaks.dart +++ b/packages/video_player/video_player_web/example/integration_test/pkg_web_tweaks.dart @@ -57,7 +57,8 @@ extension type Descriptor._(JSObject _) implements JSObject { factory Descriptor.accessor({ void Function(JSAny? value)? set, JSAny? Function()? get, - }) => Descriptor._accessor(set: set?.toJS, get: get?.toJS); + }) => + Descriptor._accessor(set: set?.toJS, get: get?.toJS); external factory Descriptor._accessor({ // JSBoolean configurable, diff --git a/packages/video_player/video_player_web/example/integration_test/video_player_test.dart b/packages/video_player/video_player_web/example/integration_test/video_player_test.dart index 78c85c99d1b..db36143f6f6 100644 --- a/packages/video_player/video_player_web/example/integration_test/video_player_test.dart +++ b/packages/video_player/video_player_web/example/integration_test/video_player_test.dart @@ -24,19 +24,17 @@ void main() { setUp(() { // Never set "src" on the video, so this test doesn't hit the network! 
- video = - web.HTMLVideoElement() - ..controls = true - ..playsInline = false; + video = web.HTMLVideoElement() + ..controls = true + ..playsInline = false; }); testWidgets('initialize() calls load', (WidgetTester _) async { bool loadCalled = false; - video['load'] = - () { - loadCalled = true; - }.toJS; + video['load'] = () { + loadCalled = true; + }.toJS; VideoPlayer(videoElement: video).initialize(); @@ -193,17 +191,15 @@ void main() { WidgetTester tester, ) async { // Take all the "buffering" events that we see during the next few seconds - final Future> stream = - timedStream - .where( - (VideoEvent event) => - bufferingEvents.contains(event.eventType), - ) - .map( - (VideoEvent event) => - event.eventType == VideoEventType.bufferingStart, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => bufferingEvents.contains(event.eventType), + ) + .map( + (VideoEvent event) => + event.eventType == VideoEventType.bufferingStart, + ) + .toList(); // Simulate some events coming from the player... player.setBuffering(true); @@ -226,17 +222,15 @@ void main() { WidgetTester tester, ) async { // Take all the "buffering" events that we see during the next few seconds - final Future> stream = - timedStream - .where( - (VideoEvent event) => - bufferingEvents.contains(event.eventType), - ) - .map( - (VideoEvent event) => - event.eventType == VideoEventType.bufferingStart, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => bufferingEvents.contains(event.eventType), + ) + .map( + (VideoEvent event) => + event.eventType == VideoEventType.bufferingStart, + ) + .toList(); player.setBuffering(true); @@ -253,17 +247,15 @@ void main() { WidgetTester tester, ) async { // Take all the "buffering" events that we see during the next few seconds - final Future> stream = - timedStream - .where( - (VideoEvent event) => - bufferingEvents.contains(event.eventType), - ) - .map( - (VideoEvent event) => - event.eventType == VideoEventType.bufferingStart, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => bufferingEvents.contains(event.eventType), + ) + .map( + (VideoEvent event) => + event.eventType == VideoEventType.bufferingStart, + ) + .toList(); player.setBuffering(true); @@ -285,13 +277,12 @@ void main() { video.dispatchEvent(web.Event('canplay')); // Take all the "initialized" events that we see during the next few seconds - final Future> stream = - timedStream - .where( - (VideoEvent event) => - event.eventType == VideoEventType.initialized, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => + event.eventType == VideoEventType.initialized, + ) + .toList(); video.dispatchEvent(web.Event('canplay')); video.dispatchEvent(web.Event('canplay')); @@ -309,13 +300,12 @@ void main() { video.dispatchEvent(web.Event('loadedmetadata')); video.dispatchEvent(web.Event('loadedmetadata')); - final Future> stream = - timedStream - .where( - (VideoEvent event) => - event.eventType == VideoEventType.initialized, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => + event.eventType == VideoEventType.initialized, + ) + .toList(); final List events = await stream; @@ -328,13 +318,12 @@ void main() { video.dispatchEvent(web.Event('loadeddata')); video.dispatchEvent(web.Event('loadeddata')); - final Future> stream = - timedStream - .where( - (VideoEvent event) => - event.eventType == VideoEventType.initialized, - ) - .toList(); + final Future> stream = 
timedStream + .where( + (VideoEvent event) => + event.eventType == VideoEventType.initialized, + ) + .toList(); final List events = await stream; @@ -346,13 +335,12 @@ void main() { setInfinityDuration(video); expect(video.duration.isInfinite, isTrue); - final Future> stream = - timedStream - .where( - (VideoEvent event) => - event.eventType == VideoEventType.initialized, - ) - .toList(); + final Future> stream = timedStream + .where( + (VideoEvent event) => + event.eventType == VideoEventType.initialized, + ) + .toList(); video.dispatchEvent(web.Event('canplay')); diff --git a/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart b/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart index 9a2cd8c8e85..85618809109 100644 --- a/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart +++ b/packages/video_player/video_player_web/example/integration_test/video_player_web_test.dart @@ -124,19 +124,19 @@ void main() { ) async { final int videoPlayerId = (await VideoPlayerPlatform.instance.createWithOptions( - VideoCreationOptions( - dataSource: DataSource( - sourceType: DataSourceType.network, - uri: getUrlForAssetAsNetworkSource( - 'assets/__non_existent.webm', - ), - ), - viewType: VideoViewType.platformView, + VideoCreationOptions( + dataSource: DataSource( + sourceType: DataSourceType.network, + uri: getUrlForAssetAsNetworkSource( + 'assets/__non_existent.webm', ), - ))!; + ), + viewType: VideoViewType.platformView, + ), + ))!; - final Stream eventStream = VideoPlayerPlatform.instance - .videoEventsFor(videoPlayerId); + final Stream eventStream = + VideoPlayerPlatform.instance.videoEventsFor(videoPlayerId); // Mute video to allow autoplay (See https://goo.gl/xX8pDD) await VideoPlayerPlatform.instance.setVolume(videoPlayerId, 0); @@ -207,18 +207,15 @@ void main() { 'double call to play will emit a single isPlayingStateUpdate event', (WidgetTester tester) async { final int videoPlayerId = await playerId; - final Stream eventStream = VideoPlayerPlatform.instance - .videoEventsFor(videoPlayerId); - - final Future> stream = - eventStream - .timeout( - const Duration(seconds: 2), - onTimeout: (EventSink sink) { - sink.close(); - }, - ) - .toList(); + final Stream eventStream = + VideoPlayerPlatform.instance.videoEventsFor(videoPlayerId); + + final Future> stream = eventStream.timeout( + const Duration(seconds: 2), + onTimeout: (EventSink sink) { + sink.close(); + }, + ).toList(); await VideoPlayerPlatform.instance.setVolume(videoPlayerId, 0); await VideoPlayerPlatform.instance.play(videoPlayerId); @@ -250,18 +247,15 @@ void main() { 'video playback lifecycle', (WidgetTester tester) async { final int videoPlayerId = await playerId; - final Stream eventStream = VideoPlayerPlatform.instance - .videoEventsFor(videoPlayerId); - - final Future> stream = - eventStream - .timeout( - const Duration(seconds: 2), - onTimeout: (EventSink sink) { - sink.close(); - }, - ) - .toList(); + final Stream eventStream = + VideoPlayerPlatform.instance.videoEventsFor(videoPlayerId); + + final Future> stream = eventStream.timeout( + const Duration(seconds: 2), + onTimeout: (EventSink sink) { + sink.close(); + }, + ).toList(); await VideoPlayerPlatform.instance.setVolume(videoPlayerId, 0); await VideoPlayerPlatform.instance.play(videoPlayerId); diff --git a/packages/video_player/video_player_web/example/pubspec.yaml b/packages/video_player/video_player_web/example/pubspec.yaml index e3bce694990..c11ee9cfc8b 
100644 --- a/packages/video_player/video_player_web/example/pubspec.yaml +++ b/packages/video_player/video_player_web/example/pubspec.yaml @@ -18,3 +18,7 @@ dev_dependencies: sdk: flutter integration_test: sdk: flutter +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../../packages/video_player/video_player_platform_interface} diff --git a/packages/video_player/video_player_web/lib/src/video_player.dart b/packages/video_player/video_player_web/lib/src/video_player.dart index 3791fe5395a..fa314c8698f 100644 --- a/packages/video_player/video_player_web/lib/src/video_player.dart +++ b/packages/video_player/video_player_web/lib/src/video_player.dart @@ -42,8 +42,8 @@ class VideoPlayer { VideoPlayer({ required web.HTMLVideoElement videoElement, @visibleForTesting StreamController? eventController, - }) : _videoElement = videoElement, - _eventController = eventController ?? StreamController(); + }) : _videoElement = videoElement, + _eventController = eventController ?? StreamController(); final StreamController _eventController; final web.HTMLVideoElement _videoElement; @@ -313,13 +313,12 @@ class VideoPlayer { _videoElement.duration, ); - final Size? size = - _videoElement.videoHeight.isFinite - ? Size( - _videoElement.videoWidth.toDouble(), - _videoElement.videoHeight.toDouble(), - ) - : null; + final Size? size = _videoElement.videoHeight.isFinite + ? Size( + _videoElement.videoWidth.toDouble(), + _videoElement.videoHeight.toDouble(), + ) + : null; _eventController.add( VideoEvent( @@ -340,10 +339,9 @@ class VideoPlayer { _isBuffering = buffering; _eventController.add( VideoEvent( - eventType: - _isBuffering - ? VideoEventType.bufferingStart - : VideoEventType.bufferingEnd, + eventType: _isBuffering + ? VideoEventType.bufferingStart + : VideoEventType.bufferingEnd, ), ); } diff --git a/packages/video_player/video_player_web/lib/video_player_web.dart b/packages/video_player/video_player_web/lib/video_player_web.dart index 5fdc71a8db5..cbcf20b95bf 100644 --- a/packages/video_player/video_player_web/lib/video_player_web.dart +++ b/packages/video_player/video_player_web/lib/video_player_web.dart @@ -90,12 +90,11 @@ class VideoPlayerPlugin extends VideoPlayerPlatform { ); } - final web.HTMLVideoElement videoElement = - web.HTMLVideoElement() - ..id = 'videoElement-$playerId' - ..style.border = 'none' - ..style.height = '100%' - ..style.width = '100%'; + final web.HTMLVideoElement videoElement = web.HTMLVideoElement() + ..id = 'videoElement-$playerId' + ..style.border = 'none' + ..style.height = '100%' + ..style.width = '100%'; // TODO(hterkelsen): Use initialization parameters once they are available ui_web.platformViewRegistry.registerViewFactory( diff --git a/packages/video_player/video_player_web/pubspec.yaml b/packages/video_player/video_player_web/pubspec.yaml index ca36ffe35ee..d8ea9bd434b 100644 --- a/packages/video_player/video_player_web/pubspec.yaml +++ b/packages/video_player/video_player_web/pubspec.yaml @@ -31,3 +31,7 @@ dev_dependencies: topics: - video - video-player +# FOR TESTING AND INITIAL REVIEW ONLY. DO NOT MERGE. +# See https://github.com/flutter/flutter/blob/master/docs/ecosystem/contributing/README.md#changing-federated-plugins +dependency_overrides: + video_player_platform_interface: {path: ../../../packages/video_player/video_player_platform_interface}
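Taken together, the platform-interface additions above can be exercised directly against `VideoPlayerPlatform.instance` once a player exists. A minimal sketch, assuming `playerId` comes from an earlier `createWithOptions()` call; platforms that have not implemented the new methods will throw `UnimplementedError` from the defaults shown above.

```dart
import 'package:flutter/foundation.dart';
import 'package:video_player_platform_interface/video_player_platform_interface.dart';

/// Lists the reported audio tracks and switches to the first one matching
/// [language], if it is not already selected. Illustrative only.
Future<void> selectFirstTrackForLanguage(int playerId, String language) async {
  final VideoPlayerPlatform platform = VideoPlayerPlatform.instance;

  final List<VideoAudioTrack> tracks = await platform.getAudioTracks(playerId);
  for (final VideoAudioTrack track in tracks) {
    // VideoAudioTrack.toString() already includes id, label, language, etc.
    debugPrint('$track');
  }

  for (final VideoAudioTrack track in tracks) {
    if (track.language == language && !track.isSelected) {
      await platform.selectAudioTrack(playerId, track.id);
      break;
    }
  }
}
```

On iOS/macOS this routes through the Pigeon `VideoPlayerInstanceApi.getAudioTracks`/`selectAudioTrack` channels added in this change; the other platform packages in this diff only contain formatting and dependency-override updates, so they keep the `UnimplementedError` defaults until they add their own implementations.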