diff --git a/packages/video_player/video_player/CHANGELOG.md b/packages/video_player/video_player/CHANGELOG.md index a618516e8863..3f70c390c861 100644 --- a/packages/video_player/video_player/CHANGELOG.md +++ b/packages/video_player/video_player/CHANGELOG.md @@ -1,6 +1,7 @@ -## NEXT +## 2.11.0 -* Updates minimum supported SDK version to Flutter 3.35/Dart 3.9. +* Adds `getAudioTracks()` and `selectAudioTrack()` methods to retrieve and select available audio tracks. +* Updates minimum supported SDK version to Flutter 3.38/Dart 3.10. * Updates README to reflect currently supported OS versions for the latest versions of the endorsed platform implementations. * Applications built with older versions of Flutter will continue to diff --git a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj index a003785afc3a..2c7c8b508260 100644 --- a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj @@ -239,6 +239,23 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 40E43985C26639614BC3B419 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = 
PBXShellScriptBuildPhase; alwaysOutOfDate = 1; diff --git a/packages/video_player/video_player/example/lib/audio_tracks_demo.dart b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart new file mode 100644 index 000000000000..c485db7efc63 --- /dev/null +++ b/packages/video_player/video_player/example/lib/audio_tracks_demo.dart @@ -0,0 +1,379 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import 'dart:collection'; + +import 'package:flutter/material.dart'; +import 'package:video_player/video_player.dart'; + +/// A demo page that showcases audio track functionality. +class AudioTracksDemo extends StatefulWidget { + /// Creates an AudioTracksDemo widget. + const AudioTracksDemo({super.key}); + + @override + State createState() => _AudioTracksDemoState(); +} + +class _AudioTracksDemoState extends State { + VideoPlayerController? _controller; + List _audioTracks = []; + bool _isLoading = false; + String? 
_error; + + // Track previous state to detect relevant changes + bool _wasPlaying = false; + bool _wasInitialized = false; + + // Sample video URLs with multiple audio tracks + static const List _sampleVideos = [ + 'https://flutter.github.io/assets-for-api-docs/assets/videos/butterfly.mp4', + 'https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_16x9/bipbop_16x9_variant.m3u8', + ]; + + int _selectedVideoIndex = 0; + + @override + void initState() { + super.initState(); + _initializeVideo(); + } + + Future _initializeVideo() async { + setState(() { + _isLoading = true; + _error = null; + }); + + try { + await _controller?.dispose(); + + final controller = VideoPlayerController.networkUrl( + Uri.parse(_sampleVideos[_selectedVideoIndex]), + ); + _controller = controller; + + await controller.initialize(); + + // Add listener for video player state changes + controller.addListener(_onVideoPlayerValueChanged); + + // Initialize tracking variables + _wasPlaying = controller.value.isPlaying; + _wasInitialized = controller.value.isInitialized; + + // Get audio tracks after initialization + await _loadAudioTracks(); + if (!mounted) { + return; + } + setState(() { + _isLoading = false; + }); + } catch (e) { + if (!mounted) { + return; + } + setState(() { + _error = 'Failed to initialize video: $e'; + _isLoading = false; + }); + } + } + + Future _loadAudioTracks() async { + final VideoPlayerController? controller = _controller; + if (controller == null || !controller.value.isInitialized) { + return; + } + + try { + final List tracks = await controller.getAudioTracks(); + if (!mounted) { + return; + } + setState(() { + _audioTracks = tracks; + }); + } catch (e) { + if (!mounted) { + return; + } + setState(() { + _error = 'Failed to load audio tracks: $e'; + }); + } + } + + Future _selectAudioTrack(String trackId) async { + final VideoPlayerController? 
controller = _controller; + if (controller == null) { + return; + } + + try { + await controller.selectAudioTrack(trackId); + + // Reload tracks to update selection status + await _loadAudioTracks(); + + if (!mounted) { + return; + } + ScaffoldMessenger.of( + context, + ).showSnackBar(SnackBar(content: Text('Selected audio track: $trackId'))); + } catch (e) { + if (!mounted) { + return; + } + ScaffoldMessenger.of(context).showSnackBar( + SnackBar(content: Text('Failed to select audio track: $e')), + ); + } + } + + void _onVideoPlayerValueChanged() { + final VideoPlayerController? controller = _controller; + if (controller == null) { + return; + } + + final VideoPlayerValue currentValue = controller.value; + var shouldUpdate = false; + + // Check for relevant state changes that affect UI + if (currentValue.isPlaying != _wasPlaying) { + _wasPlaying = currentValue.isPlaying; + shouldUpdate = true; + } + + if (currentValue.isInitialized != _wasInitialized) { + _wasInitialized = currentValue.isInitialized; + shouldUpdate = true; + } + + // Only call setState if there are relevant changes + if (shouldUpdate && mounted) { + setState(() {}); + } + } + + @override + void dispose() { + _controller?.removeListener(_onVideoPlayerValueChanged); + _controller?.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Audio Tracks Demo'), + backgroundColor: Theme.of(context).colorScheme.inversePrimary, + ), + body: Column( + children: [ + // Video selection dropdown + Padding( + padding: const EdgeInsets.all(16.0), + child: DropdownMenu( + initialSelection: _selectedVideoIndex, + label: const Text('Select Video'), + inputDecorationTheme: const InputDecorationTheme( + border: OutlineInputBorder(), + ), + dropdownMenuEntries: _sampleVideos.indexed.map(( + (int, String) record, + ) { + final (int index, _) = record; + return DropdownMenuEntry( + value: index, + label: 'Video ${index + 1}', + ); + 
}).toList(), + onSelected: (int? value) { + // value is null when the menu is dismissed without selection + if (value != null && value != _selectedVideoIndex) { + setState(() { + _selectedVideoIndex = value; + }); + _initializeVideo(); + } + }, + ), + ), + + // Video player + Expanded( + flex: 2, + child: ColoredBox(color: Colors.black, child: _buildVideoPlayer()), + ), + + // Audio tracks list + Expanded(flex: 3, child: _buildAudioTracksList()), + ], + ), + floatingActionButton: FloatingActionButton( + onPressed: _loadAudioTracks, + tooltip: 'Refresh Audio Tracks', + child: const Icon(Icons.refresh), + ), + ); + } + + Widget _buildVideoPlayer() { + if (_isLoading) { + return const Center(child: CircularProgressIndicator()); + } + + if (_error case final String error?) { + return Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon(Icons.error, size: 48, color: Colors.red[300]), + const SizedBox(height: 16), + Text( + error, + style: const TextStyle(color: Colors.white), + textAlign: TextAlign.center, + ), + const SizedBox(height: 16), + ElevatedButton( + onPressed: _initializeVideo, + child: const Text('Retry'), + ), + ], + ), + ); + } + + final VideoPlayerController? controller = _controller; + if (controller?.value.isInitialized ?? false) { + return Stack( + alignment: Alignment.center, + children: [ + AspectRatio( + aspectRatio: controller!.value.aspectRatio, + child: VideoPlayer(controller), + ), + _buildPlayPauseButton(), + ], + ); + } + + return const Center( + child: Text('No video loaded', style: TextStyle(color: Colors.white)), + ); + } + + Widget _buildPlayPauseButton() { + final VideoPlayerController? 
controller = _controller; + if (controller == null) { + return const SizedBox.shrink(); + } + + return Container( + decoration: BoxDecoration( + color: Colors.black54, + borderRadius: BorderRadius.circular(30), + ), + child: IconButton( + iconSize: 48, + color: Colors.white, + onPressed: () { + if (controller.value.isPlaying) { + controller.pause(); + } else { + controller.play(); + } + }, + icon: Icon(controller.value.isPlaying ? Icons.pause : Icons.play_arrow), + ), + ); + } + + Widget _buildAudioTracksList() { + return Container( + padding: const EdgeInsets.all(16.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + const Icon(Icons.audiotrack), + const SizedBox(width: 8), + Text( + 'Audio Tracks (${_audioTracks.length})', + style: Theme.of(context).textTheme.headlineSmall, + ), + ], + ), + const SizedBox(height: 16), + + if (_audioTracks.isEmpty) + const Expanded( + child: Center( + child: Text( + 'No audio tracks available.\nTry loading a video with multiple audio tracks.', + textAlign: TextAlign.center, + style: TextStyle(fontSize: 16, color: Colors.grey), + ), + ), + ) + else + Expanded( + child: ListView.builder( + itemCount: _audioTracks.length, + itemBuilder: (BuildContext context, int index) { + final VideoAudioTrack track = _audioTracks[index]; + return _buildAudioTrackTile(track); + }, + ), + ), + ], + ), + ); + } + + Widget _buildAudioTrackTile(VideoAudioTrack track) { + return Card( + margin: const EdgeInsets.only(bottom: 8.0), + child: ListTile( + leading: CircleAvatar( + backgroundColor: track.isSelected ? Colors.green : Colors.grey, + child: Icon( + track.isSelected ? Icons.check : Icons.audiotrack, + color: Colors.white, + ), + ), + title: Text( + track.label?.isNotEmpty ?? false ? track.label! : 'Track ${track.id}', + style: TextStyle( + fontWeight: track.isSelected ? 
FontWeight.bold : FontWeight.normal, + ), + ), + subtitle: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text('ID: ${track.id}'), + if (track.language != null) Text('Language: ${track.language}'), + if (track.codec != null) Text('Codec: ${track.codec}'), + if (track.bitrate != null) Text('Bitrate: ${track.bitrate} bps'), + if (track.sampleRate != null) + Text('Sample Rate: ${track.sampleRate} Hz'), + if (track.channelCount != null) + Text('Channels: ${track.channelCount}'), + ], + ), + trailing: track.isSelected + ? const Icon(Icons.radio_button_checked, color: Colors.green) + : const Icon(Icons.radio_button_unchecked), + onTap: track.isSelected ? null : () => _selectAudioTrack(track.id), + ), + ); + } +} diff --git a/packages/video_player/video_player/example/lib/main.dart b/packages/video_player/video_player/example/lib/main.dart index d47a5abc6015..4aa855e715e3 100644 --- a/packages/video_player/video_player/example/lib/main.dart +++ b/packages/video_player/video_player/example/lib/main.dart @@ -11,6 +11,8 @@ library; import 'package:flutter/material.dart'; import 'package:video_player/video_player.dart'; +import 'audio_tracks_demo.dart'; + void main() { runApp(MaterialApp(home: _App())); } @@ -37,6 +39,19 @@ class _App extends StatelessWidget { ); }, ), + IconButton( + key: const ValueKey('audio_tracks_demo'), + icon: const Icon(Icons.audiotrack), + tooltip: 'Audio Tracks Demo', + onPressed: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => const AudioTracksDemo(), + ), + ); + }, + ), ], bottom: const TabBar( isScrollable: true, diff --git a/packages/video_player/video_player/lib/video_player.dart b/packages/video_player/video_player/lib/video_player.dart index f0589cb46869..232dda0858b7 100644 --- a/packages/video_player/video_player/lib/video_player.dart +++ b/packages/video_player/video_player/lib/video_player.dart @@ -9,7 +9,8 @@ import 'dart:math' as math show max; import 
'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; -import 'package:video_player_platform_interface/video_player_platform_interface.dart'; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + as platform_interface; import 'src/closed_caption_file.dart'; @@ -25,10 +26,121 @@ export 'package:video_player_platform_interface/video_player_platform_interface. export 'src/closed_caption_file.dart'; -VideoPlayerPlatform? _lastVideoPlayerPlatform; +/// Represents an audio track in a video with its metadata. +@immutable +class VideoAudioTrack { + /// Constructs an instance of [VideoAudioTrack]. + const VideoAudioTrack({ + required this.id, + required this.isSelected, + this.label, + this.language, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + /// Unique identifier for the audio track. + final String id; + + /// Human-readable label for the track. + /// + /// May be null if not available from the platform. + final String? label; + + /// Language code of the audio track (e.g., 'en', 'es', 'und'). + /// + /// May be null if not available from the platform. + final String? language; -VideoPlayerPlatform get _videoPlayerPlatform { - final VideoPlayerPlatform currentInstance = VideoPlayerPlatform.instance; + /// Whether this track is currently selected. + final bool isSelected; + + /// Bitrate of the audio track in bits per second. + /// + /// May be null if not available from the platform. + final int? bitrate; + + /// Sample rate of the audio track in Hz. + /// + /// May be null if not available from the platform. + final int? sampleRate; + + /// Number of audio channels. + /// + /// May be null if not available from the platform. + final int? channelCount; + + /// Audio codec used (e.g., 'aac', 'mp3', 'ac3'). + /// + /// May be null if not available from the platform. + final String? 
codec; + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is VideoAudioTrack && + runtimeType == other.runtimeType && + id == other.id && + label == other.label && + language == other.language && + isSelected == other.isSelected && + bitrate == other.bitrate && + sampleRate == other.sampleRate && + channelCount == other.channelCount && + codec == other.codec; + } + + @override + int get hashCode => Object.hash( + id, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ); + + @override + String toString() => + 'VideoAudioTrack(' + 'id: $id, ' + 'label: $label, ' + 'language: $language, ' + 'isSelected: $isSelected, ' + 'bitrate: $bitrate, ' + 'sampleRate: $sampleRate, ' + 'channelCount: $channelCount, ' + 'codec: $codec)'; +} + +/// Converts a platform interface [VideoAudioTrack] to the public API type. +/// +/// This internal method is used to decouple the public API from the +/// platform interface implementation. +VideoAudioTrack _convertPlatformAudioTrack( + platform_interface.VideoAudioTrack platformTrack, +) { + return VideoAudioTrack( + id: platformTrack.id, + label: platformTrack.label, + language: platformTrack.language, + isSelected: platformTrack.isSelected, + bitrate: platformTrack.bitrate, + sampleRate: platformTrack.sampleRate, + channelCount: platformTrack.channelCount, + codec: platformTrack.codec, + ); +} + +platform_interface.VideoPlayerPlatform? _lastVideoPlayerPlatform; + +platform_interface.VideoPlayerPlatform get _videoPlayerPlatform { + final platform_interface.VideoPlayerPlatform currentInstance = + platform_interface.VideoPlayerPlatform.instance; if (_lastVideoPlayerPlatform != currentInstance) { // This will clear all open videos on the platform when a full restart is // performed. 
@@ -50,7 +162,7 @@ class VideoPlayerValue { this.position = Duration.zero, this.caption = Caption.none, this.captionOffset = Duration.zero, - this.buffered = const [], + this.buffered = const [], this.isInitialized = false, this.isPlaying = false, this.isLooping = false, @@ -98,7 +210,7 @@ class VideoPlayerValue { final Duration captionOffset; /// The currently buffered ranges. - final List buffered; + final List buffered; /// True if the video is playing. False if it's paused. final bool isPlaying; @@ -164,7 +276,7 @@ class VideoPlayerValue { Duration? position, Caption? caption, Duration? captionOffset, - List? buffered, + List? buffered, bool? isInitialized, bool? isPlaying, bool? isLooping, @@ -281,9 +393,9 @@ class VideoPlayerController extends ValueNotifier { this.package, Future? closedCaptionFile, this.videoPlayerOptions, - this.viewType = VideoViewType.textureView, + this.viewType = platform_interface.VideoViewType.textureView, }) : _closedCaptionFileFuture = closedCaptionFile, - dataSourceType = DataSourceType.asset, + dataSourceType = platform_interface.DataSourceType.asset, formatHint = null, httpHeaders = const {}, super(const VideoPlayerValue(duration: Duration.zero)); @@ -308,9 +420,9 @@ class VideoPlayerController extends ValueNotifier { Future? closedCaptionFile, this.videoPlayerOptions, this.httpHeaders = const {}, - this.viewType = VideoViewType.textureView, + this.viewType = platform_interface.VideoViewType.textureView, }) : _closedCaptionFileFuture = closedCaptionFile, - dataSourceType = DataSourceType.network, + dataSourceType = platform_interface.DataSourceType.network, package = null, super(const VideoPlayerValue(duration: Duration.zero)); @@ -329,10 +441,10 @@ class VideoPlayerController extends ValueNotifier { Future? 
closedCaptionFile, this.videoPlayerOptions, this.httpHeaders = const {}, - this.viewType = VideoViewType.textureView, + this.viewType = platform_interface.VideoViewType.textureView, }) : _closedCaptionFileFuture = closedCaptionFile, dataSource = url.toString(), - dataSourceType = DataSourceType.network, + dataSourceType = platform_interface.DataSourceType.network, package = null, super(const VideoPlayerValue(duration: Duration.zero)); @@ -345,10 +457,10 @@ class VideoPlayerController extends ValueNotifier { Future? closedCaptionFile, this.videoPlayerOptions, this.httpHeaders = const {}, - this.viewType = VideoViewType.textureView, + this.viewType = platform_interface.VideoViewType.textureView, }) : _closedCaptionFileFuture = closedCaptionFile, dataSource = Uri.file(file.absolute.path).toString(), - dataSourceType = DataSourceType.file, + dataSourceType = platform_interface.DataSourceType.file, package = null, formatHint = null, super(const VideoPlayerValue(duration: Duration.zero)); @@ -361,14 +473,14 @@ class VideoPlayerController extends ValueNotifier { Uri contentUri, { Future? closedCaptionFile, this.videoPlayerOptions, - this.viewType = VideoViewType.textureView, + this.viewType = platform_interface.VideoViewType.textureView, }) : assert( defaultTargetPlatform == TargetPlatform.android, 'VideoPlayerController.contentUri is only supported on Android.', ), _closedCaptionFileFuture = closedCaptionFile, dataSource = contentUri.toString(), - dataSourceType = DataSourceType.contentUri, + dataSourceType = platform_interface.DataSourceType.contentUri, package = null, formatHint = null, httpHeaders = const {}, @@ -385,14 +497,14 @@ class VideoPlayerController extends ValueNotifier { /// **Android only**. Will override the platform's generic file format /// detection with whatever is set here. - final VideoFormat? formatHint; + final platform_interface.VideoFormat? formatHint; /// Describes the type of data source this [VideoPlayerController] /// is constructed with. 
- final DataSourceType dataSourceType; + final platform_interface.DataSourceType dataSourceType; /// Provide additional configuration options (optional). Like setting the audio mode to mix - final VideoPlayerOptions? videoPlayerOptions; + final platform_interface.VideoPlayerOptions? videoPlayerOptions; /// Only set for [asset] videos. The package that the asset was loaded from. final String? package; @@ -400,7 +512,7 @@ class VideoPlayerController extends ValueNotifier { /// The requested display mode for the video. /// /// Platforms that do not support the request view type will ignore this. - final VideoViewType viewType; + final platform_interface.VideoViewType viewType; Future? _closedCaptionFileFuture; ClosedCaptionFile? _closedCaptionFile; @@ -430,35 +542,35 @@ class VideoPlayerController extends ValueNotifier { _lifeCycleObserver?.initialize(); _creatingCompleter = Completer(); - final DataSource dataSourceDescription; + final platform_interface.DataSource dataSourceDescription; switch (dataSourceType) { - case DataSourceType.asset: - dataSourceDescription = DataSource( - sourceType: DataSourceType.asset, + case platform_interface.DataSourceType.asset: + dataSourceDescription = platform_interface.DataSource( + sourceType: platform_interface.DataSourceType.asset, asset: dataSource, package: package, ); - case DataSourceType.network: - dataSourceDescription = DataSource( - sourceType: DataSourceType.network, + case platform_interface.DataSourceType.network: + dataSourceDescription = platform_interface.DataSource( + sourceType: platform_interface.DataSourceType.network, uri: dataSource, formatHint: formatHint, httpHeaders: httpHeaders, ); - case DataSourceType.file: - dataSourceDescription = DataSource( - sourceType: DataSourceType.file, + case platform_interface.DataSourceType.file: + dataSourceDescription = platform_interface.DataSource( + sourceType: platform_interface.DataSourceType.file, uri: dataSource, httpHeaders: httpHeaders, ); - case 
DataSourceType.contentUri: - dataSourceDescription = DataSource( - sourceType: DataSourceType.contentUri, + case platform_interface.DataSourceType.contentUri: + dataSourceDescription = platform_interface.DataSource( + sourceType: platform_interface.DataSourceType.contentUri, uri: dataSource, ); } - final creationOptions = VideoCreationOptions( + final creationOptions = platform_interface.VideoCreationOptions( dataSource: dataSourceDescription, viewType: viewType, ); @@ -483,13 +595,13 @@ class VideoPlayerController extends ValueNotifier { ); } - void eventListener(VideoEvent event) { + void eventListener(platform_interface.VideoEvent event) { if (_isDisposed) { return; } switch (event.eventType) { - case VideoEventType.initialized: + case platform_interface.VideoEventType.initialized: value = value.copyWith( duration: event.duration, size: event.size, @@ -512,20 +624,20 @@ class VideoPlayerController extends ValueNotifier { _applyLooping(); _applyVolume(); _applyPlayPause(); - case VideoEventType.completed: + case platform_interface.VideoEventType.completed: // In this case we need to stop _timer, set isPlaying=false, and // position=value.duration. Instead of setting the values directly, // we use pause() and seekTo() to ensure the platform stops playing // and seeks to the last frame of the video. pause().then((void pauseResult) => seekTo(value.duration)); value = value.copyWith(isCompleted: true); - case VideoEventType.bufferingUpdate: + case platform_interface.VideoEventType.bufferingUpdate: value = value.copyWith(buffered: event.buffered); - case VideoEventType.bufferingStart: + case platform_interface.VideoEventType.bufferingStart: value = value.copyWith(isBuffering: true); - case VideoEventType.bufferingEnd: + case platform_interface.VideoEventType.bufferingEnd: value = value.copyWith(isBuffering: false); - case VideoEventType.isPlayingStateUpdate: + case platform_interface.VideoEventType.isPlayingStateUpdate: if (event.isPlaying ?? 
false) { value = value.copyWith( isPlaying: event.isPlaying, @@ -534,7 +646,7 @@ class VideoPlayerController extends ValueNotifier { } else { value = value.copyWith(isPlaying: event.isPlaying); } - case VideoEventType.unknown: + case platform_interface.VideoEventType.unknown: break; } } @@ -819,6 +931,63 @@ class VideoPlayerController extends ValueNotifier { } } + /// Gets the available audio tracks for the video. + /// + /// Returns a list of [VideoAudioTrack] objects containing metadata about + /// each available audio track. The list may be empty if no audio tracks + /// are available or if the video is not initialized. + /// + /// Throws an error if the video player is disposed. + Future> getAudioTracks() async { + if (_isDisposed) { + throw StateError('VideoPlayerController is disposed'); + } + if (!value.isInitialized) { + return []; + } + final List platformTracks = + await _videoPlayerPlatform.getAudioTracks(_playerId); + return platformTracks.map(_convertPlatformAudioTrack).toList(); + } + + /// Selects which audio track is chosen for playback from its [trackId] + /// + /// The [trackId] should match the ID of one of the tracks returned by + /// [getAudioTracks]. If the track ID is not found or invalid, the + /// platform may ignore the request or throw an exception. + /// + /// Throws an error if the video player is disposed or not initialized. + Future selectAudioTrack(String trackId) async { + if (_isDisposedOrNotInitialized) { + throw StateError('VideoPlayerController is disposed or not initialized'); + } + // The platform implementation (e.g., Android) will wait for the track + // selection to complete by listening to platform-specific events + await _videoPlayerPlatform.selectAudioTrack(_playerId, trackId); + } + + /// Returns whether audio track selection is supported on this platform. + /// + /// This method allows developers to query at runtime whether the current + /// platform supports audio track selection functionality. 
This is useful + /// for platforms like web where audio track selection may not be available. + /// + /// Returns `true` if [getAudioTracks] and [selectAudioTrack] are supported, + /// `false` otherwise. + /// + /// Example usage: + /// ```dart + /// if (controller.isAudioTrackSupportAvailable()) { + /// final tracks = await controller.getAudioTracks(); + /// // Show audio track selection UI + /// } else { + /// // Hide audio track selection UI or show unsupported message + /// } + /// ``` + bool isAudioTrackSupportAvailable() { + return _videoPlayerPlatform.isAudioTrackSupportAvailable(); + } + bool get _isDisposedOrNotInitialized => _isDisposed || !value.isInitialized; } @@ -903,7 +1072,7 @@ class _VideoPlayerState extends State { : _VideoPlayerWithRotation( rotation: widget.controller.value.rotationCorrection, child: _videoPlayerPlatform.buildViewWithOptions( - VideoViewOptions(playerId: _playerId), + platform_interface.VideoViewOptions(playerId: _playerId), ), ); } @@ -1116,7 +1285,10 @@ class _VideoProgressIndicatorState extends State { final double maxBuffering = duration == 0.0 ? 0.0 : controller.value.buffered - .map((DurationRange range) => range.end.inMilliseconds) + .map( + (platform_interface.DurationRange range) => + range.end.inMilliseconds, + ) .fold(0, math.max) / duration; progressIndicator = Stack( diff --git a/packages/video_player/video_player/pubspec.yaml b/packages/video_player/video_player/pubspec.yaml index f9ffee7fa368..f48636e14cde 100644 --- a/packages/video_player/video_player/pubspec.yaml +++ b/packages/video_player/video_player/pubspec.yaml @@ -3,11 +3,11 @@ description: Flutter plugin for displaying inline video with other Flutter widgets on Android, iOS, macOS and web. 
repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22 -version: 2.10.1 +version: 2.11.0 environment: - sdk: ^3.9.0 - flutter: ">=3.35.0" + sdk: ^3.10.0 + flutter: ">=3.38.0" flutter: plugin: @@ -25,9 +25,9 @@ dependencies: flutter: sdk: flutter html: ^0.15.0 - video_player_android: ^2.8.1 - video_player_avfoundation: ^2.7.0 - video_player_platform_interface: ^6.3.0 + video_player_android: ^2.9.1 + video_player_avfoundation: ^2.9.0 + video_player_platform_interface: ^6.6.0 video_player_web: ^2.1.0 dev_dependencies: diff --git a/packages/video_player/video_player/test/video_player_test.dart b/packages/video_player/video_player/test/video_player_test.dart index ea565bd90730..70bbf643675d 100644 --- a/packages/video_player/video_player/test/video_player_test.dart +++ b/packages/video_player/video_player/test/video_player_test.dart @@ -10,7 +10,11 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:video_player/video_player.dart'; -import 'package:video_player_platform_interface/video_player_platform_interface.dart'; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + hide VideoAudioTrack; +import 'package:video_player_platform_interface/video_player_platform_interface.dart' + as platform_interface + show VideoAudioTrack; const String _localhost = 'https://127.0.0.1'; final Uri _localhostUri = Uri.parse(_localhost); @@ -84,6 +88,49 @@ class FakeController extends ValueNotifier Future setClosedCaptionFile( Future? 
// NOTE(review): this chunk is a collapsed unified diff of
// packages/video_player/video_player/test/video_player_test.dart. It has been
// reformatted below: diff '+' markers removed, '@@' hunk headers preserved as
// comments, and generic type arguments that were stripped during extraction
// (e.g. `Future<List<VideoAudioTrack>>`, `Map<int, String>`) restored.
// TODO confirm the restored generics against the upstream PR.

// Tail of a fake-controller override whose declaration lies before this chunk.
  closedCaptionFile,
) async {}

  /// Fake implementation of the new audio-track query API.
  ///
  /// Returns a fixed set of three tracks so tests can assert exact field
  /// values: one selected track with only required fields, one with every
  /// optional field populated, and one with a single optional field.
  @override
  Future<List<VideoAudioTrack>> getAudioTracks() async {
    return <VideoAudioTrack>[
      const VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      ),
      const VideoAudioTrack(
        id: 'track_2',
        label: 'Spanish',
        language: 'es',
        isSelected: false,
        bitrate: 128000,
        sampleRate: 44100,
        channelCount: 2,
        codec: 'aac',
      ),
      const VideoAudioTrack(
        id: 'track_3',
        label: 'French',
        language: 'fr',
        isSelected: false,
        bitrate: 96000,
      ),
    ];
  }

  @override
  Future<void> selectAudioTrack(String trackId) async {
    // Store the selected track ID for verification in tests.
    selectedAudioTrackId = trackId;
  }

  @override
  bool isAudioTrackSupportAvailable() {
    // Return true for testing purposes.
    return true;
  }

  /// Last track ID passed to [selectAudioTrack], for test verification.
  String? selectedAudioTrackId;
}

// Context line truncated by the diff hunk boundary (body lies outside the hunk).
Future<ClosedCaptionFile> _loadClosedCaption() async =>

// @@ -818,6 +865,191 @@ void main() {
    });
  });

  group('audio tracks', () {
    test('getAudioTracks returns list of tracks', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      await controller.initialize();
      final List<VideoAudioTrack> tracks = await controller.getAudioTracks();

      expect(tracks.length, 3);
      expect(tracks[0].id, 'track_1');
      expect(tracks[0].label, 'English');
      expect(tracks[0].language, 'en');
      expect(tracks[0].isSelected, true);
      expect(tracks[0].bitrate, null);
      expect(tracks[0].sampleRate, null);
      expect(tracks[0].channelCount, null);
      expect(tracks[0].codec, null);

      expect(tracks[1].id, 'track_2');
      expect(tracks[1].label, 'Spanish');
      expect(tracks[1].language, 'es');
      expect(tracks[1].isSelected, false);
      expect(tracks[1].bitrate, 128000);
      expect(tracks[1].sampleRate, 44100);
      expect(tracks[1].channelCount, 2);
      expect(tracks[1].codec, 'aac');

      expect(tracks[2].id, 'track_3');
      expect(tracks[2].label, 'French');
      expect(tracks[2].language, 'fr');
      expect(tracks[2].isSelected, false);
      expect(tracks[2].bitrate, 96000);
      expect(tracks[2].sampleRate, null);
      expect(tracks[2].channelCount, null);
      expect(tracks[2].codec, null);
    });

    test('getAudioTracks before initialization returns empty list', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      final List<VideoAudioTrack> tracks = await controller.getAudioTracks();
      expect(tracks, isEmpty);
    });

    test('selectAudioTrack works with valid track ID', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      await controller.initialize();
      await controller.selectAudioTrack('track_2');

      // Verify the platform recorded the selection.
      expect(
        fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId],
        'track_2',
      );
    });

    test('selectAudioTrack before initialization throws', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      expect(
        () => controller.selectAudioTrack('track_1'),
        // NOTE(review): the matcher's type argument was lost in extraction;
        // assuming StateError — confirm against the upstream change.
        throwsA(isA<StateError>()),
      );
    });

    test('selectAudioTrack with empty track ID', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      await controller.initialize();
      await controller.selectAudioTrack('');

      expect(
        fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId],
        '',
      );
    });

    test('multiple track selections update correctly', () async {
      final controller = VideoPlayerController.networkUrl(_localhostUri);
      addTearDown(controller.dispose);

      await controller.initialize();

      await controller.selectAudioTrack('track_1');
      expect(
        fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId],
        'track_1',
      );

      await controller.selectAudioTrack('track_3');
      expect(
        fakeVideoPlayerPlatform.selectedAudioTrackIds[controller.playerId],
        'track_3',
      );
    });
  });

  group('VideoAudioTrack', () {
    test('equality works correctly', () {
      const track1 = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      );

      const track2 = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      );

      const track3 = VideoAudioTrack(
        id: 'track_2',
        label: 'Spanish',
        language: 'es',
        isSelected: false,
      );

      expect(track1, equals(track2));
      expect(track1, isNot(equals(track3)));
    });

    test('hashCode works correctly', () {
      const track1 = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      );

      const track2 = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      );

      expect(track1.hashCode, equals(track2.hashCode));
    });

    test('toString works correctly', () {
      const track = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
        bitrate: 128000,
        sampleRate: 44100,
        channelCount: 2,
        codec: 'aac',
      );

      final trackString = track.toString();
      expect(trackString, contains('track_1'));
      expect(trackString, contains('English'));
      expect(trackString, contains('en'));
      expect(trackString, contains('true'));
      expect(trackString, contains('128000'));
      expect(trackString, contains('44100'));
      expect(trackString, contains('2'));
      expect(trackString, contains('aac'));
    });

    test('optional fields can be null', () {
      const track = VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      );

      expect(track.bitrate, null);
      expect(track.sampleRate, null);
      expect(track.channelCount, null);
      expect(track.codec, null);
    });
  });

  // Context lines truncated by the diff hunk boundary (the test body
  // continues outside this hunk):
  group('caption', () {
    test('works when position updates', () async {
      final controller = VideoPlayerController.networkUrl(

// @@ -1626,4 +1858,50 @@ class FakeVideoPlayerPlatform extends VideoPlayerPlatform {
    calls.add('setWebOptions');
    webOptions[playerId] = options;
  }

  /// Fake platform implementation: records the call and returns a fixed set
  /// of three tracks mirroring the fake controller above.
  @override
  Future<List<platform_interface.VideoAudioTrack>> getAudioTracks(
    int playerId,
  ) async {
    calls.add('getAudioTracks');
    return <platform_interface.VideoAudioTrack>[
      const platform_interface.VideoAudioTrack(
        id: 'track_1',
        label: 'English',
        language: 'en',
        isSelected: true,
      ),
      const platform_interface.VideoAudioTrack(
        id: 'track_2',
        label: 'Spanish',
        language: 'es',
        isSelected: false,
        bitrate: 128000,
        sampleRate: 44100,
        channelCount: 2,
        codec: 'aac',
      ),
      const platform_interface.VideoAudioTrack(
        id: 'track_3',
        label: 'French',
        language: 'fr',
        isSelected: false,
        bitrate: 96000,
      ),
    ];
  }

  @override
  Future<void> selectAudioTrack(int playerId, String trackId) async {
    calls.add('selectAudioTrack');
    selectedAudioTrackIds[playerId] = trackId;
  }

  @override
  bool isAudioTrackSupportAvailable() {
    calls.add('isAudioTrackSupportAvailable');
    return true; // Return true for testing purposes.
  }

  /// Maps playerId to the last selected audio track ID, for test verification.
  final Map<int, String> selectedAudioTrackIds = <int, String>{};
}