using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.Models.StreamingDtos;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Devices;
using MediaBrowser.Controller.Dlna;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Net;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Net;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;

namespace Jellyfin.Api.Controllers
{
    /// <summary>
    /// The audio controller.
    /// </summary>
    // TODO: In order to authenticate this in the future, Dlna playback will require updating
    public class AudioController : BaseJellyfinApiController
    {
        private readonly IDlnaManager _dlnaManager;
        private readonly IAuthorizationContext _authContext;
        private readonly IUserManager _userManager;
        private readonly ILibraryManager _libraryManager;
        private readonly IMediaSourceManager _mediaSourceManager;
        private readonly IServerConfigurationManager _serverConfigurationManager;
        private readonly IMediaEncoder _mediaEncoder;
        private readonly IFileSystem _fileSystem;
        private readonly ISubtitleEncoder _subtitleEncoder;
        private readonly IConfiguration _configuration;
        private readonly IDeviceManager _deviceManager;
        private readonly TranscodingJobHelper _transcodingJobHelper;
        private readonly IHttpClientFactory _httpClientFactory;
        private readonly TranscodingJobType _transcodingJobType = TranscodingJobType.Progressive;

        /// <summary>
        /// Initializes a new instance of the <see cref="AudioController"/> class.
        /// </summary>
        /// <param name="dlnaManager">Instance of the <see cref="IDlnaManager"/> interface.</param>
        /// <param name="userManager">Instance of the <see cref="IUserManager"/> interface.</param>
        /// <param name="authorizationContext">Instance of the <see cref="IAuthorizationContext"/> interface.</param>
        /// <param name="libraryManager">Instance of the <see cref="ILibraryManager"/> interface.</param>
        /// <param name="mediaSourceManager">Instance of the <see cref="IMediaSourceManager"/> interface.</param>
        /// <param name="serverConfigurationManager">Instance of the <see cref="IServerConfigurationManager"/> interface.</param>
        /// <param name="mediaEncoder">Instance of the <see cref="IMediaEncoder"/> interface.</param>
        /// <param name="fileSystem">Instance of the <see cref="IFileSystem"/> interface.</param>
        /// <param name="subtitleEncoder">Instance of the <see cref="ISubtitleEncoder"/> interface.</param>
        /// <param name="configuration">Instance of the <see cref="IConfiguration"/> interface.</param>
        /// <param name="deviceManager">Instance of the <see cref="IDeviceManager"/> interface.</param>
        /// <param name="transcodingJobHelper">The <see cref="TranscodingJobHelper"/> singleton.</param>
        /// <param name="httpClientFactory">Instance of the <see cref="IHttpClientFactory"/> interface.</param>
        public AudioController(
            IDlnaManager dlnaManager,
            IUserManager userManager,
            IAuthorizationContext authorizationContext,
            ILibraryManager libraryManager,
            IMediaSourceManager mediaSourceManager,
            IServerConfigurationManager serverConfigurationManager,
            IMediaEncoder mediaEncoder,
            IFileSystem fileSystem,
            ISubtitleEncoder subtitleEncoder,
            IConfiguration configuration,
            IDeviceManager deviceManager,
            TranscodingJobHelper transcodingJobHelper,
            IHttpClientFactory httpClientFactory)
        {
            _dlnaManager = dlnaManager;
            _authContext = authorizationContext;
            _userManager = userManager;
            _libraryManager = libraryManager;
            _mediaSourceManager = mediaSourceManager;
            _serverConfigurationManager = serverConfigurationManager;
            _mediaEncoder = mediaEncoder;
            _fileSystem = fileSystem;
            _subtitleEncoder = subtitleEncoder;
            _configuration = configuration;
            _deviceManager = deviceManager;
            _transcodingJobHelper = transcodingJobHelper;
            _httpClientFactory = httpClientFactory;
        }

        /// <summary>
        /// Gets an audio stream.
        /// </summary>
        /// <param name="itemId">The item id.</param>
        /// <param name="container">The audio container.</param>
        /// <param name="static">Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false.</param>
        /// <param name="params">The streaming parameters.</param>
        /// <param name="tag">The tag.</param>
        /// <param name="deviceProfileId">Optional. The dlna device profile id to utilize.</param>
        /// <param name="playSessionId">The play session id.</param>
        /// <param name="segmentContainer">The segment container.</param>
        /// <param name="segmentLength">The segment length.</param>
        /// <param name="minSegments">The minimum number of segments.</param>
        /// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
        /// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
        /// <param name="audioCodec">Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma.</param>
        /// <param name="enableAutoStreamCopy">Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.</param>
        /// <param name="allowVideoStreamCopy">Whether or not to allow copying of the video stream url.</param>
        /// <param name="allowAudioStreamCopy">Whether or not to allow copying of the audio stream url.</param>
        /// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
        /// <param name="audioSampleRate">Optional. Specify a specific audio sample rate, e.g. 44100.</param>
        /// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
        /// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
        /// <param name="audioChannels">Optional. Specify a specific number of audio channels to encode to, e.g. 2.</param>
        /// <param name="maxAudioChannels">Optional. Specify a maximum number of audio channels to encode to, e.g. 2.</param>
        /// <param name="profile">Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high.</param>
        /// <param name="level">Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1.</param>
        /// <param name="framerate">Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
        /// <param name="maxFramerate">Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
        /// <param name="copyTimestamps">Whether or not to copy timestamps when transcoding with an offset. Defaults to false.</param>
        /// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 10,000,000 ticks = 1 second.</param>
        /// <param name="width">Optional. The fixed horizontal resolution of the encoded video.</param>
        /// <param name="height">Optional. The fixed vertical resolution of the encoded video.</param>
        /// <param name="videoBitRate">Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.</param>
        /// <param name="subtitleStreamIndex">Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.</param>
        /// <param name="subtitleMethod">Optional. Specify the subtitle delivery method.</param>
        /// <param name="maxRefFrames">Optional.</param>
        /// <param name="maxVideoBitDepth">Optional. The maximum video bit depth.</param>
        /// <param name="requireAvc">Optional. Whether to require avc.</param>
        /// <param name="deInterlace">Optional. Whether to deinterlace the video.</param>
        /// <param name="requireNonAnamorphic">Optional. Whether to require a non-anamorphic stream.</param>
        /// <param name="transcodingMaxAudioChannels">Optional. The maximum number of audio channels to transcode.</param>
        /// <param name="cpuCoreLimit">Optional. The limit of how many cpu cores to use.</param>
        /// <param name="liveStreamId">The live stream id.</param>
        /// <param name="enableMpegtsM2TsMode">Optional. Whether to enable the MpegtsM2Ts mode.</param>
        /// <param name="videoCodec">Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv.</param>
        /// <param name="subtitleCodec">Optional. Specify a subtitle codec to encode to.</param>
        /// <param name="transcodingReasons">Optional. The transcoding reason.</param>
        /// <param name="audioStreamIndex">Optional. The index of the audio stream to use. If omitted the first audio stream will be used.</param>
        /// <param name="videoStreamIndex">Optional. The index of the video stream to use. If omitted the first video stream will be used.</param>
        /// <param name="context">Optional. The <see cref="EncodingContext"/>.</param>
        /// <param name="streamOptions">Optional. The streaming options.</param>
        /// <response code="200">Audio stream returned.</response>
        /// <returns>A <see cref="FileResult"/> containing the audio file.</returns>
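        // Note: two route shapes are mapped below: "{itemId}/stream.{container}", where the extension
        // picks the output container, and "{itemId}/stream" with no extension, where the server
        // auto-selects. Each GET route has a matching HEAD route.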
        [HttpGet("{itemId}/{stream=stream}.{container?}", Name = "GetAudioStreamByContainer")]
        [HttpGet("{itemId}/stream", Name = "GetAudioStream")]
        [HttpHead("{itemId}/{stream=stream}.{container?}", Name = "HeadAudioStreamByContainer")]
        [HttpHead("{itemId}/stream", Name = "HeadAudioStream")]
        [ProducesResponseType(StatusCodes.Status200OK)]
        public async Task<ActionResult> GetAudioStream(
            [FromRoute] Guid itemId,
            [FromRoute] string? container,
            [FromQuery] bool? @static,
            [FromQuery] string? @params,
            [FromQuery] string? tag,
            [FromQuery] string? deviceProfileId,
            [FromQuery] string? playSessionId,
            [FromQuery] string? segmentContainer,
            [FromQuery] int? segmentLength,
            [FromQuery] int? minSegments,
            [FromQuery] string? mediaSourceId,
            [FromQuery] string? deviceId,
            [FromQuery] string? audioCodec,
            [FromQuery] bool? enableAutoStreamCopy,
            [FromQuery] bool? allowVideoStreamCopy,
            [FromQuery] bool? allowAudioStreamCopy,
            [FromQuery] bool? breakOnNonKeyFrames,
            [FromQuery] int? audioSampleRate,
            [FromQuery] int? maxAudioBitDepth,
            [FromQuery] int? audioBitRate,
            [FromQuery] int? audioChannels,
            [FromQuery] int? maxAudioChannels,
            [FromQuery] string? profile,
            [FromQuery] string? level,
            [FromQuery] float? framerate,
            [FromQuery] float? maxFramerate,
            [FromQuery] bool? copyTimestamps,
            [FromQuery] long? startTimeTicks,
            [FromQuery] int? width,
            [FromQuery] int? height,
            [FromQuery] int? videoBitRate,
            [FromQuery] int? subtitleStreamIndex,
            [FromQuery] SubtitleDeliveryMethod subtitleMethod,
            [FromQuery] int? maxRefFrames,
            [FromQuery] int? maxVideoBitDepth,
            [FromQuery] bool? requireAvc,
            [FromQuery] bool? deInterlace,
            [FromQuery] bool? requireNonAnamorphic,
            [FromQuery] int? transcodingMaxAudioChannels,
            [FromQuery] int? cpuCoreLimit,
            [FromQuery] string? liveStreamId,
            [FromQuery] bool? enableMpegtsM2TsMode,
            [FromQuery] string? videoCodec,
            [FromQuery] string? subtitleCodec,
            [FromQuery] string? transcodingReasons,
            [FromQuery] int? audioStreamIndex,
            [FromQuery] int? videoStreamIndex,
            [FromQuery] EncodingContext? context,
            [FromQuery] Dictionary<string, string>? streamOptions)
        {
            bool isHeadRequest = Request.Method == System.Net.WebRequestMethods.Http.Head;
            var cancellationTokenSource = new CancellationTokenSource();

            StreamingRequestDto streamingRequest = new StreamingRequestDto
            {
                Id = itemId,
                Container = container,
                Static = @static ?? true,
                Params = @params,
                Tag = tag,
                DeviceProfileId = deviceProfileId,
                PlaySessionId = playSessionId,
                SegmentContainer = segmentContainer,
                SegmentLength = segmentLength,
                MinSegments = minSegments,
                MediaSourceId = mediaSourceId,
                DeviceId = deviceId,
                AudioCodec = audioCodec,
                EnableAutoStreamCopy = enableAutoStreamCopy ?? true,
                AllowAudioStreamCopy = allowAudioStreamCopy ?? true,
                AllowVideoStreamCopy = allowVideoStreamCopy ?? true,
                BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
                AudioSampleRate = audioSampleRate,
                MaxAudioChannels = maxAudioChannels,
                AudioBitRate = audioBitRate,
                MaxAudioBitDepth = maxAudioBitDepth,
                AudioChannels = audioChannels,
                Profile = profile,
                Level = level,
                Framerate = framerate,
                MaxFramerate = maxFramerate,
                CopyTimestamps = copyTimestamps ?? true,
                StartTimeTicks = startTimeTicks,
                Width = width,
                Height = height,
                VideoBitRate = videoBitRate,
                SubtitleStreamIndex = subtitleStreamIndex,
                SubtitleMethod = subtitleMethod,
                MaxRefFrames = maxRefFrames,
                MaxVideoBitDepth = maxVideoBitDepth,
                RequireAvc = requireAvc ?? true,
                DeInterlace = deInterlace ?? true,
                RequireNonAnamorphic = requireNonAnamorphic ?? true,
                TranscodingMaxAudioChannels = transcodingMaxAudioChannels,
                CpuCoreLimit = cpuCoreLimit,
                LiveStreamId = liveStreamId,
                EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true,
                VideoCodec = videoCodec,
                SubtitleCodec = subtitleCodec,
                TranscodeReasons = transcodingReasons,
                AudioStreamIndex = audioStreamIndex,
                VideoStreamIndex = videoStreamIndex,
                Context = context ?? EncodingContext.Static,
                StreamOptions = streamOptions
            };
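
            // Resolve the request into the full streaming state (media source, device profile,
            // output container and transcode output path) used by every branch below.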
            using var state = await StreamingHelpers.GetStreamingState(
                    streamingRequest,
                    Request,
                    _authContext,
                    _mediaSourceManager,
                    _userManager,
                    _libraryManager,
                    _serverConfigurationManager,
                    _mediaEncoder,
                    _fileSystem,
                    _subtitleEncoder,
                    _configuration,
                    _dlnaManager,
                    _deviceManager,
                    _transcodingJobHelper,
                    _transcodingJobType,
                    cancellationTokenSource.Token)
                .ConfigureAwait(false);

            if (@static.HasValue && @static.Value && state.DirectStreamProvider != null)
            {
                StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager);

                await new ProgressiveFileCopier(state.DirectStreamProvider, null, _transcodingJobHelper, CancellationToken.None)
                    {
                        AllowEndOfFile = false
                    }.WriteToAsync(Response.Body, CancellationToken.None)
                    .ConfigureAwait(false);

                // TODO (moved from MediaBrowser.Api): Don't hardcode contentType
                return File(Response.Body, MimeTypes.GetMimeType("file.ts")!);
            }

            // Static remote stream
            if (@static.HasValue && @static.Value && state.InputProtocol == MediaProtocol.Http)
            {
                StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager);

                using var httpClient = _httpClientFactory.CreateClient();
                return await FileStreamResponseHelpers.GetStaticRemoteStreamResult(state, isHeadRequest, this, httpClient).ConfigureAwait(false);
            }

            if (@static.HasValue && @static.Value && state.InputProtocol != MediaProtocol.File)
            {
                return BadRequest($"Input protocol {state.InputProtocol} cannot be streamed statically");
            }

            var outputPath = state.OutputFilePath;
            var outputPathExists = System.IO.File.Exists(outputPath);

            var transcodingJob = _transcodingJobHelper.GetTranscodingJob(outputPath, TranscodingJobType.Progressive);
            var isTranscodeCached = outputPathExists && transcodingJob != null;

            StreamingHelpers.AddDlnaHeaders(state, Response.Headers, (@static.HasValue && @static.Value) || isTranscodeCached, startTimeTicks, Request, _dlnaManager);

            // Static stream
            if (@static.HasValue && @static.Value)
            {
                var contentType = state.GetMimeType("." + state.OutputContainer, false) ?? state.GetMimeType(state.MediaPath);

                if (state.MediaSource.IsInfiniteStream)
                {
                    await new ProgressiveFileCopier(state.MediaPath, null, _transcodingJobHelper, CancellationToken.None)
                        {
                            AllowEndOfFile = false
                        }.WriteToAsync(Response.Body, CancellationToken.None)
                        .ConfigureAwait(false);

                    return File(Response.Body, contentType);
                }

                return FileStreamResponseHelpers.GetStaticFileResult(
                    state.MediaPath,
                    contentType,
                    isHeadRequest,
                    this);
            }

            // Need to start ffmpeg (because media can't be returned directly)
            var encodingOptions = _serverConfigurationManager.GetEncodingOptions();
            var encodingHelper = new EncodingHelper(_mediaEncoder, _fileSystem, _subtitleEncoder, _configuration);
            var ffmpegCommandLineArguments = encodingHelper.GetProgressiveAudioFullCommandLine(state, encodingOptions, outputPath);

            return await FileStreamResponseHelpers.GetTranscodedFile(
                state,
                isHeadRequest,
                this,
                _transcodingJobHelper,
                ffmpegCommandLineArguments,
                Request,
                _transcodingJobType,
                cancellationTokenSource).ConfigureAwait(false);
        }
    }
}
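
// Usage sketch (illustrative only, not part of the controller): a hypothetical client call against
// this endpoint, assuming the conventional "Audio" base route, placeholder `baseUrl`/`itemId` values,
// and with any required authentication omitted.
//
//     using var client = new HttpClient();
//     var url = $"{baseUrl}/Audio/{itemId}/stream.mp3?audioBitRate=128000&maxAudioChannels=2";
//     await using var audio = await client.GetStreamAsync(url);
//     // "audio" now yields the returned audio stream (direct or transcoded, per the branches above).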