using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Jellyfin.Api.Constants;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.Models.PlaybackDtos;
using Jellyfin.Api.Models.StreamingDtos;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Common.Net;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Devices;
using MediaBrowser.Controller.Dlna;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Net;
using MediaBrowser.Model.Configuration;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Net;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Net.Http.Headers;

namespace Jellyfin.Api.Controllers
{
    /// <summary>
    /// Dynamic hls controller.
    /// </summary>
    [Authorize(Policy = Policies.DefaultAuthorization)]
    public class DynamicHlsController : BaseJellyfinApiController
    {
        private readonly ILibraryManager _libraryManager;
        private readonly IUserManager _userManager;
        private readonly IDlnaManager _dlnaManager;
        private readonly IAuthorizationContext _authContext;
        private readonly IMediaSourceManager _mediaSourceManager;
        private readonly IServerConfigurationManager _serverConfigurationManager;
        private readonly IMediaEncoder _mediaEncoder;
        private readonly IFileSystem _fileSystem;
        private readonly ISubtitleEncoder _subtitleEncoder;
        private readonly IConfiguration _configuration;
        private readonly IDeviceManager _deviceManager;
        private readonly TranscodingJobHelper _transcodingJobHelper;
        private readonly INetworkManager _networkManager;
        private readonly ILogger _logger;
        private readonly EncodingHelper _encodingHelper;

        private readonly TranscodingJobType _transcodingJobType = TranscodingJobType.Hls;

        /// <summary>
        /// Initializes a new instance of the <see cref="DynamicHlsController"/> class.
        /// </summary>
        /// <param name="libraryManager">Instance of the <see cref="ILibraryManager"/> interface.</param>
        /// <param name="userManager">Instance of the <see cref="IUserManager"/> interface.</param>
        /// <param name="dlnaManager">Instance of the <see cref="IDlnaManager"/> interface.</param>
        /// <param name="authContext">Instance of the <see cref="IAuthorizationContext"/> interface.</param>
        /// <param name="mediaSourceManager">Instance of the <see cref="IMediaSourceManager"/> interface.</param>
        /// <param name="serverConfigurationManager">Instance of the <see cref="IServerConfigurationManager"/> interface.</param>
        /// <param name="mediaEncoder">Instance of the <see cref="IMediaEncoder"/> interface.</param>
        /// <param name="fileSystem">Instance of the <see cref="IFileSystem"/> interface.</param>
        /// <param name="subtitleEncoder">Instance of the <see cref="ISubtitleEncoder"/> interface.</param>
        /// <param name="configuration">Instance of the <see cref="IConfiguration"/> interface.</param>
        /// <param name="deviceManager">Instance of the <see cref="IDeviceManager"/> interface.</param>
        /// <param name="transcodingJobHelper">Instance of the <see cref="TranscodingJobHelper"/> class.</param>
        /// <param name="networkManager">Instance of the <see cref="INetworkManager"/> interface.</param>
        /// <param name="logger">Instance of the <see cref="ILogger"/> interface.</param>
public DynamicHlsController( ILibraryManager libraryManager, IUserManager userManager, IDlnaManager dlnaManager, IAuthorizationContext authContext, IMediaSourceManager mediaSourceManager, IServerConfigurationManager serverConfigurationManager, IMediaEncoder mediaEncoder, IFileSystem fileSystem, ISubtitleEncoder subtitleEncoder, IConfiguration configuration, IDeviceManager deviceManager, TranscodingJobHelper transcodingJobHelper, INetworkManager networkManager, ILogger logger) { _libraryManager = libraryManager; _userManager = userManager; _dlnaManager = dlnaManager; _authContext = authContext; _mediaSourceManager = mediaSourceManager; _serverConfigurationManager = serverConfigurationManager; _mediaEncoder = mediaEncoder; _fileSystem = fileSystem; _subtitleEncoder = subtitleEncoder; _configuration = configuration; _deviceManager = deviceManager; _transcodingJobHelper = transcodingJobHelper; _networkManager = networkManager; _logger = logger; _encodingHelper = new EncodingHelper(_mediaEncoder, _fileSystem, _subtitleEncoder, _configuration); } /// /// Gets a video hls playlist stream. /// /// The item id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. /// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. 
The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Enable adaptive bitrate streaming. /// Video stream returned. /// A containing the playlist file. [HttpGet("/Videos/{itemId}/master.m3u8")] [HttpHead("/Videos/{itemId}/master.m3u8", Name = "HeadMasterHlsVideoPlaylist")] [ProducesResponseType(StatusCodes.Status200OK)] public async Task GetMasterHlsVideoPlaylist( [FromRoute] Guid itemId, [FromRoute] string? container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery, Required] string? mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions, [FromQuery] bool enableAdaptiveBitrateStreaming = true) { var isHeadRequest = Request.Method == System.Net.WebRequestMethods.Http.Head; var cancellationTokenSource = new CancellationTokenSource(); var streamingRequest = new HlsVideoRequestDto { Id = itemId, Container = container, Static = @static ?? 
true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true, VideoCodec = videoCodec, SubtitleCodec = subtitleCodec, TranscodeReasons = transcodingReasons, AudioStreamIndex = audioStreamIndex, VideoStreamIndex = videoStreamIndex, Context = context, StreamOptions = streamOptions, EnableAdaptiveBitrateStreaming = enableAdaptiveBitrateStreaming }; return await GetMasterPlaylistInternal(streamingRequest, isHeadRequest, enableAdaptiveBitrateStreaming, cancellationTokenSource) .ConfigureAwait(false); } /// /// Gets an audio hls playlist stream. /// /// The item id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. /// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. 
Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Enable adaptive bitrate streaming. /// Audio stream returned. /// A containing the playlist file. [HttpGet("/Audio/{itemId}/master.m3u8")] [HttpHead("/Audio/{itemId}/master.m3u8", Name = "HeadMasterHlsAudioPlaylist")] [ProducesResponseType(StatusCodes.Status200OK)] public async Task GetMasterHlsAudioPlaylist( [FromRoute] Guid itemId, [FromRoute] string? container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery, Required] string? mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? 
transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions, [FromQuery] bool enableAdaptiveBitrateStreaming = true) { var isHeadRequest = Request.Method == System.Net.WebRequestMethods.Http.Head; var cancellationTokenSource = new CancellationTokenSource(); var streamingRequest = new HlsAudioRequestDto { Id = itemId, Container = container, Static = @static ?? true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true, VideoCodec = videoCodec, SubtitleCodec = subtitleCodec, TranscodeReasons = transcodingReasons, AudioStreamIndex = audioStreamIndex, VideoStreamIndex = videoStreamIndex, Context = context, StreamOptions = streamOptions, EnableAdaptiveBitrateStreaming = enableAdaptiveBitrateStreaming }; return await GetMasterPlaylistInternal(streamingRequest, isHeadRequest, enableAdaptiveBitrateStreaming, cancellationTokenSource) .ConfigureAwait(false); } /// /// Gets a video stream using HTTP live streaming. /// /// The item id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. /// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. 
Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Video stream returned. /// A containing the audio file. [HttpGet("/Videos/{itemId}/main.m3u8")] [ProducesResponseType(StatusCodes.Status200OK)] public async Task GetVariantHlsVideoPlaylist( [FromRoute] Guid itemId, [FromRoute] string? container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery] string? mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? 
maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions) { var cancellationTokenSource = new CancellationTokenSource(); var streamingRequest = new VideoRequestDto { Id = itemId, Container = container, Static = @static ?? true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true, VideoCodec = videoCodec, SubtitleCodec = subtitleCodec, TranscodeReasons = transcodingReasons, AudioStreamIndex = audioStreamIndex, VideoStreamIndex = videoStreamIndex, Context = context, StreamOptions = streamOptions }; return await GetVariantPlaylistInternal(streamingRequest, "main", cancellationTokenSource) .ConfigureAwait(false); } /// /// Gets an audio stream using HTTP live streaming. /// /// The item id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. /// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. 
Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Audio stream returned. /// A containing the audio file. [HttpGet("/Audio/{itemId}/main.m3u8")] [ProducesResponseType(StatusCodes.Status200OK)] public async Task GetVariantHlsAudioPlaylist( [FromRoute] Guid itemId, [FromRoute] string? container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery] string? mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? 
subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions) { var cancellationTokenSource = new CancellationTokenSource(); var streamingRequest = new StreamingRequestDto { Id = itemId, Container = container, Static = @static ?? true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true, VideoCodec = videoCodec, SubtitleCodec = subtitleCodec, TranscodeReasons = transcodingReasons, AudioStreamIndex = audioStreamIndex, VideoStreamIndex = videoStreamIndex, Context = context, StreamOptions = streamOptions }; return await GetVariantPlaylistInternal(streamingRequest, "main", cancellationTokenSource) .ConfigureAwait(false); } /// /// Gets a video stream using HTTP live streaming. /// /// The item id. /// The playlist id. /// The segment id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. 
/// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Video stream returned. /// A containing the audio file. [HttpGet("/Videos/{itemId}/hls1/{playlistId}/{segmentId}.{container}")] [ProducesResponseType(StatusCodes.Status200OK)] [SuppressMessage("Microsoft.Performance", "CA1801:ReviewUnusedParameters", MessageId = "playlistId", Justification = "Imported from ServiceStack")] public async Task GetHlsVideoSegment( [FromRoute] Guid itemId, [FromRoute] string playlistId, [FromRoute] int segmentId, [FromRoute] string container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery] string? mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? 
maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions) { var streamingRequest = new VideoRequestDto { Id = itemId, Container = container, Static = @static ?? true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? true, VideoCodec = videoCodec, SubtitleCodec = subtitleCodec, TranscodeReasons = transcodingReasons, AudioStreamIndex = audioStreamIndex, VideoStreamIndex = videoStreamIndex, Context = context, StreamOptions = streamOptions }; return await GetDynamicSegment(streamingRequest, segmentId) .ConfigureAwait(false); } /// /// Gets a video stream using HTTP live streaming. /// /// The item id. /// The playlist id. /// The segment id. /// The video container. Possible values are: ts, webm, asf, wmv, ogv, mp4, m4v, mkv, mpeg, mpg, avi, 3gp, wmv, wtv, m2ts, mov, iso, flv. /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. /// The streaming parameters. /// The tag. /// Optional. The dlna device profile id to utilize. /// The play session id. /// The segment container. /// The segment lenght. /// The minimum number of segments. /// The media version id, if playing an alternate version. /// The device id of the client requesting. Used to stop encoding processes when needed. /// Optional. 
Specify a audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. /// Whether or not to allow copying of the video stream url. /// Whether or not to allow copying of the audio stream url. /// Optional. Whether to break on non key frames. /// Optional. Specify a specific audio sample rate, e.g. 44100. /// Optional. The maximum audio bit depth. /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. /// Optional. Specify a specific an encoder profile (varies by encoder), e.g. main, baseline, high. /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. /// Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms. /// Optional. The fixed horizontal resolution of the encoded video. /// Optional. The fixed vertical resolution of the encoded video. /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. /// Optional. Specify the subtitle delivery method. /// Optional. /// Optional. The maximum video bit depth. /// Optional. Whether to require avc. /// Optional. Whether to deinterlace the video. /// Optional. Whether to require a non anamporphic stream. /// Optional. The maximum number of audio channels to transcode. /// Optional. The limit of how many cpu cores to use. /// The live stream id. /// Optional. Whether to enable the MpegtsM2Ts mode. /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. /// Optional. Specify a subtitle codec to encode to. /// Optional. The transcoding reason. /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. /// Optional. The index of the video stream to use. If omitted the first video stream will be used. /// Optional. The . /// Optional. The streaming options. /// Video stream returned. /// A containing the audio file. [HttpGet("/Audio/{itemId}/hls1/{playlistId}/{segmentId}.{container}")] [ProducesResponseType(StatusCodes.Status200OK)] [SuppressMessage("Microsoft.Performance", "CA1801:ReviewUnusedParameters", MessageId = "playlistId", Justification = "Imported from ServiceStack")] public async Task GetHlsAudioSegment( [FromRoute] Guid itemId, [FromRoute] string playlistId, [FromRoute] int segmentId, [FromRoute] string container, [FromQuery] bool? @static, [FromQuery] string? @params, [FromQuery] string? tag, [FromQuery] string? deviceProfileId, [FromQuery] string? playSessionId, [FromQuery] string? segmentContainer, [FromQuery] int? segmentLength, [FromQuery] int? minSegments, [FromQuery] string? 
mediaSourceId, [FromQuery] string? deviceId, [FromQuery] string? audioCodec, [FromQuery] bool? enableAutoStreamCopy, [FromQuery] bool? allowVideoStreamCopy, [FromQuery] bool? allowAudioStreamCopy, [FromQuery] bool? breakOnNonKeyFrames, [FromQuery] int? audioSampleRate, [FromQuery] int? maxAudioBitDepth, [FromQuery] int? audioBitRate, [FromQuery] int? audioChannels, [FromQuery] int? maxAudioChannels, [FromQuery] string? profile, [FromQuery] string? level, [FromQuery] float? framerate, [FromQuery] float? maxFramerate, [FromQuery] bool? copyTimestamps, [FromQuery] long? startTimeTicks, [FromQuery] int? width, [FromQuery] int? height, [FromQuery] int? videoBitRate, [FromQuery] int? subtitleStreamIndex, [FromQuery] SubtitleDeliveryMethod subtitleMethod, [FromQuery] int? maxRefFrames, [FromQuery] int? maxVideoBitDepth, [FromQuery] bool? requireAvc, [FromQuery] bool? deInterlace, [FromQuery] bool? requireNonAnamorphic, [FromQuery] int? transcodingMaxAudioChannels, [FromQuery] int? cpuCoreLimit, [FromQuery] string? liveStreamId, [FromQuery] bool? enableMpegtsM2TsMode, [FromQuery] string? videoCodec, [FromQuery] string? subtitleCodec, [FromQuery] string? transcodingReasons, [FromQuery] int? audioStreamIndex, [FromQuery] int? videoStreamIndex, [FromQuery] EncodingContext context, [FromQuery] Dictionary streamOptions) { var streamingRequest = new StreamingRequestDto { Id = itemId, Container = container, Static = @static ?? true, Params = @params, Tag = tag, DeviceProfileId = deviceProfileId, PlaySessionId = playSessionId, SegmentContainer = segmentContainer, SegmentLength = segmentLength, MinSegments = minSegments, MediaSourceId = mediaSourceId, DeviceId = deviceId, AudioCodec = audioCodec, EnableAutoStreamCopy = enableAutoStreamCopy ?? true, AllowAudioStreamCopy = allowAudioStreamCopy ?? true, AllowVideoStreamCopy = allowVideoStreamCopy ?? true, BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false, AudioSampleRate = audioSampleRate, MaxAudioChannels = maxAudioChannels, AudioBitRate = audioBitRate, MaxAudioBitDepth = maxAudioBitDepth, AudioChannels = audioChannels, Profile = profile, Level = level, Framerate = framerate, MaxFramerate = maxFramerate, CopyTimestamps = copyTimestamps ?? true, StartTimeTicks = startTimeTicks, Width = width, Height = height, VideoBitRate = videoBitRate, SubtitleStreamIndex = subtitleStreamIndex, SubtitleMethod = subtitleMethod, MaxRefFrames = maxRefFrames, MaxVideoBitDepth = maxVideoBitDepth, RequireAvc = requireAvc ?? true, DeInterlace = deInterlace ?? true, RequireNonAnamorphic = requireNonAnamorphic ?? true, TranscodingMaxAudioChannels = transcodingMaxAudioChannels, CpuCoreLimit = cpuCoreLimit, LiveStreamId = liveStreamId, EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? 
                true,
                VideoCodec = videoCodec,
                SubtitleCodec = subtitleCodec,
                TranscodeReasons = transcodingReasons,
                AudioStreamIndex = audioStreamIndex,
                VideoStreamIndex = videoStreamIndex,
                Context = context,
                StreamOptions = streamOptions
            };

            return await GetDynamicSegment(streamingRequest, segmentId)
                .ConfigureAwait(false);
        }

        private async Task<ActionResult> GetMasterPlaylistInternal(
            StreamingRequestDto streamingRequest,
            bool isHeadRequest,
            bool enableAdaptiveBitrateStreaming,
            CancellationTokenSource cancellationTokenSource)
        {
            using var state = await StreamingHelpers.GetStreamingState(
                    streamingRequest,
                    Request,
                    _authContext,
                    _mediaSourceManager,
                    _userManager,
                    _libraryManager,
                    _serverConfigurationManager,
                    _mediaEncoder,
                    _fileSystem,
                    _subtitleEncoder,
                    _configuration,
                    _dlnaManager,
                    _deviceManager,
                    _transcodingJobHelper,
                    _transcodingJobType,
                    cancellationTokenSource.Token)
                .ConfigureAwait(false);

            Response.Headers.Add(HeaderNames.Expires, "0");
            if (isHeadRequest)
            {
                return new FileContentResult(Array.Empty<byte>(), MimeTypes.GetMimeType("playlist.m3u8"));
            }

            // Combined output bitrate; a missing audio or video bitrate counts as 0.
            var totalBitrate = (state.OutputAudioBitrate ?? 0) + (state.OutputVideoBitrate ?? 0);

            var builder = new StringBuilder();
            builder.AppendLine("#EXTM3U");

            var isLiveStream = state.IsSegmentedLiveStream;
            var queryString = Request.QueryString.ToString();

            // from universal audio service
            if (queryString.IndexOf("SegmentContainer", StringComparison.OrdinalIgnoreCase) == -1 && !string.IsNullOrWhiteSpace(state.Request.SegmentContainer))
            {
                queryString += "&SegmentContainer=" + state.Request.SegmentContainer;
            }

            // from universal audio service
            if (!string.IsNullOrWhiteSpace(state.Request.TranscodeReasons) && queryString.IndexOf("TranscodeReasons=", StringComparison.OrdinalIgnoreCase) == -1)
            {
                queryString += "&TranscodeReasons=" + state.Request.TranscodeReasons;
            }

            // Main stream
            var playlistUrl = isLiveStream ? "live.m3u8" : "main.m3u8";
            playlistUrl += queryString;

            var subtitleStreams = state.MediaSource
                .MediaStreams
                .Where(i => i.IsTextSubtitleStream)
                .ToList();

            var subtitleGroup = subtitleStreams.Count > 0 && (state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Hls || state.VideoRequest!.EnableSubtitlesInManifest)
                ? "subs"
                : null;

            // If we're burning in subtitles then don't add additional subs to the manifest
            if (state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode)
            {
                subtitleGroup = null;
            }

            if (!string.IsNullOrWhiteSpace(subtitleGroup))
            {
                AddSubtitles(state, subtitleStreams, builder);
            }

            AppendPlaylist(builder, state, playlistUrl, totalBitrate, subtitleGroup);

            if (EnableAdaptiveBitrateStreaming(state, isLiveStream, enableAdaptiveBitrateStreaming))
            {
                var requestedVideoBitrate = state.VideoRequest == null ? 0 : state.VideoRequest.VideoBitRate ?? 0;
                // By default, vary by just 200k
                var variation = GetBitrateVariation(totalBitrate);

                // Add two extra entries to the master playlist at progressively lower bitrates.
                var newBitrate = totalBitrate - variation;
                var variantUrl = ReplaceBitrate(playlistUrl, requestedVideoBitrate, requestedVideoBitrate - variation);
                AppendPlaylist(builder, state, variantUrl, newBitrate, subtitleGroup);

                variation *= 2;
                newBitrate = totalBitrate - variation;
                variantUrl = ReplaceBitrate(playlistUrl, requestedVideoBitrate, requestedVideoBitrate - variation);
                AppendPlaylist(builder, state, variantUrl, newBitrate, subtitleGroup);
            }

            return new FileContentResult(Encoding.UTF8.GetBytes(builder.ToString()), MimeTypes.GetMimeType("playlist.m3u8"));
        }

        private async Task<ActionResult> GetVariantPlaylistInternal(StreamingRequestDto streamingRequest, string name, CancellationTokenSource cancellationTokenSource)
        {
            using var state = await StreamingHelpers.GetStreamingState(
                    streamingRequest,
                    Request,
                    _authContext,
                    _mediaSourceManager,
                    _userManager,
                    _libraryManager,
                    _serverConfigurationManager,
                    _mediaEncoder,
                    _fileSystem,
                    _subtitleEncoder,
                    _configuration,
                    _dlnaManager,
                    _deviceManager,
                    _transcodingJobHelper,
                    _transcodingJobType,
                    cancellationTokenSource.Token)
                .ConfigureAwait(false);

            Response.Headers.Add(HeaderNames.Expires, "0");

            var segmentLengths = GetSegmentLengths(state);

            var builder = new StringBuilder();
            builder.AppendLine("#EXTM3U");
            builder.AppendLine("#EXT-X-PLAYLIST-TYPE:VOD");
            builder.AppendLine("#EXT-X-VERSION:3");
            builder.AppendLine("#EXT-X-TARGETDURATION:" + Math.Ceiling(segmentLengths.Length > 0 ? segmentLengths.Max() : state.SegmentLength).ToString(CultureInfo.InvariantCulture));
            builder.AppendLine("#EXT-X-MEDIA-SEQUENCE:0");

            var queryString = Request.QueryString;
            var index = 0;
            var segmentExtension = GetSegmentFileExtension(streamingRequest.SegmentContainer);

            foreach (var length in segmentLengths)
            {
                builder.AppendLine("#EXTINF:" + length.ToString("0.0000", CultureInfo.InvariantCulture) + ", nodesc");
                builder.AppendLine(
                    string.Format(
                        CultureInfo.InvariantCulture,
                        "hls1/{0}/{1}{2}{3}",
                        name,
                        index.ToString(CultureInfo.InvariantCulture),
                        segmentExtension,
                        queryString));

                index++;
            }

            builder.AppendLine("#EXT-X-ENDLIST");

            return new FileContentResult(Encoding.UTF8.GetBytes(builder.ToString()), MimeTypes.GetMimeType("playlist.m3u8"));
        }

        private async Task<ActionResult> GetDynamicSegment(StreamingRequestDto streamingRequest, int segmentId)
        {
            if ((streamingRequest.StartTimeTicks ?? 0) > 0)
            {
                throw new ArgumentException("StartTimeTicks is not allowed.");
            }

            var cancellationTokenSource = new CancellationTokenSource();
            var cancellationToken = cancellationTokenSource.Token;

            using var state = await StreamingHelpers.GetStreamingState(
                    streamingRequest,
                    Request,
                    _authContext,
                    _mediaSourceManager,
                    _userManager,
                    _libraryManager,
                    _serverConfigurationManager,
                    _mediaEncoder,
                    _fileSystem,
                    _subtitleEncoder,
                    _configuration,
                    _dlnaManager,
                    _deviceManager,
                    _transcodingJobHelper,
                    _transcodingJobType,
                    cancellationTokenSource.Token)
                .ConfigureAwait(false);

            var playlistPath = Path.ChangeExtension(state.OutputFilePath, ".m3u8");
            var segmentPath = GetSegmentPath(state, playlistPath, segmentId);
            var segmentExtension = GetSegmentFileExtension(state.Request.SegmentContainer);

            TranscodingJobDto? job;
            if (System.IO.File.Exists(segmentPath))
            {
                job = _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, _transcodingJobType);
                _logger.LogDebug("returning {0} [it exists, try 1]", segmentPath);
                return await GetSegmentResult(state, playlistPath, segmentPath, segmentExtension, segmentId, job, cancellationToken).ConfigureAwait(false);
            }

            var transcodingLock = _transcodingJobHelper.GetTranscodingLock(playlistPath);
            await transcodingLock.WaitAsync(cancellationTokenSource.Token).ConfigureAwait(false);
            var released = false;
            var startTranscoding = false;

            try
            {
                if (System.IO.File.Exists(segmentPath))
                {
                    job = _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, _transcodingJobType);
                    transcodingLock.Release();
                    released = true;
                    _logger.LogDebug("returning {0} [it exists, try 2]", segmentPath);
                    return await GetSegmentResult(state, playlistPath, segmentPath, segmentExtension, segmentId, job, cancellationToken).ConfigureAwait(false);
                }
                else
                {
                    var currentTranscodingIndex = GetCurrentTranscodingIndex(playlistPath, segmentExtension);

                    // Maximum number of segments (roughly 24 seconds of content) that a request may be ahead of
                    // the current transcoding position before a new transcoding job is started.
                    var segmentGapRequiringTranscodingChange = 24 / state.SegmentLength;
                    if (currentTranscodingIndex == null)
                    {
                        _logger.LogDebug("Starting transcoding because currentTranscodingIndex=null");
                        startTranscoding = true;
                    }
                    else if (segmentId < currentTranscodingIndex.Value)
                    {
                        _logger.LogDebug("Starting transcoding because requestedIndex={0} and currentTranscodingIndex={1}", segmentId, currentTranscodingIndex);
                        startTranscoding = true;
                    }
                    else if (segmentId - currentTranscodingIndex.Value > segmentGapRequiringTranscodingChange)
                    {
                        _logger.LogDebug("Starting transcoding because segmentGap is {0} and max allowed gap is {1}. requestedIndex={2}", segmentId - currentTranscodingIndex.Value, segmentGapRequiringTranscodingChange, segmentId);
                        startTranscoding = true;
                    }

                    if (startTranscoding)
                    {
                        // If the playlist doesn't already exist, startup ffmpeg
                        try
                        {
                            await _transcodingJobHelper.KillTranscodingJobs(streamingRequest.DeviceId, streamingRequest.PlaySessionId, p => false)
                                .ConfigureAwait(false);

                            if (currentTranscodingIndex.HasValue)
                            {
                                DeleteLastFile(playlistPath, segmentExtension, 0);
                            }

                            streamingRequest.StartTimeTicks = GetStartPositionTicks(state, segmentId);

                            state.WaitForPath = segmentPath;
                            var encodingOptions = _serverConfigurationManager.GetEncodingOptions();
                            job = await _transcodingJobHelper.StartFfMpeg(
                                state,
                                playlistPath,
                                GetCommandLineArguments(playlistPath, encodingOptions, state, true, segmentId),
                                Request,
                                _transcodingJobType,
                                cancellationTokenSource).ConfigureAwait(false);
                        }
                        catch
                        {
                            state.Dispose();
                            throw;
                        }

                        // await WaitForMinimumSegmentCount(playlistPath, 1, cancellationTokenSource.Token).ConfigureAwait(false);
                    }
                    else
                    {
                        job = _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, _transcodingJobType);
                        if (job?.TranscodingThrottler != null)
                        {
                            await job.TranscodingThrottler.UnpauseTranscoding().ConfigureAwait(false);
                        }
                    }
                }
            }
            finally
            {
                if (!released)
                {
                    transcodingLock.Release();
                }
            }

            _logger.LogDebug("returning {0} [general case]", segmentPath);
            job ??= _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, _transcodingJobType);
            return await GetSegmentResult(state, playlistPath, segmentPath, segmentExtension, segmentId, job, cancellationToken).ConfigureAwait(false);
        }

        private void AddSubtitles(StreamState state, IEnumerable<MediaStream> subtitles, StringBuilder builder)
        {
            var selectedIndex = state.SubtitleStream == null || state.SubtitleDeliveryMethod != SubtitleDeliveryMethod.Hls ? (int?)null : state.SubtitleStream.Index;
(int?)null : state.SubtitleStream.Index; const string Format = "#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID=\"subs\",NAME=\"{0}\",DEFAULT={1},FORCED={2},AUTOSELECT=YES,URI=\"{3}\",LANGUAGE=\"{4}\""; foreach (var stream in subtitles) { var name = stream.DisplayTitle; var isDefault = selectedIndex.HasValue && selectedIndex.Value == stream.Index; var isForced = stream.IsForced; var url = string.Format( CultureInfo.InvariantCulture, "{0}/Subtitles/{1}/subtitles.m3u8?SegmentLength={2}&api_key={3}", state.Request.MediaSourceId, stream.Index.ToString(CultureInfo.InvariantCulture), 30.ToString(CultureInfo.InvariantCulture), ClaimHelpers.GetToken(Request.HttpContext.User)); var line = string.Format( CultureInfo.InvariantCulture, Format, name, isDefault ? "YES" : "NO", isForced ? "YES" : "NO", url, stream.Language ?? "Unknown"); builder.AppendLine(line); } } private void AppendPlaylist(StringBuilder builder, StreamState state, string url, int bitrate, string? subtitleGroup) { builder.Append("#EXT-X-STREAM-INF:BANDWIDTH=") .Append(bitrate.ToString(CultureInfo.InvariantCulture)) .Append(",AVERAGE-BANDWIDTH=") .Append(bitrate.ToString(CultureInfo.InvariantCulture)); AppendPlaylistCodecsField(builder, state); AppendPlaylistResolutionField(builder, state); AppendPlaylistFramerateField(builder, state); if (!string.IsNullOrWhiteSpace(subtitleGroup)) { builder.Append(",SUBTITLES=\"") .Append(subtitleGroup) .Append('"'); } builder.Append(Environment.NewLine); builder.AppendLine(url); } /// /// Appends a CODECS field containing formatted strings of /// the active streams output video and audio codecs. /// /// /// /// /// StringBuilder to append the field to. /// StreamState of the current stream. private void AppendPlaylistCodecsField(StringBuilder builder, StreamState state) { // Video string videoCodecs = string.Empty; int? videoCodecLevel = GetOutputVideoCodecLevel(state); if (!string.IsNullOrEmpty(state.ActualOutputVideoCodec) && videoCodecLevel.HasValue) { videoCodecs = GetPlaylistVideoCodecs(state, state.ActualOutputVideoCodec, videoCodecLevel.Value); } // Audio string audioCodecs = string.Empty; if (!string.IsNullOrEmpty(state.ActualOutputAudioCodec)) { audioCodecs = GetPlaylistAudioCodecs(state); } StringBuilder codecs = new StringBuilder(); codecs.Append(videoCodecs) .Append(',') .Append(audioCodecs); if (codecs.Length > 1) { builder.Append(",CODECS=\"") .Append(codecs) .Append('"'); } } /// /// Appends a RESOLUTION field containing the resolution of the output stream. /// /// /// StringBuilder to append the field to. /// StreamState of the current stream. private void AppendPlaylistResolutionField(StringBuilder builder, StreamState state) { if (state.OutputWidth.HasValue && state.OutputHeight.HasValue) { builder.Append(",RESOLUTION=") .Append(state.OutputWidth.GetValueOrDefault()) .Append('x') .Append(state.OutputHeight.GetValueOrDefault()); } } /// /// Appends a FRAME-RATE field containing the framerate of the output stream. /// /// /// StringBuilder to append the field to. /// StreamState of the current stream. private void AppendPlaylistFramerateField(StringBuilder builder, StreamState state) { double? 
framerate = null; if (state.TargetFramerate.HasValue) { framerate = Math.Round(state.TargetFramerate.GetValueOrDefault(), 3); } else if (state.VideoStream?.RealFrameRate != null) { framerate = Math.Round(state.VideoStream.RealFrameRate.GetValueOrDefault(), 3); } if (framerate.HasValue) { builder.Append(",FRAME-RATE=") .Append(framerate.Value); } } private bool EnableAdaptiveBitrateStreaming(StreamState state, bool isLiveStream, bool enableAdaptiveBitrateStreaming) { // Within the local network this will likely do more harm than good. var ip = RequestHelpers.NormalizeIp(Request.HttpContext.Connection.RemoteIpAddress).ToString(); if (_networkManager.IsInLocalNetwork(ip)) { return false; } if (!enableAdaptiveBitrateStreaming) { return false; } if (isLiveStream || string.IsNullOrWhiteSpace(state.MediaPath)) { // Opening live streams is so slow it's not even worth it return false; } if (EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) { return false; } if (EncodingHelper.IsCopyCodec(state.OutputAudioCodec)) { return false; } if (!state.IsOutputVideo) { return false; } // Having problems in android return false; // return state.VideoRequest.VideoBitRate.HasValue; } /// /// Get the H.26X level of the output video stream. /// /// StreamState of the current stream. /// H.26X level of the output video stream. private int? GetOutputVideoCodecLevel(StreamState state) { string? levelString; if (EncodingHelper.IsCopyCodec(state.OutputVideoCodec) && state.VideoStream.Level.HasValue) { levelString = state.VideoStream?.Level.ToString(); } else { levelString = state.GetRequestedLevel(state.ActualOutputVideoCodec); } if (int.TryParse(levelString, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedLevel)) { return parsedLevel; } return null; } /// /// Gets a formatted string of the output audio codec, for use in the CODECS field. /// /// /// /// StreamState of the current stream. /// Formatted audio codec string. private string GetPlaylistAudioCodecs(StreamState state) { if (string.Equals(state.ActualOutputAudioCodec, "aac", StringComparison.OrdinalIgnoreCase)) { string? profile = state.GetRequestedProfiles("aac").FirstOrDefault(); return HlsCodecStringHelpers.GetAACString(profile); } if (string.Equals(state.ActualOutputAudioCodec, "mp3", StringComparison.OrdinalIgnoreCase)) { return HlsCodecStringHelpers.GetMP3String(); } if (string.Equals(state.ActualOutputAudioCodec, "ac3", StringComparison.OrdinalIgnoreCase)) { return HlsCodecStringHelpers.GetAC3String(); } if (string.Equals(state.ActualOutputAudioCodec, "eac3", StringComparison.OrdinalIgnoreCase)) { return HlsCodecStringHelpers.GetEAC3String(); } return string.Empty; } /// /// Gets a formatted string of the output video codec, for use in the CODECS field. /// /// /// /// StreamState of the current stream. /// Video codec. /// Video level. /// Formatted video codec string. 
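/// For example, an H.264 High profile stream at level 41 would typically be rendered as "avc1.640029"
/// (illustrative value only; the exact string is produced by HlsCodecStringHelpers.GetH264String).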
            private string GetPlaylistVideoCodecs(StreamState state, string codec, int level)
            {
                if (level == 0)
                {
                    // This is 0 when there's no requested H.26X level in the device profile
                    // and the source is not encoded in H.26X
                    _logger.LogError("Got invalid H.26X level when building CODECS field for HLS master playlist");
                    return string.Empty;
                }

                if (string.Equals(codec, "h264", StringComparison.OrdinalIgnoreCase))
                {
                    string profile = state.GetRequestedProfiles("h264").FirstOrDefault();
                    return HlsCodecStringHelpers.GetH264String(profile, level);
                }

                if (string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase)
                    || string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase))
                {
                    string profile = state.GetRequestedProfiles("h265").FirstOrDefault();
                    return HlsCodecStringHelpers.GetH265String(profile, level);
                }

                return string.Empty;
            }

            private int GetBitrateVariation(int bitrate)
            {
                // By default, vary by just 50k
                var variation = 50000;

                if (bitrate >= 10000000)
                {
                    variation = 2000000;
                }
                else if (bitrate >= 5000000)
                {
                    variation = 1500000;
                }
                else if (bitrate >= 3000000)
                {
                    variation = 1000000;
                }
                else if (bitrate >= 2000000)
                {
                    variation = 500000;
                }
                else if (bitrate >= 1000000)
                {
                    variation = 300000;
                }
                else if (bitrate >= 600000)
                {
                    variation = 200000;
                }
                else if (bitrate >= 400000)
                {
                    variation = 100000;
                }

                return variation;
            }

            private string ReplaceBitrate(string url, int oldValue, int newValue)
            {
                return url.Replace(
                    "videobitrate=" + oldValue.ToString(CultureInfo.InvariantCulture),
                    "videobitrate=" + newValue.ToString(CultureInfo.InvariantCulture),
                    StringComparison.OrdinalIgnoreCase);
            }

            // Splits the total runtime into SegmentLength-sized pieces; the final entry holds any remainder.
            private double[] GetSegmentLengths(StreamState state)
            {
                var result = new List<double>();

                var ticks = state.RunTimeTicks ?? 0;
                var segmentLengthTicks = TimeSpan.FromSeconds(state.SegmentLength).Ticks;

                while (ticks > 0)
                {
                    var length = ticks >= segmentLengthTicks ? segmentLengthTicks : ticks;
                    result.Add(TimeSpan.FromTicks(length).TotalSeconds);
                    ticks -= length;
                }

                return result.ToArray();
            }

            private string GetCommandLineArguments(string outputPath, EncodingOptions encodingOptions, StreamState state, bool isEncoding, int startNumber)
            {
                var videoCodec = _encodingHelper.GetVideoEncoder(state, encodingOptions);
                var threads = _encodingHelper.GetNumberOfThreads(state, encodingOptions, videoCodec);

                if (state.BaseRequest.BreakOnNonKeyFrames)
                {
                    // FIXME: this is actually a workaround, as ideally it really should be the client which decides whether non-keyframe
                    // breakpoints are supported; but current implementation always uses "ffmpeg input seeking" which is liable
                    // to produce a missing part of video stream before first keyframe is encountered, which may lead to
                    // awkward cases like a few starting HLS segments having no video whatsoever, which breaks hls.js
                    _logger.LogInformation("Current HLS implementation doesn't support non-keyframe breaks but one is requested, ignoring that request");
                    state.BaseRequest.BreakOnNonKeyFrames = false;
                }

                var inputModifier = _encodingHelper.GetInputModifier(state, encodingOptions);

                // If isEncoding is true we're actually starting ffmpeg
                var startNumberParam = isEncoding ? startNumber.ToString(CultureInfo.InvariantCulture) : "0";

                var mapArgs = state.IsOutputVideo ? _encodingHelper.GetMapArgs(state) : string.Empty;

                var outputTsArg = Path.Combine(Path.GetDirectoryName(outputPath), Path.GetFileNameWithoutExtension(outputPath)) + "%d" + GetSegmentFileExtension(state.Request.SegmentContainer);

                var segmentFormat = GetSegmentFileExtension(state.Request.SegmentContainer).TrimStart('.');
                if (string.Equals(segmentFormat, "ts", StringComparison.OrdinalIgnoreCase))
                {
                    segmentFormat = "mpegts";
                }

                return string.Format(
                    CultureInfo.InvariantCulture,
                    "{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -copyts -avoid_negative_ts disabled -f hls -max_delay 5000000 -hls_time {6} -individual_header_trailer 0 -hls_segment_type {7} -start_number {8} -hls_segment_filename \"{9}\" -hls_playlist_type vod -hls_list_size 0 -y \"{10}\"",
                    inputModifier,
                    _encodingHelper.GetInputArgument(state, encodingOptions),
                    threads,
                    mapArgs,
                    GetVideoArguments(state, encodingOptions, startNumber),
                    GetAudioArguments(state, encodingOptions),
                    state.SegmentLength.ToString(CultureInfo.InvariantCulture),
                    segmentFormat,
                    startNumberParam,
                    outputTsArg,
                    outputPath).Trim();
            }

            private string GetAudioArguments(StreamState state, EncodingOptions encodingOptions)
            {
                var audioCodec = _encodingHelper.GetAudioEncoder(state);

                if (!state.IsOutputVideo)
                {
                    if (EncodingHelper.IsCopyCodec(audioCodec))
                    {
                        return "-acodec copy";
                    }

                    var audioTranscodeParams = new List<string>();

                    audioTranscodeParams.Add("-acodec " + audioCodec);

                    if (state.OutputAudioBitrate.HasValue)
                    {
                        audioTranscodeParams.Add("-ab " + state.OutputAudioBitrate.Value.ToString(CultureInfo.InvariantCulture));
                    }

                    if (state.OutputAudioChannels.HasValue)
                    {
                        audioTranscodeParams.Add("-ac " + state.OutputAudioChannels.Value.ToString(CultureInfo.InvariantCulture));
                    }

                    if (state.OutputAudioSampleRate.HasValue)
                    {
                        audioTranscodeParams.Add("-ar " + state.OutputAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture));
                    }

                    audioTranscodeParams.Add("-vn");
                    return string.Join(" ", audioTranscodeParams.ToArray());
                }

                if (EncodingHelper.IsCopyCodec(audioCodec))
                {
                    var videoCodec = _encodingHelper.GetVideoEncoder(state, encodingOptions);

                    if (EncodingHelper.IsCopyCodec(videoCodec) && state.EnableBreakOnNonKeyFrames(videoCodec))
                    {
                        return "-codec:a:0 copy -copypriorss:a:0 0";
                    }

                    return "-codec:a:0 copy";
                }

                var args = "-codec:a:0 " + audioCodec;

                var channels = state.OutputAudioChannels;
                if (channels.HasValue)
                {
                    args += " -ac " + channels.Value;
                }

                var bitrate = state.OutputAudioBitrate;
                if (bitrate.HasValue)
                {
                    args += " -ab " + bitrate.Value.ToString(CultureInfo.InvariantCulture);
                }

                if (state.OutputAudioSampleRate.HasValue)
                {
                    args += " -ar " + state.OutputAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture);
                }

                args += " " + _encodingHelper.GetAudioFilterParam(state, encodingOptions, true);

                return args;
            }

            private string GetVideoArguments(StreamState state, EncodingOptions encodingOptions, int startNumber)
            {
                if (!state.IsOutputVideo)
                {
                    return string.Empty;
                }

                var codec = _encodingHelper.GetVideoEncoder(state, encodingOptions);

                var args = "-codec:v:0 " + codec;

                // if (state.EnableMpegtsM2TsMode)
                // {
                //     args += " -mpegts_m2ts_mode 1";
                // }

                // See if we can save some cpu cycles by avoiding encoding
                if (EncodingHelper.IsCopyCodec(codec))
                {
                    if (state.VideoStream != null && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
                    {
                        string bitStreamArgs = _encodingHelper.GetBitStreamArgs(state.VideoStream);
                        if (!string.IsNullOrEmpty(bitStreamArgs))
                        {
                            args += " " + bitStreamArgs;
                        }
                    }

                    // args += " -flags -global_header";
                }
                else
                {
                    var gopArg = string.Empty;
                    var keyFrameArg = string.Format(
                        CultureInfo.InvariantCulture,
                        " -force_key_frames:0 \"expr:gte(t,{0}+n_forced*{1})\"",
                        startNumber * state.SegmentLength,
                        state.SegmentLength);

                    var framerate = state.VideoStream?.RealFrameRate;
                    if (framerate.HasValue)
                    {
                        // This is to make sure keyframe interval is limited to our segment,
                        // as forcing keyframes is not enough.
                        // Example: we encoded half of desired length, then codec detected
                        // scene cut and inserted a keyframe; next forced keyframe would
                        // be created outside of segment, which breaks seeking
                        // -sc_threshold 0 is used to prevent the hardware encoder from post processing to break the set keyframe
                        gopArg = string.Format(
                            CultureInfo.InvariantCulture,
                            " -g {0} -keyint_min {0} -sc_threshold 0",
                            Math.Ceiling(state.SegmentLength * framerate.Value));
                    }

                    args += " " + _encodingHelper.GetVideoQualityParam(state, codec, encodingOptions, "veryfast");

                    // Unable to force key frames using these hw encoders, set key frames by GOP
                    if (string.Equals(codec, "h264_qsv", StringComparison.OrdinalIgnoreCase)
                        || string.Equals(codec, "h264_nvenc", StringComparison.OrdinalIgnoreCase)
                        || string.Equals(codec, "h264_amf", StringComparison.OrdinalIgnoreCase))
                    {
                        args += " " + gopArg;
                    }
                    else
                    {
                        args += " " + keyFrameArg + gopArg;
                    }

                    // args += " -mixed-refs 0 -refs 3 -x264opts b_pyramid=0:weightb=0:weightp=0";

                    var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;

                    // This is for graphical subs
                    if (hasGraphicalSubs)
                    {
                        args += _encodingHelper.GetGraphicalSubtitleParam(state, encodingOptions, codec);
                    }

                    // Add resolution params, if specified
                    else
                    {
                        args += _encodingHelper.GetOutputSizeParam(state, encodingOptions, codec);
                    }

                    // -start_at_zero is necessary to use with -ss when seeking,
                    // otherwise the target position cannot be determined.
                    if (!(state.SubtitleStream != null && state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
                    {
                        args += " -start_at_zero";
                    }

                    // args += " -flags -global_header";
                }

                if (!string.IsNullOrEmpty(state.OutputVideoSync))
                {
                    args += " -vsync " + state.OutputVideoSync;
                }

                args += _encodingHelper.GetOutputFFlags(state);

                return args;
            }

            private string GetSegmentFileExtension(string? segmentContainer)
            {
                if (!string.IsNullOrWhiteSpace(segmentContainer))
                {
                    return "." + segmentContainer;
                }

                return ".ts";
            }

            private string GetSegmentPath(StreamState state, string playlist, int index)
            {
                var folder = Path.GetDirectoryName(playlist);
                var filename = Path.GetFileNameWithoutExtension(playlist);

                return Path.Combine(folder, filename + index.ToString(CultureInfo.InvariantCulture) + GetSegmentFileExtension(state.Request.SegmentContainer));
            }

            private async Task<ActionResult> GetSegmentResult(
                StreamState state,
                string playlistPath,
                string segmentPath,
                string segmentExtension,
                int segmentIndex,
                TranscodingJobDto?
transcodingJob, CancellationToken cancellationToken) { var segmentExists = System.IO.File.Exists(segmentPath); if (segmentExists) { if (transcodingJob != null && transcodingJob.HasExited) { // Transcoding job is over, so assume all existing files are ready _logger.LogDebug("serving up {0} as transcode is over", segmentPath); return GetSegmentResult(state, segmentPath, segmentIndex, transcodingJob); } var currentTranscodingIndex = GetCurrentTranscodingIndex(playlistPath, segmentExtension); // If requested segment is less than transcoding position, we can't transcode backwards, so assume it's ready if (segmentIndex < currentTranscodingIndex) { _logger.LogDebug("serving up {0} as transcode index {1} is past requested point {2}", segmentPath, currentTranscodingIndex, segmentIndex); return GetSegmentResult(state, segmentPath, segmentIndex, transcodingJob); } } var nextSegmentPath = GetSegmentPath(state, playlistPath, segmentIndex + 1); if (transcodingJob != null) { while (!cancellationToken.IsCancellationRequested && !transcodingJob.HasExited) { // To be considered ready, the segment file has to exist AND // either the transcoding job should be done or next segment should also exist if (segmentExists) { if (transcodingJob.HasExited || System.IO.File.Exists(nextSegmentPath)) { _logger.LogDebug("serving up {0} as it deemed ready", segmentPath); return GetSegmentResult(state, segmentPath, segmentIndex, transcodingJob); } } else { segmentExists = System.IO.File.Exists(segmentPath); if (segmentExists) { continue; // avoid unnecessary waiting if segment just became available } } await Task.Delay(100, cancellationToken).ConfigureAwait(false); } if (!System.IO.File.Exists(segmentPath)) { _logger.LogWarning("cannot serve {0} as transcoding quit before we got there", segmentPath); } else { _logger.LogDebug("serving {0} as it's on disk and transcoding stopped", segmentPath); } cancellationToken.ThrowIfCancellationRequested(); } else { _logger.LogWarning("cannot serve {0} as it doesn't exist and no transcode is running", segmentPath); } return GetSegmentResult(state, segmentPath, segmentIndex, transcodingJob); } private ActionResult GetSegmentResult(StreamState state, string segmentPath, int index, TranscodingJobDto? transcodingJob) { var segmentEndingPositionTicks = GetEndPositionTicks(state, index); Response.OnCompleted(() => { _logger.LogDebug("finished serving {0}", segmentPath); if (transcodingJob != null) { transcodingJob.DownloadPositionTicks = Math.Max(transcodingJob.DownloadPositionTicks ?? segmentEndingPositionTicks, segmentEndingPositionTicks); _transcodingJobHelper.OnTranscodeEndRequest(transcodingJob); } return Task.CompletedTask; }); return FileStreamResponseHelpers.GetStaticFileResult(segmentPath, MimeTypes.GetMimeType(segmentPath)!, false, this); } private long GetEndPositionTicks(StreamState state, int requestedIndex) { double startSeconds = 0; var lengths = GetSegmentLengths(state); if (requestedIndex >= lengths.Length) { var msg = string.Format( CultureInfo.InvariantCulture, "Invalid segment index requested: {0} - Segment count: {1}", requestedIndex, lengths.Length); throw new ArgumentException(msg); } for (var i = 0; i <= requestedIndex; i++) { startSeconds += lengths[i]; } return TimeSpan.FromSeconds(startSeconds).Ticks; } private int? 
GetCurrentTranscodingIndex(string playlist, string segmentExtension) { var job = _transcodingJobHelper.GetTranscodingJob(playlist, _transcodingJobType); if (job == null || job.HasExited) { return null; } var file = GetLastTranscodingFile(playlist, segmentExtension, _fileSystem); if (file == null) { return null; } var playlistFilename = Path.GetFileNameWithoutExtension(playlist); var indexString = Path.GetFileNameWithoutExtension(file.Name).Substring(playlistFilename.Length); return int.Parse(indexString, NumberStyles.Integer, CultureInfo.InvariantCulture); } private static FileSystemMetadata? GetLastTranscodingFile(string playlist, string segmentExtension, IFileSystem fileSystem) { var folder = Path.GetDirectoryName(playlist); var filePrefix = Path.GetFileNameWithoutExtension(playlist) ?? string.Empty; try { return fileSystem.GetFiles(folder, new[] { segmentExtension }, true, false) .Where(i => Path.GetFileNameWithoutExtension(i.Name).StartsWith(filePrefix, StringComparison.OrdinalIgnoreCase)) .OrderByDescending(fileSystem.GetLastWriteTimeUtc) .FirstOrDefault(); } catch (IOException) { return null; } } private void DeleteLastFile(string playlistPath, string segmentExtension, int retryCount) { var file = GetLastTranscodingFile(playlistPath, segmentExtension, _fileSystem); if (file != null) { DeleteFile(file.FullName, retryCount); } } private void DeleteFile(string path, int retryCount) { if (retryCount >= 5) { return; } _logger.LogDebug("Deleting partial HLS file {path}", path); try { _fileSystem.DeleteFile(path); } catch (IOException ex) { _logger.LogError(ex, "Error deleting partial stream file(s) {path}", path); var task = Task.Delay(100); Task.WaitAll(task); DeleteFile(path, retryCount + 1); } catch (Exception ex) { _logger.LogError(ex, "Error deleting partial stream file(s) {path}", path); } } private long GetStartPositionTicks(StreamState state, int requestedIndex) { double startSeconds = 0; var lengths = GetSegmentLengths(state); if (requestedIndex >= lengths.Length) { var msg = string.Format( CultureInfo.InvariantCulture, "Invalid segment index requested: {0} - Segment count: {1}", requestedIndex, lengths.Length); throw new ArgumentException(msg); } for (var i = 0; i < requestedIndex; i++) { startSeconds += lengths[i]; } var position = TimeSpan.FromSeconds(startSeconds).Ticks; return position; } } }
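
// Illustrative sketch (not part of the controller): the media playlist produced by GetVariantPlaylistInternal
// for a hypothetical two-segment stream named "main" with 3-second segments would look roughly like the text
// below; the client then requests each "hls1/..." URI, which is handled by GetDynamicSegment.
//
//   #EXTM3U
//   #EXT-X-PLAYLIST-TYPE:VOD
//   #EXT-X-VERSION:3
//   #EXT-X-TARGETDURATION:3
//   #EXT-X-MEDIA-SEQUENCE:0
//   #EXTINF:3.0000, nodesc
//   hls1/main/0.ts?<original query string>
//   #EXTINF:3.0000, nodesc
//   hls1/main/1.ts?<original query string>
//   #EXT-X-ENDLIST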