Merge branch 'master' into sort-nfo-data

commit d04f255e71
Marc Brooks, 2024-03-18 14:48:56 -05:00, committed by GitHub
137 changed files with 788 additions and 4037 deletions


@@ -1,93 +0,0 @@
parameters:
- name: Packages
type: object
default: {}
- name: LinuxImage
type: string
default: "ubuntu-latest"
- name: DotNetSdkVersion
type: string
default: 8.0.x
jobs:
- job: CompatibilityCheck
displayName: Compatibility Check
dependsOn: Build
condition: and(succeeded(), variables['System.PullRequest.PullRequestNumber'])
pool:
vmImage: "${{ parameters.LinuxImage }}"
strategy:
matrix:
${{ each Package in parameters.Packages }}:
${{ Package.key }}:
NugetPackageName: ${{ Package.value.NugetPackageName }}
AssemblyFileName: ${{ Package.value.AssemblyFileName }}
maxParallel: 2
steps:
- checkout: none
- task: UseDotNet@2
displayName: "Update DotNet"
inputs:
packageType: sdk
version: ${{ parameters.DotNetSdkVersion }}
- task: DotNetCoreCLI@2
displayName: 'Install ABI CompatibilityChecker Tool'
inputs:
command: custom
custom: tool
arguments: 'update compatibilitychecker -g'
- task: DownloadPipelineArtifact@2
displayName: 'Download New Assembly Build Artifact'
inputs:
source: 'current'
artifact: "$(NugetPackageName)"
path: "$(System.ArtifactsDirectory)/new-artifacts"
runVersion: "latest"
- task: CopyFiles@2
displayName: 'Copy New Assembly Build Artifact'
inputs:
sourceFolder: $(System.ArtifactsDirectory)/new-artifacts
contents: '**/*.dll'
targetFolder: $(System.ArtifactsDirectory)/new-release
cleanTargetFolder: true
overWrite: true
flattenFolders: true
- task: DownloadPipelineArtifact@2
displayName: 'Download Reference Assembly Build Artifact'
enabled: false
inputs:
source: "specific"
artifact: "$(NugetPackageName)"
path: "$(System.ArtifactsDirectory)/current-artifacts"
project: "$(System.TeamProjectId)"
pipeline: "$(System.DefinitionId)"
runVersion: "latestFromBranch"
runBranch: "refs/heads/$(System.PullRequest.TargetBranch)"
- task: CopyFiles@2
displayName: 'Copy Reference Assembly Build Artifact'
enabled: false
inputs:
sourceFolder: $(System.ArtifactsDirectory)/current-artifacts
contents: '**/*.dll'
targetFolder: $(System.ArtifactsDirectory)/current-release
cleanTargetFolder: true
overWrite: true
flattenFolders: true
- task: DotNetCoreCLI@2
displayName: 'Execute ABI Compatibility Check Tool'
enabled: false
inputs:
command: custom
custom: compat
arguments: 'current-release/$(AssemblyFileName) new-release/$(AssemblyFileName) --azure-pipelines --warnings-only'
workingDirectory: $(System.ArtifactsDirectory)


@@ -1,71 +0,0 @@
parameters:
LinuxImage: 'ubuntu-latest'
RestoreBuildProjects: 'Jellyfin.Server/Jellyfin.Server.csproj'
DotNetSdkVersion: 8.0.x
jobs:
- job: Build
displayName: Build
strategy:
matrix:
Release:
BuildConfiguration: Release
Debug:
BuildConfiguration: Debug
pool:
vmImage: '${{ parameters.LinuxImage }}'
steps:
- checkout: self
clean: true
submodules: true
persistCredentials: true
- task: UseDotNet@2
displayName: 'Update DotNet'
inputs:
packageType: sdk
version: ${{ parameters.DotNetSdkVersion }}
- task: DotNetCoreCLI@2
displayName: 'Publish Server'
inputs:
command: publish
publishWebProjects: false
projects: '${{ parameters.RestoreBuildProjects }}'
arguments: '--configuration $(BuildConfiguration) --output $(Build.ArtifactStagingDirectory)'
zipAfterPublish: false
- task: PublishPipelineArtifact@1
displayName: 'Publish Artifact Naming'
condition: and(succeeded(), eq(variables['BuildConfiguration'], 'Release'))
inputs:
targetPath: '$(build.ArtifactStagingDirectory)/Jellyfin.Server/Emby.Naming.dll'
artifactName: 'Jellyfin.Naming'
- task: PublishPipelineArtifact@1
displayName: 'Publish Artifact Controller'
condition: and(succeeded(), eq(variables['BuildConfiguration'], 'Release'))
inputs:
targetPath: '$(build.ArtifactStagingDirectory)/Jellyfin.Server/MediaBrowser.Controller.dll'
artifactName: 'Jellyfin.Controller'
- task: PublishPipelineArtifact@1
displayName: 'Publish Artifact Model'
condition: and(succeeded(), eq(variables['BuildConfiguration'], 'Release'))
inputs:
targetPath: '$(build.ArtifactStagingDirectory)/Jellyfin.Server/MediaBrowser.Model.dll'
artifactName: 'Jellyfin.Model'
- task: PublishPipelineArtifact@1
displayName: 'Publish Artifact Common'
condition: and(succeeded(), eq(variables['BuildConfiguration'], 'Release'))
inputs:
targetPath: '$(build.ArtifactStagingDirectory)/Jellyfin.Server/MediaBrowser.Common.dll'
artifactName: 'Jellyfin.Common'
- task: PublishPipelineArtifact@1
displayName: 'Publish Artifact Extensions'
condition: and(succeeded(), eq(variables['BuildConfiguration'], 'Release'))
inputs:
targetPath: '$(build.ArtifactStagingDirectory)/Jellyfin.Server/Jellyfin.Extensions.dll'
artifactName: 'Jellyfin.Extensions'


@@ -1,274 +0,0 @@
jobs:
- job: BuildPackage
displayName: 'Build Packages'
strategy:
matrix:
CentOS.amd64:
BuildConfiguration: centos.amd64
Fedora.amd64:
BuildConfiguration: fedora.amd64
Debian.amd64:
BuildConfiguration: debian.amd64
Debian.arm64:
BuildConfiguration: debian.arm64
Debian.armhf:
BuildConfiguration: debian.armhf
Ubuntu.amd64:
BuildConfiguration: ubuntu.amd64
Ubuntu.arm64:
BuildConfiguration: ubuntu.arm64
Ubuntu.armhf:
BuildConfiguration: ubuntu.armhf
Linux.amd64:
BuildConfiguration: linux.amd64
Linux.amd64-musl:
BuildConfiguration: linux.amd64-musl
Linux.arm64:
BuildConfiguration: linux.arm64
Linux.musl-linux-arm64:
BuildConfiguration: linux.musl-linux-arm64
Linux.armhf:
BuildConfiguration: linux.armhf
Windows.amd64:
BuildConfiguration: windows.amd64
MacOS.amd64:
BuildConfiguration: macos.amd64
MacOS.arm64:
BuildConfiguration: macos.arm64
Portable:
BuildConfiguration: portable
pool:
vmImage: 'ubuntu-latest'
steps:
- script: echo "##vso[task.setvariable variable=JellyfinVersion]$( awk -F '/' '{ print $NF }' <<<'$(Build.SourceBranch)' | sed 's/^v//' )"
displayName: Set release version (stable)
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
- script: 'docker build -f deployment/Dockerfile.$(BuildConfiguration) -t jellyfin-server-$(BuildConfiguration) --label "org.opencontainers.image.url=$(Build.Repository.Uri)" --label "org.opencontainers.image.revision=$(Build.SourceVersion)" deployment'
displayName: 'Build Dockerfile'
- script: 'docker image ls -a && docker run -v $(pwd)/deployment/dist:/dist -v $(pwd):/jellyfin -e IS_UNSTABLE="yes" -e BUILD_ID=$(Build.BuildNumber) jellyfin-server-$(BuildConfiguration)'
displayName: 'Run Dockerfile (unstable)'
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
- script: 'docker image ls -a && docker run -v $(pwd)/deployment/dist:/dist -v $(pwd):/jellyfin -e IS_UNSTABLE="no" -e BUILD_ID=$(Build.BuildNumber) jellyfin-server-$(BuildConfiguration)'
displayName: 'Run Dockerfile (stable)'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
- task: PublishPipelineArtifact@1
displayName: 'Publish Release'
inputs:
targetPath: '$(Build.SourcesDirectory)/deployment/dist'
artifactName: 'jellyfin-server-$(BuildConfiguration)'
- task: SSH@0
displayName: 'Create target directory on repository server'
inputs:
sshEndpoint: repository
runOptions: 'inline'
inline: 'mkdir -p /srv/repository/incoming/azure/$(Build.BuildNumber)/$(BuildConfiguration)'
- task: CopyFilesOverSSH@0
displayName: 'Upload artifacts to repository server'
inputs:
sshEndpoint: repository
sourceFolder: '$(Build.SourcesDirectory)/deployment/dist'
contents: '**'
targetFolder: '/srv/repository/incoming/azure/$(Build.BuildNumber)/$(BuildConfiguration)'
- job: OpenAPISpec
dependsOn: Test
condition: or(startsWith(variables['Build.SourceBranch'], 'refs/heads/master'),startsWith(variables['Build.SourceBranch'], 'refs/tags/v'))
displayName: 'Push OpenAPI Spec to repository'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: echo "##vso[task.setvariable variable=JellyfinVersion]$( awk -F '/' '{ print $NF }' <<<'$(Build.SourceBranch)' | sed 's/^v//' )"
displayName: Set release version (stable)
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
- task: DownloadPipelineArtifact@2
displayName: 'Download OpenAPI Spec'
inputs:
source: 'current'
artifact: "OpenAPI Spec"
path: "$(System.ArtifactsDirectory)/openapispec"
runVersion: "latest"
- task: SSH@0
displayName: 'Create target directory on repository server'
inputs:
sshEndpoint: repository
runOptions: 'inline'
inline: 'mkdir -p /srv/repository/incoming/azure/$(Build.BuildNumber)'
- task: CopyFilesOverSSH@0
displayName: 'Upload artifacts to repository server'
inputs:
sshEndpoint: repository
sourceFolder: '$(System.ArtifactsDirectory)/openapispec'
contents: 'openapi.json'
targetFolder: '/srv/repository/incoming/azure/$(Build.BuildNumber)'
- job: BuildDocker
displayName: 'Build Docker'
strategy:
matrix:
amd64:
BuildConfiguration: amd64
arm64:
BuildConfiguration: arm64
armhf:
BuildConfiguration: armhf
pool:
vmImage: 'ubuntu-latest'
variables:
- name: JellyfinVersion
value: 0.0.0
steps:
- script: echo "##vso[task.setvariable variable=JellyfinVersion]$( awk -F '/' '{ print $NF }' <<<'$(Build.SourceBranch)' | sed 's/^v//' )"
displayName: Set release version (stable)
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
- task: Docker@2
displayName: 'Push Unstable Image'
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
inputs:
repository: 'jellyfin/jellyfin-server'
command: buildAndPush
buildContext: '.'
Dockerfile: 'deployment/Dockerfile.docker.$(BuildConfiguration)'
containerRegistry: Docker Hub
tags: |
unstable-$(Build.BuildNumber)-$(BuildConfiguration)
unstable-$(BuildConfiguration)
- task: Docker@2
displayName: 'Push Stable Image'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
inputs:
repository: 'jellyfin/jellyfin-server'
command: buildAndPush
buildContext: '.'
Dockerfile: 'deployment/Dockerfile.docker.$(BuildConfiguration)'
containerRegistry: Docker Hub
tags: |
stable-$(Build.BuildNumber)-$(BuildConfiguration)
$(JellyfinVersion)-$(BuildConfiguration)
- job: CollectArtifacts
timeoutInMinutes: 20
displayName: 'Collect Artifacts'
condition: succeededOrFailed()
continueOnError: true
dependsOn:
- BuildPackage
- BuildDocker
pool:
vmImage: 'ubuntu-latest'
steps:
- task: SSH@0
displayName: 'Update Unstable Repository'
continueOnError: true
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
inputs:
sshEndpoint: repository
runOptions: 'commands'
commands: nohup sudo /srv/repository/collect-server.azure.sh /srv/repository/incoming/azure $(Build.BuildNumber) unstable &
- task: SSH@0
displayName: 'Update Stable Repository'
continueOnError: true
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
inputs:
sshEndpoint: repository
runOptions: 'commands'
commands: nohup sudo /srv/repository/collect-server.azure.sh /srv/repository/incoming/azure $(Build.BuildNumber) $(Build.SourceBranch) &
- job: PublishNuget
displayName: 'Publish NuGet packages'
pool:
vmImage: 'ubuntu-latest'
variables:
- name: JellyfinVersion
value: $[replace(variables['Build.SourceBranch'],'refs/tags/v','')]
steps:
- task: UseDotNet@2
displayName: 'Use .NET 8.0 sdk'
inputs:
packageType: 'sdk'
version: '8.0.x'
- task: DotNetCoreCLI@2
displayName: 'Build Stable Nuget packages'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
inputs:
command: 'custom'
projects: |
Jellyfin.Data/Jellyfin.Data.csproj
MediaBrowser.Common/MediaBrowser.Common.csproj
MediaBrowser.Controller/MediaBrowser.Controller.csproj
MediaBrowser.Model/MediaBrowser.Model.csproj
Emby.Naming/Emby.Naming.csproj
src/Jellyfin.Extensions/Jellyfin.Extensions.csproj
custom: 'pack'
arguments: -o $(Build.ArtifactStagingDirectory) -p:Version=$(JellyfinVersion)
- task: DotNetCoreCLI@2
displayName: 'Build Unstable Nuget packages'
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
inputs:
command: 'custom'
projects: |
Jellyfin.Data/Jellyfin.Data.csproj
MediaBrowser.Common/MediaBrowser.Common.csproj
MediaBrowser.Controller/MediaBrowser.Controller.csproj
MediaBrowser.Model/MediaBrowser.Model.csproj
Emby.Naming/Emby.Naming.csproj
src/Jellyfin.Extensions/Jellyfin.Extensions.csproj
custom: 'pack'
arguments: '--version-suffix $(Build.BuildNumber) -o $(Build.ArtifactStagingDirectory) -p:Stability=Unstable'
- task: PublishBuildArtifacts@1
displayName: 'Publish Nuget packages'
inputs:
pathToPublish: $(Build.ArtifactStagingDirectory)
artifactName: Jellyfin Nuget Packages
- task: NuGetCommand@2
displayName: 'Push Nuget packages to stable feed'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags/v')
inputs:
command: 'push'
packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg'
nuGetFeedType: 'external'
publishFeedCredentials: 'NugetOrg'
allowPackageConflicts: true # This ignores an error if the version already exists
- task: NuGetAuthenticate@1
displayName: 'Authenticate to unstable Nuget feed'
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
- task: NuGetCommand@2
displayName: 'Push Nuget packages to unstable feed'
condition: startsWith(variables['Build.SourceBranch'], 'refs/heads/master')
inputs:
command: 'push'
packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg' # No symbols since Azure Artifact does not support it
nuGetFeedType: 'internal'
publishVstsFeed: '7cce6c46-d610-45e3-9fb7-65a6bfd1b671/a5746b79-f369-42db-93ff-59cd066f9327'
allowPackageConflicts: true # This ignores an error if the version already exists
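The "Set release version (stable)" steps in the jobs above derive JellyfinVersion from the pushed tag ref with an awk/sed pipeline. As a worked illustration only (the helper below is hypothetical and not part of the pipeline), the same transformation in C#:

```csharp
using System;

// Hypothetical C# equivalent of the awk/sed step above: keep the last
// '/'-separated segment of Build.SourceBranch and strip a leading 'v'.
static string VersionFromSourceBranch(string sourceBranch)
{
    var lastSegment = sourceBranch[(sourceBranch.LastIndexOf('/') + 1)..];
    return lastSegment.StartsWith('v') ? lastSegment[1..] : lastSegment;
}

Console.WriteLine(VersionFromSourceBranch("refs/tags/v10.8.13")); // 10.8.13
```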


@@ -1,98 +0,0 @@
parameters:
- name: ImageNames
type: object
default:
Linux: "ubuntu-latest"
Windows: "windows-latest"
macOS: "macos-latest"
- name: TestProjects
type: string
default: "tests/**/*Tests.csproj"
- name: DotNetSdkVersion
type: string
default: 8.0.x
jobs:
- job: Test
displayName: Test
strategy:
matrix:
${{ each imageName in parameters.ImageNames }}:
${{ imageName.key }}:
ImageName: ${{ imageName.value }}
pool:
vmImage: "$(ImageName)"
steps:
- checkout: self
clean: true
submodules: true
persistCredentials: false
# This is required for the SonarCloud analyzer
- task: UseDotNet@2
displayName: "Install .NET SDK 5.x"
condition: eq(variables['ImageName'], 'ubuntu-latest')
inputs:
packageType: sdk
version: '5.x'
- task: UseDotNet@2
displayName: "Update DotNet"
inputs:
packageType: sdk
version: ${{ parameters.DotNetSdkVersion }}
- task: SonarCloudPrepare@1
displayName: 'Prepare analysis on SonarCloud'
condition: eq(variables['ImageName'], 'ubuntu-latest')
enabled: false
inputs:
SonarCloud: 'Sonarcloud for Jellyfin'
organization: 'jellyfin'
projectKey: 'jellyfin_jellyfin'
- task: DotNetCoreCLI@2
displayName: 'Run CLI Tests'
inputs:
command: "test"
projects: ${{ parameters.TestProjects }}
arguments: '--configuration Release --collect:"XPlat Code Coverage" --settings tests/coverletArgs.runsettings --verbosity minimal'
publishTestResults: true
testRunTitle: $(Agent.JobName)
workingDirectory: "$(Build.SourcesDirectory)"
- task: SonarCloudAnalyze@1
displayName: 'Run Code Analysis'
condition: eq(variables['ImageName'], 'ubuntu-latest')
enabled: false
- task: SonarCloudPublish@1
displayName: 'Publish Quality Gate Result'
condition: eq(variables['ImageName'], 'ubuntu-latest')
enabled: false
- task: Palmmedia.reportgenerator.reportgenerator-build-release-task.reportgenerator@4
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) # !! THIS is for V1 only V2 will/should support merging
displayName: 'Run ReportGenerator'
inputs:
reports: "$(Agent.TempDirectory)/**/coverage.cobertura.xml"
targetdir: "$(Agent.TempDirectory)/merged/"
reporttypes: "Cobertura"
## V2 is already in the repository but it does not work "wrong number of segments" YAML error.
- task: PublishCodeCoverageResults@1
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) # !! THIS is for V1 only V2 will/should support merging
displayName: 'Publish Code Coverage'
inputs:
codeCoverageTool: "cobertura"
#summaryFileLocation: '$(Agent.TempDirectory)/**/coverage.cobertura.xml' # !!THIS IS FOR V2
summaryFileLocation: "$(Agent.TempDirectory)/merged/**.xml"
pathToSources: $(Build.SourcesDirectory)
failIfCoverageEmpty: true
- task: PublishPipelineArtifact@1
displayName: 'Publish OpenAPI Artifact'
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
inputs:
targetPath: "tests/Jellyfin.Server.Integration.Tests/bin/Release/net8.0/openapi.json"
artifactName: 'OpenAPI Spec'


@@ -1,64 +0,0 @@
name: $(Date:yyyyMMdd)$(Rev:.r)
variables:
- name: TestProjects
value: 'tests/**/*Tests.csproj'
- name: RestoreBuildProjects
value: 'Jellyfin.Server/Jellyfin.Server.csproj'
pr:
autoCancel: true
trigger:
batch: true
branches:
include:
- '*'
tags:
include:
- 'v*'
jobs:
- ${{ if not(startsWith(variables['Build.SourceBranch'], 'refs/tags/v')) }}:
- template: azure-pipelines-main.yml
parameters:
LinuxImage: 'ubuntu-latest'
RestoreBuildProjects: $(RestoreBuildProjects)
- ${{ if not(or(startsWith(variables['Build.SourceBranch'], 'refs/tags/v'), startsWith(variables['Build.SourceBranch'], 'refs/heads/master'))) }}:
- template: azure-pipelines-test.yml
parameters:
ImageNames:
Linux: 'ubuntu-latest'
Windows: 'windows-latest'
macOS: 'macos-latest'
- ${{ if or(startsWith(variables['Build.SourceBranch'], 'refs/tags/v'), startsWith(variables['Build.SourceBranch'], 'refs/heads/master')) }}:
- template: azure-pipelines-test.yml
parameters:
ImageNames:
Linux: 'ubuntu-latest'
- ${{ if not(or(startsWith(variables['Build.SourceBranch'], 'refs/tags/v'), startsWith(variables['Build.SourceBranch'], 'refs/heads/master'))) }}:
- template: azure-pipelines-abi.yml
parameters:
Packages:
Naming:
NugetPackageName: Jellyfin.Naming
AssemblyFileName: Emby.Naming.dll
Controller:
NugetPackageName: Jellyfin.Controller
AssemblyFileName: MediaBrowser.Controller.dll
Model:
NugetPackageName: Jellyfin.Model
AssemblyFileName: MediaBrowser.Model.dll
Common:
NugetPackageName: Jellyfin.Common
AssemblyFileName: MediaBrowser.Common.dll
Extensions:
NugetPackageName: Jellyfin.Extensions
AssemblyFileName: Jellyfin.Extensions.dll
LinuxImage: 'ubuntu-latest'
- ${{ if or(startsWith(variables['Build.SourceBranch'], 'refs/tags/v'), startsWith(variables['Build.SourceBranch'], 'refs/heads/master')) }}:
- template: azure-pipelines-package.yml


@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"dotnet-ef": {
"version": "8.0.2",
"version": "8.0.3",
"commands": [
"dotnet-ef"
]

.copr

@@ -1 +0,0 @@
fedora


@@ -27,11 +27,11 @@ jobs:
dotnet-version: '8.0.x'
- name: Initialize CodeQL
uses: github/codeql-action/init@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
uses: github/codeql-action/init@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8
with:
languages: ${{ matrix.language }}
queries: +security-extended
- name: Autobuild
uses: github/codeql-action/autobuild@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
uses: github/codeql-action/autobuild@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
uses: github/codeql-action/analyze@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8


@@ -34,7 +34,7 @@ jobs:
--verbosity minimal
- name: Merge code coverage results
uses: danielpalme/ReportGenerator-GitHub-Action@b067e0c5d288fb4277b9f397b2dc6013f60381f0 # 5.2.2
uses: danielpalme/ReportGenerator-GitHub-Action@7a0988e399533f3680a732dceda1a967cafdafcd # 5.2.3
with:
reports: "**/coverage.cobertura.xml"
targetdir: "merged/"


@@ -0,0 +1,29 @@
name: Check Issue Template
on:
issues:
types:
- opened
jobs:
check_issue:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- name: pull in script
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
repository: jellyfin/jellyfin-triage-script
- name: install python
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: '3.12'
cache: 'pip'
- name: install python packages
run: pip install -r main-repo-triage/requirements.txt
- name: check and comment issue
working-directory: ./main-repo-triage
run: python3 single_issue_gha.py
env:
GH_TOKEN: ${{ secrets.JF_BOT_TOKEN }}
GH_REPO: ${{ github.repository }}
ISSUE: ${{ github.event.issue.number }}


@@ -180,6 +180,7 @@
- [Barasingha](https://github.com/MaVdbussche)
- [Gauvino](https://github.com/Gauvino)
- [felix920506](https://github.com/felix920506)
- [btopherjohnson](https://github.com/btopherjohnson)
# Emby Contributors


@@ -8,11 +8,11 @@
<PackageVersion Include="AutoFixture.AutoMoq" Version="4.18.1" />
<PackageVersion Include="AutoFixture.Xunit2" Version="4.18.1" />
<PackageVersion Include="AutoFixture" Version="4.18.1" />
<PackageVersion Include="BDInfo" Version="0.7.6.2" />
<PackageVersion Include="BDInfo" Version="0.8.0" />
<PackageVersion Include="BlurHashSharp.SkiaSharp" Version="1.3.2" />
<PackageVersion Include="BlurHashSharp" Version="1.3.2" />
<PackageVersion Include="CommandLineParser" Version="2.9.1" />
<PackageVersion Include="coverlet.collector" Version="6.0.1" />
<PackageVersion Include="coverlet.collector" Version="6.0.2" />
<PackageVersion Include="Diacritics" Version="3.3.27" />
<PackageVersion Include="DiscUtils.Udf" Version="0.16.13" />
<PackageVersion Include="DotNet.Glob" Version="3.1.3" />
@@ -24,28 +24,28 @@
<PackageVersion Include="libse" Version="3.6.13" />
<PackageVersion Include="LrcParser" Version="2023.524.0" />
<PackageVersion Include="MetaBrainz.MusicBrainz" Version="6.1.0" />
<PackageVersion Include="Microsoft.AspNetCore.Authorization" Version="8.0.2" />
<PackageVersion Include="Microsoft.AspNetCore.Authorization" Version="8.0.3" />
<PackageVersion Include="Microsoft.AspNetCore.HttpOverrides" Version="2.2.0" />
<PackageVersion Include="Microsoft.AspNetCore.Mvc.Testing" Version="8.0.2" />
<PackageVersion Include="Microsoft.AspNetCore.Mvc.Testing" Version="8.0.3" />
<PackageVersion Include="Microsoft.CodeAnalysis.BannedApiAnalyzers" Version="3.3.4" />
<PackageVersion Include="Microsoft.Data.Sqlite" Version="8.0.2" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.2" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Relational" Version="8.0.2" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.2" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.2" />
<PackageVersion Include="Microsoft.Data.Sqlite" Version="8.0.3" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Design" Version="8.0.3" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Relational" Version="8.0.3" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Sqlite" Version="8.0.3" />
<PackageVersion Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.3" />
<PackageVersion Include="Microsoft.Extensions.Caching.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Caching.Memory" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Json" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.DependencyInjection" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Diagnostics.HealthChecks.EntityFrameworkCore" Version="8.0.2" />
<PackageVersion Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="8.0.2" />
<PackageVersion Include="Microsoft.Extensions.Diagnostics.HealthChecks.EntityFrameworkCore" Version="8.0.3" />
<PackageVersion Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="8.0.3" />
<PackageVersion Include="Microsoft.Extensions.Hosting.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Http" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.Logging" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Options" Version="8.0.2" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
@@ -72,16 +72,16 @@
<PackageVersion Include="SkiaSharp.NativeAssets.Linux" Version="2.88.7" />
<PackageVersion Include="SmartAnalyzers.MultithreadingAnalyzer" Version="1.1.31" />
<PackageVersion Include="StyleCop.Analyzers" Version="1.2.0-beta.556" />
<PackageVersion Include="Svg.Skia" Version="1.0.0.14" />
<PackageVersion Include="Svg.Skia" Version="1.0.0.16" />
<PackageVersion Include="Swashbuckle.AspNetCore.ReDoc" Version="6.5.0" />
<PackageVersion Include="Swashbuckle.AspNetCore" Version="6.2.3" />
<PackageVersion Include="System.Globalization" Version="4.3.0" />
<PackageVersion Include="System.Linq.Async" Version="6.0.1" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="System.Text.Json" Version="8.0.2" />
<PackageVersion Include="System.Text.Json" Version="8.0.3" />
<PackageVersion Include="System.Threading.Tasks.Dataflow" Version="8.0.0" />
<PackageVersion Include="TagLibSharp" Version="2.3.0" />
<PackageVersion Include="TMDbLib" Version="2.1.0" />
<PackageVersion Include="TMDbLib" Version="2.2.0" />
<PackageVersion Include="UTF.Unknown" Version="2.5.1" />
<PackageVersion Include="Xunit.Priority" Version="1.1.6" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.5.7" />


@@ -1,87 +0,0 @@
# DESIGNED FOR BUILDING ON AMD64 ONLY
#####################################
# Requires binfmt_misc registration
# https://github.com/multiarch/qemu-user-static#binfmt_misc-register
ARG DOTNET_VERSION=8.0
FROM node:20-alpine as web-builder
ARG JELLYFIN_WEB_VERSION=master
RUN apk add curl git zlib zlib-dev autoconf g++ make libpng-dev gifsicle alpine-sdk automake libtool make gcc musl-dev nasm python3 \
&& curl -L https://github.com/jellyfin/jellyfin-web/archive/${JELLYFIN_WEB_VERSION}.tar.gz | tar zxf - \
&& apk del curl \
&& cd jellyfin-web-* \
&& npm ci --no-audit --unsafe-perm \
&& npm run build:production \
&& mv dist /dist
FROM debian:bookworm-slim as app
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND="noninteractive"
# http://stackoverflow.com/questions/48162574/ddg#49462622
ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
# https://github.com/NVIDIA/nvidia-docker/wiki/Installation-(Native-GPU-Support)
ENV NVIDIA_VISIBLE_DEVICES="all"
ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
ENV JELLYFIN_DATA_DIR=/config
ENV JELLYFIN_CACHE_DIR=/cache
# https://github.com/intel/compute-runtime/releases
ARG GMMLIB_VERSION=22.3.11.ci17757293
ARG IGC_VERSION=1.0.15136.22
ARG NEO_VERSION=23.39.27427.23
ARG LEVEL_ZERO_VERSION=1.3.27427.23
RUN apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates gnupg curl \
&& curl -fsSL https://repo.jellyfin.org/jellyfin_team.gpg.key | gpg --dearmor -o /etc/apt/trusted.gpg.d/debian-jellyfin.gpg \
&& echo "deb [arch=$( dpkg --print-architecture )] https://repo.jellyfin.org/$( awk -F'=' '/^ID=/{ print $NF }' /etc/os-release ) $( awk -F'=' '/^VERSION_CODENAME=/{ print $NF }' /etc/os-release ) main" | tee /etc/apt/sources.list.d/jellyfin.list \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y mesa-va-drivers jellyfin-ffmpeg6 openssl locales \
# Intel VAAPI Tone mapping dependencies:
# Prefer NEO to Beignet since the latter one doesn't support Comet Lake or newer for now.
# Do not use the intel-opencl-icd package from repo since they will not build with RELEASE_WITH_REGKEYS enabled.
&& mkdir intel-compute-runtime \
&& cd intel-compute-runtime \
&& curl -LO https://github.com/intel/intel-graphics-compiler/releases/download/igc-${IGC_VERSION}/intel-igc-core_${IGC_VERSION}_amd64.deb \
-LO https://github.com/intel/intel-graphics-compiler/releases/download/igc-${IGC_VERSION}/intel-igc-opencl_${IGC_VERSION}_amd64.deb \
-LO https://github.com/intel/compute-runtime/releases/download/${NEO_VERSION}/intel-level-zero-gpu_${LEVEL_ZERO_VERSION}_amd64.deb \
-LO https://github.com/intel/compute-runtime/releases/download/${NEO_VERSION}/intel-opencl-icd_${NEO_VERSION}_amd64.deb \
-LO https://github.com/intel/compute-runtime/releases/download/${NEO_VERSION}/libigdgmm12_${GMMLIB_VERSION}_amd64.deb \
&& dpkg -i *.deb \
&& cd .. \
&& rm -rf intel-compute-runtime \
&& apt-get remove gnupg -y \
&& apt-get clean autoclean -y \
&& apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& chmod 777 ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION} as builder
WORKDIR /repo
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="/jellyfin" --self-contained --runtime linux-x64 -p:DebugSymbols=false -p:DebugType=none
FROM app
ENV HEALTHCHECK_URL=http://localhost:8096/health
COPY --from=builder /jellyfin /jellyfin
COPY --from=web-builder /dist /jellyfin/jellyfin-web
EXPOSE 8096
VOLUME ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR}
ENTRYPOINT [ "./jellyfin/jellyfin", \
"--ffmpeg", "/usr/lib/jellyfin-ffmpeg/ffmpeg" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=10s --retries=3 \
CMD curl -Lk -fsS "${HEALTHCHECK_URL}" || exit 1


@@ -1,74 +0,0 @@
# DESIGNED FOR BUILDING ON ARM ONLY
#####################################
# Requires binfmt_misc registration
# https://github.com/multiarch/qemu-user-static#binfmt_misc-register
ARG DOTNET_VERSION=8.0
FROM node:20-alpine as web-builder
ARG JELLYFIN_WEB_VERSION=master
RUN apk add curl git zlib zlib-dev autoconf g++ make libpng-dev gifsicle alpine-sdk automake libtool make gcc musl-dev nasm python3 \
&& curl -L https://github.com/jellyfin/jellyfin-web/archive/${JELLYFIN_WEB_VERSION}.tar.gz | tar zxf - \
&& apk del curl \
&& cd jellyfin-web-* \
&& npm ci --no-audit --unsafe-perm \
&& npm run build:production \
&& mv dist /dist
FROM multiarch/qemu-user-static:x86_64-arm as qemu
FROM arm32v7/debian:bookworm-slim as app
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND="noninteractive"
# http://stackoverflow.com/questions/48162574/ddg#49462622
ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
# https://github.com/NVIDIA/nvidia-docker/wiki/Installation-(Native-GPU-Support)
ENV NVIDIA_VISIBLE_DEVICES="all"
ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
ENV JELLYFIN_DATA_DIR=/config
ENV JELLYFIN_CACHE_DIR=/cache
COPY --from=qemu /usr/bin/qemu-arm-static /usr/bin
RUN apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates gnupg curl \
&& curl -fsSL https://repo.jellyfin.org/jellyfin_team.gpg.key | gpg --dearmor -o /etc/apt/trusted.gpg.d/debian-jellyfin.gpg \
&& curl -fsSL https://keyserver.ubuntu.com/pks/lookup?op=get\&search=0x6587ffd6536b8826e88a62547876ae518cbcf2f2 | gpg --dearmor -o /etc/apt/trusted.gpg.d/ubuntu-jellyfin.gpg \
&& echo "deb [arch=$( dpkg --print-architecture )] https://repo.jellyfin.org/$( awk -F'=' '/^ID=/{ print $NF }' /etc/os-release ) $( awk -F'=' '/^VERSION_CODENAME=/{ print $NF }' /etc/os-release ) main" | tee /etc/apt/sources.list.d/jellyfin.list \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y \
jellyfin-ffmpeg6 libssl-dev libfontconfig1 \
libfreetype6 vainfo libva2 locales \
&& apt-get remove gnupg -y \
&& apt-get clean autoclean -y \
&& apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& chmod 777 ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION} as builder
WORKDIR /repo
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="/jellyfin" --self-contained --runtime linux-arm -p:DebugSymbols=false -p:DebugType=none
FROM app
ENV HEALTHCHECK_URL=http://localhost:8096/health
COPY --from=builder /jellyfin /jellyfin
COPY --from=web-builder /dist /jellyfin/jellyfin-web
EXPOSE 8096
VOLUME ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR}
ENTRYPOINT [ "/jellyfin/jellyfin", \
"--ffmpeg", "/usr/lib/jellyfin-ffmpeg/ffmpeg" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=10s --retries=3 \
CMD curl -Lk -fsS "${HEALTHCHECK_URL}" || exit 1


@@ -1,74 +0,0 @@
# DESIGNED FOR BUILDING ON ARM64 ONLY
#####################################
# Requires binfmt_misc registration
# https://github.com/multiarch/qemu-user-static#binfmt_misc-register
ARG DOTNET_VERSION=8.0
FROM node:20-alpine as web-builder
ARG JELLYFIN_WEB_VERSION=master
RUN apk add curl git zlib zlib-dev autoconf g++ make libpng-dev gifsicle alpine-sdk automake libtool make gcc musl-dev nasm python3 \
&& curl -L https://github.com/jellyfin/jellyfin-web/archive/${JELLYFIN_WEB_VERSION}.tar.gz | tar zxf - \
&& apk del curl \
&& cd jellyfin-web-* \
&& npm ci --no-audit --unsafe-perm \
&& npm run build:production \
&& mv dist /dist
FROM multiarch/qemu-user-static:x86_64-aarch64 as qemu
FROM arm64v8/debian:bookworm-slim as app
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND="noninteractive"
# http://stackoverflow.com/questions/48162574/ddg#49462622
ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
# https://github.com/NVIDIA/nvidia-docker/wiki/Installation-(Native-GPU-Support)
ENV NVIDIA_VISIBLE_DEVICES="all"
ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
ENV JELLYFIN_DATA_DIR=/config
ENV JELLYFIN_CACHE_DIR=/cache
COPY --from=qemu /usr/bin/qemu-aarch64-static /usr/bin
RUN apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y ca-certificates gnupg curl \
&& curl -fsSL https://repo.jellyfin.org/jellyfin_team.gpg.key | gpg --dearmor -o /etc/apt/trusted.gpg.d/debian-jellyfin.gpg \
&& curl -fsSL https://keyserver.ubuntu.com/pks/lookup?op=get\&search=0x6587ffd6536b8826e88a62547876ae518cbcf2f2 | gpg --dearmor -o /etc/apt/trusted.gpg.d/ubuntu-jellyfin.gpg \
&& echo "deb [arch=$( dpkg --print-architecture )] https://repo.jellyfin.org/$( awk -F'=' '/^ID=/{ print $NF }' /etc/os-release ) $( awk -F'=' '/^VERSION_CODENAME=/{ print $NF }' /etc/os-release ) main" | tee /etc/apt/sources.list.d/jellyfin.list \
&& apt-get update \
&& apt-get install --no-install-recommends --no-install-suggests -y \
jellyfin-ffmpeg6 locales libssl-dev libfontconfig1 \
libfreetype6 libomxil-bellagio0 libomxil-bellagio-bin \
&& apt-get remove gnupg -y \
&& apt-get clean autoclean -y \
&& apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& chmod 777 ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR} \
&& sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US:en
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION} as builder
WORKDIR /repo
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="/jellyfin" --self-contained --runtime linux-arm64 -p:DebugSymbols=false -p:DebugType=none
FROM app
ENV HEALTHCHECK_URL=http://localhost:8096/health
COPY --from=builder /jellyfin /jellyfin
COPY --from=web-builder /dist /jellyfin/jellyfin-web
EXPOSE 8096
VOLUME ${JELLYFIN_DATA_DIR} ${JELLYFIN_CACHE_DIR}
ENTRYPOINT [ "/jellyfin/jellyfin", \
"--ffmpeg", "/usr/lib/jellyfin-ffmpeg/ffmpeg" ]
HEALTHCHECK --interval=30s --timeout=30s --start-period=10s --retries=3 \
CMD curl -Lk -fsS "${HEALTHCHECK_URL}" || exit 1


@@ -16,167 +16,160 @@ using TagLib.IFD;
using TagLib.IFD.Entries;
using TagLib.IFD.Tags;
namespace Emby.Photos
namespace Emby.Photos;
/// <summary>
/// Metadata provider for photos.
/// </summary>
public class PhotoProvider : ICustomMetadataProvider<Photo>, IForcedProvider, IHasItemChangeMonitor
{
private readonly ILogger<PhotoProvider> _logger;
private readonly IImageProcessor _imageProcessor;
// These are causing taglib to hang
private readonly string[] _includeExtensions = [".jpg", ".jpeg", ".png", ".tiff", ".cr2", ".webp", ".avif"];
/// <summary>
/// Metadata provider for photos.
/// Initializes a new instance of the <see cref="PhotoProvider" /> class.
/// </summary>
public class PhotoProvider : ICustomMetadataProvider<Photo>, IForcedProvider, IHasItemChangeMonitor
/// <param name="logger">The logger.</param>
/// <param name="imageProcessor">The image processor.</param>
public PhotoProvider(ILogger<PhotoProvider> logger, IImageProcessor imageProcessor)
{
private readonly ILogger<PhotoProvider> _logger;
private readonly IImageProcessor _imageProcessor;
_logger = logger;
_imageProcessor = imageProcessor;
}
// These are causing taglib to hang
private readonly string[] _includeExtensions = new string[] { ".jpg", ".jpeg", ".png", ".tiff", ".cr2", ".webp", ".avif" };
/// <inheritdoc />
public string Name => "Embedded Information";
/// <summary>
/// Initializes a new instance of the <see cref="PhotoProvider" /> class.
/// </summary>
/// <param name="logger">The logger.</param>
/// <param name="imageProcessor">The image processor.</param>
public PhotoProvider(ILogger<PhotoProvider> logger, IImageProcessor imageProcessor)
/// <inheritdoc />
public bool HasChanged(BaseItem item, IDirectoryService directoryService)
{
if (item.IsFileProtocol)
{
_logger = logger;
_imageProcessor = imageProcessor;
var file = directoryService.GetFile(item.Path);
return file is not null && file.LastWriteTimeUtc != item.DateModified;
}
/// <inheritdoc />
public string Name => "Embedded Information";
return false;
}
/// <inheritdoc />
public bool HasChanged(BaseItem item, IDirectoryService directoryService)
/// <inheritdoc />
public Task<ItemUpdateType> FetchAsync(Photo item, MetadataRefreshOptions options, CancellationToken cancellationToken)
{
item.SetImagePath(ImageType.Primary, item.Path);
// Examples: https://github.com/mono/taglib-sharp/blob/a5f6949a53d09ce63ee7495580d6802921a21f14/tests/fixtures/TagLib.Tests.Images/NullOrientationTest.cs
if (_includeExtensions.Contains(Path.GetExtension(item.Path.AsSpan()), StringComparison.OrdinalIgnoreCase))
{
if (item.IsFileProtocol)
try
{
var file = directoryService.GetFile(item.Path);
return file is not null && file.LastWriteTimeUtc != item.DateModified;
}
return false;
}
/// <inheritdoc />
public Task<ItemUpdateType> FetchAsync(Photo item, MetadataRefreshOptions options, CancellationToken cancellationToken)
{
item.SetImagePath(ImageType.Primary, item.Path);
// Examples: https://github.com/mono/taglib-sharp/blob/a5f6949a53d09ce63ee7495580d6802921a21f14/tests/fixtures/TagLib.Tests.Images/NullOrientationTest.cs
if (_includeExtensions.Contains(Path.GetExtension(item.Path.AsSpan()), StringComparison.OrdinalIgnoreCase))
{
try
using var file = TagLib.File.Create(item.Path);
if (file.GetTag(TagTypes.TiffIFD) is IFDTag tag)
{
using (var file = TagLib.File.Create(item.Path))
var structure = tag.Structure;
if (structure?.GetEntry(0, (ushort)IFDEntryTag.ExifIFD) is SubIFDEntry exif)
{
if (file.GetTag(TagTypes.TiffIFD) is IFDTag tag)
var exifStructure = exif.Structure;
if (exifStructure is not null)
{
var structure = tag.Structure;
if (structure is not null
&& structure.GetEntry(0, (ushort)IFDEntryTag.ExifIFD) is SubIFDEntry exif)
if (exifStructure.GetEntry(0, (ushort)ExifEntryTag.ApertureValue) is RationalIFDEntry apertureEntry)
{
var exifStructure = exif.Structure;
if (exifStructure is not null)
{
var entry = exifStructure.GetEntry(0, (ushort)ExifEntryTag.ApertureValue) as RationalIFDEntry;
if (entry is not null)
{
item.Aperture = (double)entry.Value.Numerator / entry.Value.Denominator;
}
entry = exifStructure.GetEntry(0, (ushort)ExifEntryTag.ShutterSpeedValue) as RationalIFDEntry;
if (entry is not null)
{
item.ShutterSpeed = (double)entry.Value.Numerator / entry.Value.Denominator;
}
}
}
}
if (file is TagLib.Image.File image)
{
item.CameraMake = image.ImageTag.Make;
item.CameraModel = image.ImageTag.Model;
item.Width = image.Properties.PhotoWidth;
item.Height = image.Properties.PhotoHeight;
var rating = image.ImageTag.Rating;
item.CommunityRating = rating.HasValue ? rating : null;
item.Overview = image.ImageTag.Comment;
if (!string.IsNullOrWhiteSpace(image.ImageTag.Title)
&& !item.LockedFields.Contains(MetadataField.Name))
{
item.Name = image.ImageTag.Title;
item.Aperture = (double)apertureEntry.Value.Numerator / apertureEntry.Value.Denominator;
}
var dateTaken = image.ImageTag.DateTime;
if (dateTaken.HasValue)
if (exifStructure.GetEntry(0, (ushort)ExifEntryTag.ShutterSpeedValue) is RationalIFDEntry shutterSpeedEntry)
{
item.DateCreated = dateTaken.Value;
item.PremiereDate = dateTaken.Value;
item.ProductionYear = dateTaken.Value.Year;
}
item.Genres = image.ImageTag.Genres;
item.Tags = image.ImageTag.Keywords;
item.Software = image.ImageTag.Software;
if (image.ImageTag.Orientation == TagLib.Image.ImageOrientation.None)
{
item.Orientation = null;
}
else if (Enum.TryParse(image.ImageTag.Orientation.ToString(), true, out ImageOrientation orientation))
{
item.Orientation = orientation;
}
item.ExposureTime = image.ImageTag.ExposureTime;
item.FocalLength = image.ImageTag.FocalLength;
item.Latitude = image.ImageTag.Latitude;
item.Longitude = image.ImageTag.Longitude;
item.Altitude = image.ImageTag.Altitude;
if (image.ImageTag.ISOSpeedRatings.HasValue)
{
item.IsoSpeedRating = Convert.ToInt32(image.ImageTag.ISOSpeedRatings.Value);
}
else
{
item.IsoSpeedRating = null;
item.ShutterSpeed = (double)shutterSpeedEntry.Value.Numerator / shutterSpeedEntry.Value.Denominator;
}
}
}
}
catch (Exception ex)
if (file is TagLib.Image.File image)
{
_logger.LogError(ex, "Image Provider - Error reading image tag for {0}", item.Path);
}
}
item.CameraMake = image.ImageTag.Make;
item.CameraModel = image.ImageTag.Model;
if (item.Width <= 0 || item.Height <= 0)
{
var img = item.GetImageInfo(ImageType.Primary, 0);
item.Width = image.Properties.PhotoWidth;
item.Height = image.Properties.PhotoHeight;
try
{
var size = _imageProcessor.GetImageDimensions(item, img);
item.CommunityRating = image.ImageTag.Rating;
if (size.Width > 0 && size.Height > 0)
item.Overview = image.ImageTag.Comment;
if (!string.IsNullOrWhiteSpace(image.ImageTag.Title)
&& !item.LockedFields.Contains(MetadataField.Name))
{
item.Width = size.Width;
item.Height = size.Height;
item.Name = image.ImageTag.Title;
}
var dateTaken = image.ImageTag.DateTime;
if (dateTaken.HasValue)
{
item.DateCreated = dateTaken.Value;
item.PremiereDate = dateTaken.Value;
item.ProductionYear = dateTaken.Value.Year;
}
item.Genres = image.ImageTag.Genres;
item.Tags = image.ImageTag.Keywords;
item.Software = image.ImageTag.Software;
if (image.ImageTag.Orientation == TagLib.Image.ImageOrientation.None)
{
item.Orientation = null;
}
else if (Enum.TryParse(image.ImageTag.Orientation.ToString(), true, out ImageOrientation orientation))
{
item.Orientation = orientation;
}
item.ExposureTime = image.ImageTag.ExposureTime;
item.FocalLength = image.ImageTag.FocalLength;
item.Latitude = image.ImageTag.Latitude;
item.Longitude = image.ImageTag.Longitude;
item.Altitude = image.ImageTag.Altitude;
if (image.ImageTag.ISOSpeedRatings.HasValue)
{
item.IsoSpeedRating = Convert.ToInt32(image.ImageTag.ISOSpeedRatings.Value);
}
else
{
item.IsoSpeedRating = null;
}
}
catch (ArgumentException)
}
catch (Exception ex)
{
_logger.LogError(ex, "Image Provider - Error reading image tag for {0}", item.Path);
}
}
if (item.Width <= 0 || item.Height <= 0)
{
var img = item.GetImageInfo(ImageType.Primary, 0);
try
{
var size = _imageProcessor.GetImageDimensions(item, img);
if (size.Width > 0 && size.Height > 0)
{
// format not supported
item.Width = size.Width;
item.Height = size.Height;
}
}
const ItemUpdateType Result = ItemUpdateType.ImageUpdate | ItemUpdateType.MetadataImport;
return Task.FromResult(Result);
catch (ArgumentException)
{
// format not supported
}
}
const ItemUpdateType Result = ItemUpdateType.ImageUpdate | ItemUpdateType.MetadataImport;
return Task.FromResult(Result);
}
}
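The rewritten FetchAsync above collapses each `as` cast plus null check into a single type pattern (for example `is RationalIFDEntry apertureEntry`). A minimal sketch of that refactor with stand-in types (not the TagLib# API):

```csharp
using System;

// Stand-in types, not TagLib#: the 'as' cast plus null check used before
// becomes one type pattern that binds the variable only when the cast succeeds.
object entry = new RationalEntry(28, 10);

// Old shape:
var cast = entry as RationalEntry;
if (cast is not null)
{
    Console.WriteLine((double)cast.Numerator / cast.Denominator); // 2.8
}

// New shape:
if (entry is RationalEntry rational)
{
    Console.WriteLine((double)rational.Numerator / rational.Denominator); // 2.8
}

internal sealed record RationalEntry(uint Numerator, uint Denominator);
```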


@@ -146,7 +146,7 @@ namespace Emby.Server.Implementations
_startupConfig = startupConfig;
Logger = LoggerFactory.CreateLogger<ApplicationHost>();
_deviceId = new DeviceId(ApplicationPaths, LoggerFactory);
_deviceId = new DeviceId(ApplicationPaths, LoggerFactory.CreateLogger<DeviceId>());
ApplicationVersion = typeof(ApplicationHost).Assembly.GetName().Version;
ApplicationVersionString = ApplicationVersion.ToString(3);


@@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@@ -17,19 +15,19 @@ namespace Emby.Server.Implementations.Devices
private readonly ILogger<DeviceId> _logger;
private readonly object _syncLock = new object();
private string _id;
private string? _id;
public DeviceId(IApplicationPaths appPaths, ILoggerFactory loggerFactory)
public DeviceId(IApplicationPaths appPaths, ILogger<DeviceId> logger)
{
_appPaths = appPaths;
_logger = loggerFactory.CreateLogger<DeviceId>();
_logger = logger;
}
public string Value => _id ?? (_id = GetDeviceId());
public string Value => _id ??= GetDeviceId();
private string CachePath => Path.Combine(_appPaths.DataPath, "device.txt");
private string GetCachedId()
private string? GetCachedId()
{
try
{
@@ -65,7 +63,7 @@ namespace Emby.Server.Implementations.Devices
{
var path = CachePath;
Directory.CreateDirectory(Path.GetDirectoryName(path));
Directory.CreateDirectory(Path.GetDirectoryName(path) ?? throw new InvalidOperationException("Path can't be a root directory."));
lock (_syncLock)
{
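The DeviceId change above injects a typed ILogger&lt;DeviceId&gt; instead of an ILoggerFactory and lazily initializes the nullable `_id` field with `??=`. A minimal sketch of the same two idioms (the CachedId class is hypothetical, not the Jellyfin type):

```csharp
using System;
using Microsoft.Extensions.Logging;

// Hypothetical class showing the pattern adopted above: take ILogger<T>
// directly from DI and cache a lazily computed value with ??=.
public class CachedId
{
    private readonly ILogger<CachedId> _logger;
    private string? _id;

    public CachedId(ILogger<CachedId> logger)
    {
        _logger = logger; // no CreateLogger<CachedId>() call at the use site
    }

    // Computed on first access, then reused.
    public string Value => _id ??= ComputeId();

    private string ComputeId()
    {
        _logger.LogInformation("Generating a new id");
        return Guid.NewGuid().ToString("N");
    }
}
```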


@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Security;
using Jellyfin.Extensions;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Model.IO;
@@ -643,7 +644,15 @@ namespace Emby.Server.Implementations.IO
/// <inheritdoc />
public virtual IEnumerable<string> GetFileSystemEntryPaths(string path, bool recursive = false)
{
return Directory.EnumerateFileSystemEntries(path, "*", GetEnumerationOptions(recursive));
try
{
return Directory.EnumerateFileSystemEntries(path, "*", GetEnumerationOptions(recursive));
}
catch (Exception ex) when (ex is UnauthorizedAccessException or DirectoryNotFoundException or SecurityException)
{
_logger.LogError(ex, "Failed to enumerate path {Path}", path);
return Enumerable.Empty<string>();
}
}
/// <inheritdoc />
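The new GetFileSystemEntryPaths body above logs and swallows UnauthorizedAccessException, DirectoryNotFoundException, and SecurityException instead of letting a bad path abort the caller. A condensed, self-contained sketch of that defensive enumeration (console logging and an inline EnumerationOptions stand in for the class's own helpers):

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security;

internal static class SafeEnumeration
{
    // Mirrors the change above: log the failure and return an empty
    // sequence instead of throwing for unreadable or missing paths.
    public static IEnumerable<string> GetFileSystemEntryPaths(string path, bool recursive = false)
    {
        var options = new EnumerationOptions { RecurseSubdirectories = recursive };
        try
        {
            return Directory.EnumerateFileSystemEntries(path, "*", options);
        }
        catch (Exception ex) when (ex is UnauthorizedAccessException or DirectoryNotFoundException or SecurityException)
        {
            Console.Error.WriteLine($"Failed to enumerate path {path}: {ex.Message}");
            return Enumerable.Empty<string>();
        }
    }
}
```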


@@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@@ -37,16 +35,16 @@ namespace Emby.Server.Implementations.Library
_appPaths = appPaths;
}
public async Task AddMediaInfoWithProbe(MediaSourceInfo mediaSource, bool isAudio, string cacheKey, bool addProbeDelay, CancellationToken cancellationToken)
public async Task AddMediaInfoWithProbe(MediaSourceInfo mediaSource, bool isAudio, string? cacheKey, bool addProbeDelay, CancellationToken cancellationToken)
{
var originalRuntime = mediaSource.RunTimeTicks;
var now = DateTime.UtcNow;
MediaInfo mediaInfo = null;
MediaInfo? mediaInfo = null;
var cacheFilePath = string.IsNullOrEmpty(cacheKey) ? null : Path.Combine(_appPaths.CachePath, "mediainfo", cacheKey.GetMD5().ToString("N", CultureInfo.InvariantCulture) + ".json");
if (!string.IsNullOrEmpty(cacheKey))
if (cacheFilePath is not null)
{
try
{
@@ -91,7 +89,7 @@ namespace Emby.Server.Implementations.Library
if (cacheFilePath is not null)
{
Directory.CreateDirectory(Path.GetDirectoryName(cacheFilePath));
Directory.CreateDirectory(Path.GetDirectoryName(cacheFilePath) ?? throw new InvalidOperationException("Path can't be a root directory."));
FileStream createStream = AsyncFile.OpenWrite(cacheFilePath);
await using (createStream.ConfigureAwait(false))
{


@@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@@ -13,7 +11,6 @@ using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.Entities.Audio;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.Playlists;
using MediaBrowser.Model.Querying;
using MusicAlbum = MediaBrowser.Controller.Entities.Audio.MusicAlbum;
namespace Emby.Server.Implementations.Library
@@ -27,33 +24,35 @@ namespace Emby.Server.Implementations.Library
_libraryManager = libraryManager;
}
public List<BaseItem> GetInstantMixFromSong(Audio item, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromSong(Audio item, User? user, DtoOptions dtoOptions)
{
var list = new List<Audio>
var list = new List<BaseItem>
{
item
};
return list.Concat(GetInstantMixFromGenres(item.Genres, user, dtoOptions)).ToList();
list.AddRange(GetInstantMixFromGenres(item.Genres, user, dtoOptions));
return list;
}
/// <inheritdoc />
public List<BaseItem> GetInstantMixFromArtist(MusicArtist artist, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromArtist(MusicArtist artist, User? user, DtoOptions dtoOptions)
{
return GetInstantMixFromGenres(artist.Genres, user, dtoOptions);
}
public List<BaseItem> GetInstantMixFromAlbum(MusicAlbum item, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromAlbum(MusicAlbum item, User? user, DtoOptions dtoOptions)
{
return GetInstantMixFromGenres(item.Genres, user, dtoOptions);
}
public List<BaseItem> GetInstantMixFromFolder(Folder item, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromFolder(Folder item, User? user, DtoOptions dtoOptions)
{
var genres = item
.GetRecursiveChildren(user, new InternalItemsQuery(user)
{
IncludeItemTypes = new[] { BaseItemKind.Audio },
IncludeItemTypes = [BaseItemKind.Audio],
DtoOptions = dtoOptions
})
.Cast<Audio>()
@@ -64,12 +63,12 @@ namespace Emby.Server.Implementations.Library
return GetInstantMixFromGenres(genres, user, dtoOptions);
}
public List<BaseItem> GetInstantMixFromPlaylist(Playlist item, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromPlaylist(Playlist item, User? user, DtoOptions dtoOptions)
{
return GetInstantMixFromGenres(item.Genres, user, dtoOptions);
}
public List<BaseItem> GetInstantMixFromGenres(IEnumerable<string> genres, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromGenres(IEnumerable<string> genres, User? user, DtoOptions dtoOptions)
{
var genreIds = genres.DistinctNames().Select(i =>
{
@@ -86,27 +85,23 @@ namespace Emby.Server.Implementations.Library
return GetInstantMixFromGenreIds(genreIds, user, dtoOptions);
}
public List<BaseItem> GetInstantMixFromGenreIds(Guid[] genreIds, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromGenreIds(Guid[] genreIds, User? user, DtoOptions dtoOptions)
{
return _libraryManager.GetItemList(new InternalItemsQuery(user)
{
IncludeItemTypes = new[] { BaseItemKind.Audio },
GenreIds = genreIds.ToArray(),
IncludeItemTypes = [BaseItemKind.Audio],
GenreIds = genreIds,
Limit = 200,
OrderBy = new[] { (ItemSortBy.Random, SortOrder.Ascending) },
OrderBy = [(ItemSortBy.Random, SortOrder.Ascending)],
DtoOptions = dtoOptions
});
}
public List<BaseItem> GetInstantMixFromItem(BaseItem item, User user, DtoOptions dtoOptions)
public List<BaseItem> GetInstantMixFromItem(BaseItem item, User? user, DtoOptions dtoOptions)
{
if (item is MusicGenre)
{
return GetInstantMixFromGenreIds(new[] { item.Id }, user, dtoOptions);
return GetInstantMixFromGenreIds([item.Id], user, dtoOptions);
}
if (item is Playlist playlist)
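The MusicManager changes above (and the SearchEngine diff that follows) replace `new[] { ... }` with C# 12 collection expressions such as `[BaseItemKind.Audio]` and `[item.Id]`. A tiny standalone illustration of the syntax, unrelated to the Jellyfin types:

```csharp
using System;
using System.Collections.Generic;

// C# 12 collection expressions: the target type decides what is built,
// so the same bracket syntax replaces new[] { ... } and collection initializers.
int[] ids = [1, 2, 3];                                  // same as new[] { 1, 2, 3 }
List<string> genres = ["Rock", "Jazz"];                 // same as new List<string> { ... }
(string Name, int Order)[] orderBy = [("Random", 1)];   // tuples work as well

Console.WriteLine(string.Join(", ", ids));              // 1, 2, 3
Console.WriteLine(genres.Count + " genres, " + orderBy.Length + " sort field");
```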


@@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@@ -29,7 +27,7 @@ namespace Emby.Server.Implementations.Library
public QueryResult<SearchHintInfo> GetSearchHints(SearchQuery query)
{
User user = null;
User? user = null;
if (!query.UserId.IsEmpty())
{
user = _userManager.GetUserById(query.UserId);
@@ -69,7 +67,7 @@ namespace Emby.Server.Implementations.Library
/// <param name="user">The user.</param>
/// <returns>IEnumerable{SearchHintResult}.</returns>
/// <exception cref="ArgumentException"><c>query.SearchTerm</c> is <c>null</c> or empty.</exception>
private List<SearchHintInfo> GetSearchHints(SearchQuery query, User user)
private List<SearchHintInfo> GetSearchHints(SearchQuery query, User? user)
{
var searchTerm = query.SearchTerm;
@@ -78,7 +76,7 @@ namespace Emby.Server.Implementations.Library
searchTerm = searchTerm.Trim().RemoveDiacritics();
var excludeItemTypes = query.ExcludeItemTypes.ToList();
var includeItemTypes = (query.IncludeItemTypes ?? Array.Empty<BaseItemKind>()).ToList();
var includeItemTypes = query.IncludeItemTypes.ToList();
excludeItemTypes.Add(BaseItemKind.Year);
excludeItemTypes.Add(BaseItemKind.Folder);
@@ -179,7 +177,7 @@ namespace Emby.Server.Implementations.Library
{
if (!searchQuery.ParentId.IsEmpty())
{
searchQuery.AncestorIds = new[] { searchQuery.ParentId };
searchQuery.AncestorIds = [searchQuery.ParentId];
searchQuery.ParentId = Guid.Empty;
}


@@ -1,6 +1,6 @@
{
"Albums": "Albums",
"AppDeviceValues": "Application : {0}, Appareil : {1}",
"AppDeviceValues": "Application: {0}, Appareil: {1}",
"Application": "Application",
"Artists": "Artistes",
"AuthenticationSucceededWithUserName": "{0} authentifié avec succès",
@@ -29,7 +29,7 @@
"Inherit": "Hériter",
"ItemAddedWithName": "{0} a été ajouté à la médiathèque",
"ItemRemovedWithName": "{0} a été supprimé de la médiathèque",
"LabelIpAddressValue": "Adresse IP : {0}",
"LabelIpAddressValue": "Adresse IP: {0}",
"LabelRunningTimeValue": "Durée : {0}",
"Latest": "Derniers",
"MessageApplicationUpdated": "Le serveur Jellyfin a été mis à jour",


@@ -4,27 +4,27 @@
"HeaderNextUp": "इसके बाद",
"HeaderLiveTV": "लाइव टीवी",
"HeaderFavoriteSongs": "पसंदीदा गीत",
"HeaderFavoriteShows": "पसंदीदा शो",
"HeaderFavoriteEpisodes": "पसंदीदा एपिसोड्स",
"HeaderFavoriteArtists": "पसंदीदा कलाकारसमूह",
"HeaderFavoriteShows": "पसंदीदा शो",
"HeaderFavoriteEpisodes": "पसंदीदा प्रकरण",
"HeaderFavoriteArtists": "पसंदीदा कलाकार",
"HeaderFavoriteAlbums": "पसंदीदा एलबम्स",
"HeaderContinueWatching": "देखते रहिए",
"HeaderContinueWatching": "देखना जारी रखें",
"HeaderAlbumArtists": "एल्बम कलाकार",
"Genres": "शैल",
"Genres": "शैलियां",
"Forced": "बलपूर्वक",
"Folders": "फ़ोल्डरें",
"Folders": "फ़ोल्डर",
"Favorites": "पसंदीदा",
"FailedLoginAttemptWithUserName": "{0} से लॉगिन असफल हुआ",
"DeviceOnlineWithName": "{0} से संयोग हो गया है",
"DeviceOfflineWithName": "{0} से संयोग विच्छिन्न हो गया है",
"DeviceOnlineWithName": "{0} कनेक्ट हो गया है",
"DeviceOfflineWithName": "{0} डिस्कनेक्ट हो गया है",
"Default": "प्राथमिक",
"Collections": "संग्रहों",
"ChapterNameValue": "अध्याय",
"Collections": "संग्रह",
"ChapterNameValue": "अध्याय {0}",
"Channels": "चैनल",
"CameraImageUploadedFrom": "{0} से एक नया कैमरावाला चित्र अपलोड किया गया है",
"Books": "पुस्तकं",
"AuthenticationSucceededWithUserName": "सफलता से प्रमाणीकृत",
"Artists": "कलाकारों",
"CameraImageUploadedFrom": "{0} से एक नया कैमरा छवि अपलोड की गई है",
"Books": "पुस्तकं",
"AuthenticationSucceededWithUserName": "{0} सफलतापूर्वक प्रमाणित किया गया",
"Artists": "कलाकार",
"Application": "एप्लिकेशन",
"AppDeviceValues": "एप: {0}, उपकरण: {1}",
"NotificationOptionPluginUninstalled": "प्लगइन अनइंस्टाल हो गया",


@@ -126,5 +126,5 @@
"External": "Extern",
"HearingImpaired": "Slechthorend",
"TaskRefreshTrickplayImages": "Trickplay-afbeeldingen genereren",
"TaskRefreshTrickplayImagesDescription": "Genereert trickplay-afbeeldingen voor video's in bibliotheken waarvoor dit is ingeschakeld."
"TaskRefreshTrickplayImagesDescription": "Creëert trickplay-voorvertoningen voor video's in bibliotheken waarvoor dit is ingeschakeld."
}


@@ -1604,7 +1604,7 @@ public class DynamicHlsController : BaseJellyfinApiController
Path.GetFileNameWithoutExtension(outputPath));
}
var hlsArguments = GetHlsArguments(isEventPlaylist, state.SegmentLength);
var hlsArguments = $"-hls_playlist_type {(isEventPlaylist ? "event" : "vod")} -hls_list_size 0";
return string.Format(
CultureInfo.InvariantCulture,
@@ -1625,33 +1625,6 @@ public class DynamicHlsController : BaseJellyfinApiController
EncodingUtils.NormalizePath(outputPath)).Trim();
}
/// <summary>
/// Gets the HLS arguments for transcoding.
/// </summary>
/// <returns>The command line arguments for HLS transcoding.</returns>
private string GetHlsArguments(bool isEventPlaylist, int segmentLength)
{
var enableThrottling = _encodingOptions.EnableThrottling;
var enableSegmentDeletion = _encodingOptions.EnableSegmentDeletion;
// Only enable segment deletion when throttling is enabled
if (enableThrottling && enableSegmentDeletion)
{
// Store enough segments for configured seconds of playback; this needs to be above throttling settings
var segmentCount = _encodingOptions.SegmentKeepSeconds / segmentLength;
_logger.LogDebug("Using throttling and segment deletion, keeping {0} segments", segmentCount);
return string.Format(CultureInfo.InvariantCulture, "-hls_list_size {0} -hls_flags delete_segments", segmentCount.ToString(CultureInfo.InvariantCulture));
}
else
{
_logger.LogDebug("Using normal playback, is event playlist? {0}", isEventPlaylist);
return string.Format(CultureInfo.InvariantCulture, "-hls_playlist_type {0} -hls_list_size 0", isEventPlaylist ? "event" : "vod");
}
}
/// <summary>
/// Gets the audio arguments for transcoding.
/// </summary>
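For reference, a minimal sketch of what the inlined expression above evaluates to; the segment-deletion variant of these arguments now lives in the TranscodingSegmentCleaner introduced later in this diff:

bool isEventPlaylist = false; // e.g. a VOD transcode
var hlsArguments = $"-hls_playlist_type {(isEventPlaylist ? "event" : "vod")} -hls_list_size 0";
Console.WriteLine(hlsArguments); // prints: -hls_playlist_type vod -hls_list_size 0
// With isEventPlaylist == true the result is: -hls_playlist_type event -hls_list_size 0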

View File

@ -157,7 +157,7 @@ public class UniversalAudioController : BaseJellyfinApiController
}
var isStatic = mediaSource.SupportsDirectStream;
if (!isStatic && mediaSource.TranscodingSubProtocol == MediaStreamProtocol.Hls)
if (!isStatic && mediaSource.TranscodingSubProtocol == MediaStreamProtocol.hls)
{
// hls segment container can only be mpegts or fmp4 per ffmpeg documentation
// ffmpeg option -> file extension
@ -268,7 +268,7 @@ public class UniversalAudioController : BaseJellyfinApiController
Context = EncodingContext.Streaming,
Container = transcodingContainer ?? "mp3",
AudioCodec = audioCodec ?? "mp3",
Protocol = transcodingProtocol ?? MediaStreamProtocol.Http,
Protocol = transcodingProtocol ?? MediaStreamProtocol.http,
BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
MaxAudioChannels = transcodingAudioChannels?.ToString(CultureInfo.InvariantCulture)
}

View File

@ -1,20 +1,22 @@
#pragma warning disable SA1300 // Lowercase required for backwards compat.
using System.ComponentModel;
namespace Jellyfin.Data.Enums;
/// <summary>
/// Media streaming protocol.
/// Lowercase for backwards compatibility.
/// </summary>
[DefaultValue(Http)]
[DefaultValue(http)]
public enum MediaStreamProtocol
{
/// <summary>
/// HTTP.
/// </summary>
Http = 0,
http = 0,
/// <summary>
/// HTTP Live Streaming.
/// </summary>
Hls = 1
hls = 1
}
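A minimal sketch of why the lowercase member names matter on the wire, assuming the API serializes these enums as strings via JsonStringEnumConverter (the converter registration itself is not part of this diff):

using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using Jellyfin.Data.Enums;

var options = new JsonSerializerOptions();
options.Converters.Add(new JsonStringEnumConverter());

// With the lowercase member names restored, existing clients keep receiving the historical values.
Console.WriteLine(JsonSerializer.Serialize(MediaStreamProtocol.http, options)); // "http"
Console.WriteLine(JsonSerializer.Serialize(MediaStreamProtocol.hls, options));  // "hls"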

View File

@ -57,6 +57,9 @@ public static class StartupHelpers
logger.LogInformation("User Interactive: {IsUserInteractive}", Environment.UserInteractive);
logger.LogInformation("Processor count: {ProcessorCount}", Environment.ProcessorCount);
logger.LogInformation("Program data path: {ProgramDataPath}", appPaths.ProgramDataPath);
logger.LogInformation("Log directory path: {LogDirectoryPath}", appPaths.LogDirectoryPath);
logger.LogInformation("Config directory path: {ConfigurationDirectoryPath}", appPaths.ConfigurationDirectoryPath);
logger.LogInformation("Cache path: {CachePath}", appPaths.CachePath);
logger.LogInformation("Web resources path: {WebPath}", appPaths.WebPath);
logger.LogInformation("Application directory: {ApplicationPath}", appPaths.ProgramSystemPath);
}

View File

@ -12,6 +12,7 @@ using Jellyfin.Server.Helpers;
using Jellyfin.Server.Implementations;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Controller;
using Microsoft.AspNetCore.Hosting;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
@ -139,7 +140,15 @@ namespace Jellyfin.Server
host = Host.CreateDefaultBuilder()
.UseConsoleLifetime()
.ConfigureServices(services => appHost.Init(services))
.ConfigureWebHostDefaults(webHostBuilder => webHostBuilder.ConfigureWebHostBuilder(appHost, startupConfig, appPaths, _logger))
.ConfigureWebHostDefaults(webHostBuilder =>
{
webHostBuilder.ConfigureWebHostBuilder(appHost, startupConfig, appPaths, _logger);
if (bool.TryParse(Environment.GetEnvironmentVariable("JELLYFIN_ENABLE_IIS"), out var iisEnabled) && iisEnabled)
{
_logger.LogCritical("UNSUPPORTED HOSTING ENVIRONMENT Microsoft Internet Information Services. The option to run Jellyfin on IIS is an unsupported and untested feature. Only use at your own discretion.");
webHostBuilder.UseIIS();
}
})
.ConfigureAppConfiguration(config => config.ConfigureAppConfiguration(options, appPaths, startupConfig))
.UseSerilog()
.Build();

View File

@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CS1591
namespace MediaBrowser.Controller.Channels
@ -11,6 +9,6 @@ namespace MediaBrowser.Controller.Channels
/// </summary>
/// <param name="userId">The user identifier.</param>
/// <returns>System.String.</returns>
string GetCacheKey(string userId);
string? GetCacheKey(string? userId);
}
}

View File

@ -1,21 +0,0 @@
#nullable disable
#pragma warning disable CS1591
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace MediaBrowser.Controller.Channels
{
public interface ISearchableChannel
{
/// <summary>
/// Searches the specified search term.
/// </summary>
/// <param name="searchInfo">The search information.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task{IEnumerable{ChannelItemInfo}}.</returns>
Task<IEnumerable<ChannelItemInfo>> Search(ChannelSearchInfo searchInfo, CancellationToken cancellationToken);
}
}

View File

@ -1,6 +1,4 @@
#nullable disable
#pragma warning disable CS1591
#pragma warning disable CS1591
using System.Collections.Generic;
using System.Threading;

View File

@ -63,7 +63,9 @@ namespace MediaBrowser.Controller.Entities
".edl",
".bif",
".smi",
".ttml"
".ttml",
".lrc",
".elrc"
};
/// <summary>

View File

@ -11,6 +11,7 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Jellyfin.Data.Entities;
using Jellyfin.Data.Enums;
using Jellyfin.Extensions.Json;
using MediaBrowser.Controller.IO;
@ -95,6 +96,16 @@ namespace MediaBrowser.Controller.Entities
return GetLibraryOptions(Path);
}
public override bool IsVisible(User user)
{
if (GetLibraryOptions().Enabled)
{
return base.IsVisible(user);
}
return false;
}
private static LibraryOptions LoadLibraryOptions(string path)
{
try

View File

@ -331,8 +331,25 @@ namespace MediaBrowser.Controller.Entities
}
}
private static bool IsLibraryFolderAccessible(IDirectoryService directoryService, BaseItem item)
{
// For top parents, i.e. library folders, skip the validation if the folder is empty or inaccessible
if (item.IsTopParent && !directoryService.IsAccessible(item.ContainingFolderPath))
{
Logger.LogWarning("Library folder {LibraryFolderPath} is inaccessible or empty, skipping", item.ContainingFolderPath);
return false;
}
return true;
}
private async Task ValidateChildrenInternal2(IProgress<double> progress, bool recursive, bool refreshChildMetadata, MetadataRefreshOptions refreshOptions, IDirectoryService directoryService, CancellationToken cancellationToken)
{
if (!IsLibraryFolderAccessible(directoryService, this))
{
return;
}
cancellationToken.ThrowIfCancellationRequested();
var validChildren = new List<BaseItem>();
@ -369,6 +386,11 @@ namespace MediaBrowser.Controller.Entities
foreach (var child in nonCachedChildren)
{
if (!IsLibraryFolderAccessible(directoryService, child))
{
continue;
}
if (currentChildren.TryGetValue(child.Id, out BaseItem currentChild))
{
validChildren.Add(currentChild);
@ -392,8 +414,8 @@ namespace MediaBrowser.Controller.Entities
validChildren.Add(child);
}
// If any items were added or removed....
if (newItems.Count > 0 || currentChildren.Count != validChildren.Count)
// If it's an AggregateFolder, don't remove
if (!IsRoot && currentChildren.Count != validChildren.Count)
{
// That's all the new and changed ones - now see if there are any that are missing
var itemsRemoved = currentChildren.Values.Except(validChildren).ToList();
@ -408,7 +430,10 @@ namespace MediaBrowser.Controller.Entities
LibraryManager.DeleteItem(item, new DeleteOptions { DeleteFileLocation = false }, this, false);
}
}
}
if (newItems.Count > 0)
{
LibraryManager.CreateItems(newItems, this, cancellationToken);
}
}

View File

@ -1,5 +1,3 @@
#nullable disable
#pragma warning disable CA1002, CS1591
using System.Collections.Generic;
@ -19,7 +17,7 @@ namespace MediaBrowser.Controller.Library
/// <param name="user">The user to use.</param>
/// <param name="dtoOptions">The options to use.</param>
/// <returns>List of items.</returns>
List<BaseItem> GetInstantMixFromItem(BaseItem item, User user, DtoOptions dtoOptions);
List<BaseItem> GetInstantMixFromItem(BaseItem item, User? user, DtoOptions dtoOptions);
/// <summary>
/// Gets the instant mix from artist.
@ -28,7 +26,7 @@ namespace MediaBrowser.Controller.Library
/// <param name="user">The user to use.</param>
/// <param name="dtoOptions">The options to use.</param>
/// <returns>List of items.</returns>
List<BaseItem> GetInstantMixFromArtist(MusicArtist artist, User user, DtoOptions dtoOptions);
List<BaseItem> GetInstantMixFromArtist(MusicArtist artist, User? user, DtoOptions dtoOptions);
/// <summary>
/// Gets the instant mix from genre.
@ -37,6 +35,6 @@ namespace MediaBrowser.Controller.Library
/// <param name="user">The user to use.</param>
/// <param name="dtoOptions">The options to use.</param>
/// <returns>List of items.</returns>
List<BaseItem> GetInstantMixFromGenres(IEnumerable<string> genres, User user, DtoOptions dtoOptions);
List<BaseItem> GetInstantMixFromGenres(IEnumerable<string> genres, User? user, DtoOptions dtoOptions);
}
}

View File

@ -54,7 +54,7 @@ namespace MediaBrowser.Controller.LiveTv
public string ChannelGroup { get; set; }
/// <summary>
/// Gets or sets the the image path if it can be accessed directly from the file system.
/// Gets or sets the image path if it can be accessed directly from the file system.
/// </summary>
/// <value>The image path.</value>
public string ImagePath { get; set; }

View File

@ -51,6 +51,7 @@ namespace MediaBrowser.Controller.MediaEncoding
private readonly Version _minFFmpegOclCuTonemapMode = new Version(5, 1, 3);
private readonly Version _minFFmpegSvtAv1Params = new Version(5, 1);
private readonly Version _minFFmpegVaapiH26xEncA53CcSei = new Version(6, 0);
private readonly Version _minFFmpegReadrateOption = new Version(5, 0);
private static readonly string[] _videoProfilesH264 = new[]
{
@ -253,6 +254,14 @@ namespace MediaBrowser.Controller.MediaEncoding
&& _mediaEncoder.SupportsFilterWithOption(FilterOptionType.OverlayVulkanFrameSync);
}
private bool IsVideoToolboxFullSupported()
{
return _mediaEncoder.SupportsHwaccel("videotoolbox")
&& _mediaEncoder.SupportsFilter("yadif_videotoolbox")
&& _mediaEncoder.SupportsFilter("overlay_videotoolbox")
&& _mediaEncoder.SupportsFilter("scale_vt");
}
private bool IsHwTonemapAvailable(EncodingJobInfo state, EncodingOptions options)
{
if (state.VideoStream is null
@ -272,7 +281,8 @@ namespace MediaBrowser.Controller.MediaEncoding
var isNvdecDecoder = vidDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
var isVaapiDecoder = vidDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
var isD3d11vaDecoder = vidDecoder.Contains("d3d11va", StringComparison.OrdinalIgnoreCase);
return isSwDecoder || isNvdecDecoder || isVaapiDecoder || isD3d11vaDecoder;
var isVideoToolBoxDecoder = vidDecoder.Contains("videotoolbox", StringComparison.OrdinalIgnoreCase);
return isSwDecoder || isNvdecDecoder || isVaapiDecoder || isD3d11vaDecoder || isVideoToolBoxDecoder;
}
return state.VideoStream.VideoRange == VideoRange.HDR
@ -308,6 +318,21 @@ namespace MediaBrowser.Controller.MediaEncoding
&& state.VideoStream.VideoRangeType == VideoRangeType.HDR10;
}
private bool IsVideoToolboxTonemapAvailable(EncodingJobInfo state, EncodingOptions options)
{
if (state.VideoStream is null
|| !options.EnableVideoToolboxTonemapping
|| GetVideoColorBitDepth(state) != 10)
{
return false;
}
// Certain DV profile 5 videos work in Safari with direct play, but VideoToolbox does not produce correct mapping results when transcoding.
// All other HDR formats work.
return state.VideoStream.VideoRange == VideoRange.HDR
&& state.VideoStream.VideoRangeType is VideoRangeType.HDR10 or VideoRangeType.HLG or VideoRangeType.HDR10Plus;
}
/// <summary>
/// Gets the name of the output video codec.
/// </summary>
@ -1197,7 +1222,7 @@ namespace MediaBrowser.Controller.MediaEncoding
// Disable auto inserted SW scaler for HW decoders in case of changed resolution.
var isSwDecoder = string.IsNullOrEmpty(GetHardwareVideoDecoder(state, options));
if (!isSwDecoder && _mediaEncoder.EncoderVersion >= new Version(4, 4))
if (!isSwDecoder)
{
arg.Append(" -noautoscale");
}
@ -4954,22 +4979,30 @@ namespace MediaBrowser.Controller.MediaEncoding
return (null, null, null);
}
var swFilterChain = GetSwVidFilterChain(state, options, vidEncoder);
var isMacOS = OperatingSystem.IsMacOS();
var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
var isVtEncoder = vidEncoder.Contains("videotoolbox", StringComparison.OrdinalIgnoreCase);
var isVtFullSupported = isMacOS && IsVideoToolboxFullSupported();
var isVtOclSupported = isVtFullSupported && IsOpenclFullSupported();
if (!options.EnableHardwareEncoding)
// legacy videotoolbox pipeline (disable hw filters)
if (!isVtEncoder
|| !isVtOclSupported
|| !_mediaEncoder.SupportsFilter("alphasrc"))
{
return swFilterChain;
return GetSwVidFilterChain(state, options, vidEncoder);
}
if (_mediaEncoder.EncoderVersion.CompareTo(new Version("5.0.0")) < 0)
{
// All features used here requires ffmpeg 5.0 or later, fallback to software filters if using an old ffmpeg
return swFilterChain;
}
// preferred videotoolbox + vt/ocl filters pipeline
return GetAppleVidFiltersPreferred(state, options, vidDecoder, vidEncoder);
}
var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
var doDeintH2645 = doDeintH264 || doDeintHevc;
public (List<string> MainFilters, List<string> SubFilters, List<string> OverlayFilters) GetAppleVidFiltersPreferred(
EncodingJobInfo state,
EncodingOptions options,
string vidDecoder,
string vidEncoder)
{
var inW = state.VideoStream?.Width;
var inH = state.VideoStream?.Height;
var reqW = state.BaseRequest.Width;
@ -4977,33 +5010,114 @@ namespace MediaBrowser.Controller.MediaEncoding
var reqMaxW = state.BaseRequest.MaxWidth;
var reqMaxH = state.BaseRequest.MaxHeight;
var threeDFormat = state.MediaSource.Video3DFormat;
var newfilters = new List<string>();
var noOverlay = swFilterChain.OverlayFilters.Count == 0;
var supportsHwDeint = _mediaEncoder.SupportsFilter("yadif_videotoolbox");
// fallback to software filters if we are using filters not supported by hardware yet.
var useHardwareFilters = noOverlay && (!doDeintH2645 || supportsHwDeint);
if (!useHardwareFilters)
var isVtEncoder = vidEncoder.Contains("videotoolbox", StringComparison.OrdinalIgnoreCase);
var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
var doDeintH2645 = doDeintH264 || doDeintHevc;
var doVtTonemap = IsVideoToolboxTonemapAvailable(state, options);
var doOclTonemap = !doVtTonemap && IsHwTonemapAvailable(state, options);
var scaleFormat = string.Empty;
if (!string.Equals(state.VideoStream.PixelFormat, "yuv420p", StringComparison.OrdinalIgnoreCase))
{
return swFilterChain;
// Use P010 for OpenCL tone mapping, otherwise force an 8bit output.
scaleFormat = doOclTonemap ? "p010le" : "nv12";
}
// ffmpeg cannot use videotoolbox to scale
var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
newfilters.Add(swScaleFilter);
var hwScaleFilter = GetHwScaleFilter("vt", scaleFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
// hwupload on videotoolbox encoders can automatically convert an AVFrame into its CVPixelBuffer equivalent.
// videotoolbox will automatically convert the CVPixelBuffer to a pixel format the encoder supports, so we don't have to set a pixel format explicitly here.
// This significantly reduces CPU usage on UHD videos with 10-bit color because the ffmpeg pixel format conversion is bypassed.
newfilters.Add("hwupload");
var hasSubs = state.SubtitleStream is not null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
var hasAssSubs = hasSubs
&& (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
|| string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
if (!isVtEncoder)
{
// should not happen.
return (null, null, null);
}
/* Make main filters for video stream */
var mainFilters = new List<string>();
// Color override is only required for OpenCL where hardware surface is in use
if (doOclTonemap)
{
mainFilters.Add(GetOverwriteColorPropertiesParam(state, doOclTonemap));
}
// INPUT videotoolbox/memory surface(vram/uma)
// this will pass-through automatically if in/out format matches.
mainFilters.Add("format=nv12|p010le|videotoolbox_vld");
mainFilters.Add("hwupload=derive_device=videotoolbox");
// hw deint
if (doDeintH2645)
{
var deintFilter = GetHwDeinterlaceFilter(state, options, "videotoolbox");
newfilters.Add(deintFilter);
mainFilters.Add(deintFilter);
}
return (newfilters, swFilterChain.SubFilters, swFilterChain.OverlayFilters);
if (doVtTonemap)
{
const string VtTonemapArgs = "color_matrix=bt709:color_primaries=bt709:color_transfer=bt709";
// scale_vt can handle scaling & tonemapping in one shot, just like vpp_qsv.
hwScaleFilter = string.IsNullOrEmpty(hwScaleFilter)
? "scale_vt=" + VtTonemapArgs
: hwScaleFilter + ":" + VtTonemapArgs;
}
// hw scale & vt tonemap
mainFilters.Add(hwScaleFilter);
// ocl tonemap
if (doOclTonemap)
{
// map from videotoolbox to opencl via videotoolbox-opencl interop.
mainFilters.Add("hwmap=derive_device=opencl:mode=read");
var tonemapFilter = GetHwTonemapFilter(options, "opencl", "nv12");
mainFilters.Add(tonemapFilter);
// OUTPUT videotoolbox(nv12) surface(vram/uma)
// reverse-mapping via videotoolbox-opencl interop.
mainFilters.Add("hwmap=derive_device=videotoolbox:mode=write:reverse=1");
}
/* Make sub and overlay filters for subtitle stream */
var subFilters = new List<string>();
var overlayFilters = new List<string>();
if (hasSubs)
{
if (hasGraphicalSubs)
{
var subPreProcFilters = GetGraphicalSubPreProcessFilters(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
subFilters.Add(subPreProcFilters);
subFilters.Add("format=bgra");
}
else if (hasTextSubs)
{
var framerate = state.VideoStream?.RealFrameRate;
var subFramerate = hasAssSubs ? Math.Min(framerate ?? 25, 60) : 10;
var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, reqMaxH, subFramerate);
var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
subFilters.Add(alphaSrcFilter);
subFilters.Add("format=bgra");
subFilters.Add(subTextSubtitlesFilter);
}
subFilters.Add("hwupload=derive_device=videotoolbox");
overlayFilters.Add("overlay_videotoolbox=eof_action=pass:repeatlast=0");
}
return (mainFilters, subFilters, overlayFilters);
}
/// <summary>
@ -5995,22 +6109,37 @@ namespace MediaBrowser.Controller.MediaEncoding
|| string.Equals("yuvj420p", videoStream.PixelFormat, StringComparison.OrdinalIgnoreCase);
var is8_10bitSwFormatsVt = is8bitSwFormatsVt || string.Equals("yuv420p10le", videoStream.PixelFormat, StringComparison.OrdinalIgnoreCase);
// Hardware surface only make sense when interop with OpenCL
// VideoToolbox's Hardware surface in ffmpeg is not only slower than hwupload, but also breaks HDR in many cases.
// For example: https://trac.ffmpeg.org/ticket/10884
var useOclToneMapping = !IsVideoToolboxTonemapAvailable(state, options)
&& options.EnableTonemapping
&& state.VideoStream is not null
&& GetVideoColorBitDepth(state) == 10
&& state.VideoStream.VideoRange == VideoRange.HDR
&& (state.VideoStream.VideoRangeType == VideoRangeType.HDR10
|| state.VideoStream.VideoRangeType == VideoRangeType.HLG
|| (state.VideoStream.VideoRangeType == VideoRangeType.DOVI
&& string.Equals(state.VideoStream.Codec, "hevc", StringComparison.OrdinalIgnoreCase)));
var useHwSurface = useOclToneMapping && IsVideoToolboxFullSupported() && _mediaEncoder.SupportsFilter("alphasrc");
if (is8bitSwFormatsVt)
{
if (string.Equals("avc", videoStream.Codec, StringComparison.OrdinalIgnoreCase)
|| string.Equals("h264", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return GetHwaccelType(state, options, "h264", bitDepth, false);
return GetHwaccelType(state, options, "h264", bitDepth, useHwSurface);
}
if (string.Equals("mpeg2video", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return GetHwaccelType(state, options, "mpeg2video", bitDepth, false);
return GetHwaccelType(state, options, "mpeg2video", bitDepth, useHwSurface);
}
if (string.Equals("mpeg4", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return GetHwaccelType(state, options, "mpeg4", bitDepth, false);
return GetHwaccelType(state, options, "mpeg4", bitDepth, useHwSurface);
}
}
@ -6019,12 +6148,12 @@ namespace MediaBrowser.Controller.MediaEncoding
if (string.Equals("hevc", videoStream.Codec, StringComparison.OrdinalIgnoreCase)
|| string.Equals("h265", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return GetHwaccelType(state, options, "hevc", bitDepth, false);
return GetHwaccelType(state, options, "hevc", bitDepth, useHwSurface);
}
if (string.Equals("vp9", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return GetHwaccelType(state, options, "vp9", bitDepth, false);
return GetHwaccelType(state, options, "vp9", bitDepth, useHwSurface);
}
}
@ -6265,6 +6394,16 @@ namespace MediaBrowser.Controller.MediaEncoding
{
inputModifier += " -re";
}
else if (encodingOptions.EnableSegmentDeletion
&& state.VideoStream is not null
&& state.TranscodingType == TranscodingJobType.Hls
&& IsCopyCodec(state.OutputVideoCodec)
&& _mediaEncoder.EncoderVersion >= _minFFmpegReadrateOption)
{
// Set a 10x input read rate limit when using segment deletion with stream copy
// to prevent ffmpeg from exiting prematurely (due to a fast drive)
inputModifier += " -readrate 10";
}
var flags = new List<string>();
if (state.IgnoreInputDts)
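To illustrate the preferred VideoToolbox pipeline above, here is a hedged sketch of the main filter list GetAppleVidFiltersPreferred assembles for a 10-bit HDR10 source with VideoToolbox tonemapping enabled, no deinterlacing, and no burned-in subtitles; the exact size/format arguments of scale_vt come from GetHwScaleFilter and are only assumed here:

using System.Collections.Generic;

// hwScaleFilter stands in for whatever GetHwScaleFilter("vt", ...) returns; its exact contents are assumed.
var hwScaleFilter = "scale_vt=w=1920:h=1080:format=nv12";
var mainFilters = new List<string>
{
    "format=nv12|p010le|videotoolbox_vld",  // accept software or videotoolbox frames
    "hwupload=derive_device=videotoolbox",  // keep/move frames on the videotoolbox device
    hwScaleFilter + ":color_matrix=bt709:color_primaries=bt709:color_transfer=bt709" // scale + tonemap in one scale_vt pass
};
// With VT tonemapping handling HDR there is no hwmap hop to OpenCL, and the sub/overlay filter lists stay empty.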

View File

@ -136,6 +136,11 @@ public sealed class TranscodingJob : IDisposable
/// </summary>
public TranscodingThrottler? TranscodingThrottler { get; set; }
/// <summary>
/// Gets or sets transcoding segment cleaner.
/// </summary>
public TranscodingSegmentCleaner? TranscodingSegmentCleaner { get; set; }
/// <summary>
/// Gets or sets last ping date.
/// </summary>
@ -239,6 +244,7 @@ public sealed class TranscodingJob : IDisposable
{
#pragma warning disable CA1849 // Can't await in lock block
TranscodingThrottler?.Stop().GetAwaiter().GetResult();
TranscodingSegmentCleaner?.Stop();
var process = Process;
@ -276,5 +282,7 @@ public sealed class TranscodingJob : IDisposable
CancellationTokenSource = null;
TranscodingThrottler?.Dispose();
TranscodingThrottler = null;
TranscodingSegmentCleaner?.Dispose();
TranscodingSegmentCleaner = null;
}
}

View File

@ -0,0 +1,178 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Model.Configuration;
using MediaBrowser.Model.IO;
using Microsoft.Extensions.Logging;
namespace MediaBrowser.Controller.MediaEncoding;
/// <summary>
/// Transcoding segment cleaner.
/// </summary>
public class TranscodingSegmentCleaner : IDisposable
{
private readonly TranscodingJob _job;
private readonly ILogger<TranscodingSegmentCleaner> _logger;
private readonly IConfigurationManager _config;
private readonly IFileSystem _fileSystem;
private readonly IMediaEncoder _mediaEncoder;
private Timer? _timer;
private int _segmentLength;
/// <summary>
/// Initializes a new instance of the <see cref="TranscodingSegmentCleaner"/> class.
/// </summary>
/// <param name="job">Transcoding job dto.</param>
/// <param name="logger">Instance of the <see cref="ILogger{TranscodingSegmentCleaner}"/> interface.</param>
/// <param name="config">Instance of the <see cref="IConfigurationManager"/> interface.</param>
/// <param name="fileSystem">Instance of the <see cref="IFileSystem"/> interface.</param>
/// <param name="mediaEncoder">Instance of the <see cref="IMediaEncoder"/> interface.</param>
/// <param name="segmentLength">The segment length of this transcoding job.</param>
public TranscodingSegmentCleaner(TranscodingJob job, ILogger<TranscodingSegmentCleaner> logger, IConfigurationManager config, IFileSystem fileSystem, IMediaEncoder mediaEncoder, int segmentLength)
{
_job = job;
_logger = logger;
_config = config;
_fileSystem = fileSystem;
_mediaEncoder = mediaEncoder;
_segmentLength = segmentLength;
}
/// <summary>
/// Start timer.
/// </summary>
public void Start()
{
_timer = new Timer(TimerCallback, null, 20000, 20000);
}
/// <summary>
/// Stop cleaner.
/// </summary>
public void Stop()
{
DisposeTimer();
}
/// <summary>
/// Dispose cleaner.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Dispose cleaner.
/// </summary>
/// <param name="disposing">Disposing.</param>
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
DisposeTimer();
}
}
private EncodingOptions GetOptions()
{
return _config.GetEncodingOptions();
}
private async void TimerCallback(object? state)
{
if (_job.HasExited)
{
DisposeTimer();
return;
}
var options = GetOptions();
var enableSegmentDeletion = options.EnableSegmentDeletion;
var segmentKeepSeconds = Math.Max(options.SegmentKeepSeconds, 20);
if (enableSegmentDeletion)
{
var downloadPositionTicks = _job.DownloadPositionTicks ?? 0;
var downloadPositionSeconds = Convert.ToInt64(TimeSpan.FromTicks(downloadPositionTicks).TotalSeconds);
if (downloadPositionSeconds > 0 && segmentKeepSeconds > 0 && downloadPositionSeconds > segmentKeepSeconds)
{
var idxMaxToDelete = (downloadPositionSeconds - segmentKeepSeconds) / _segmentLength;
if (idxMaxToDelete > 0)
{
await DeleteSegmentFiles(_job, 0, idxMaxToDelete, 1500).ConfigureAwait(false);
}
}
}
}
private async Task DeleteSegmentFiles(TranscodingJob job, long idxMin, long idxMax, int delayMs)
{
var path = job.Path ?? throw new ArgumentException("Path can't be null.");
_logger.LogDebug("Deleting segment file(s) index {Min} to {Max} from {Path}", idxMin, idxMax, path);
await Task.Delay(delayMs).ConfigureAwait(false);
try
{
if (job.Type == TranscodingJobType.Hls)
{
DeleteHlsSegmentFiles(path, idxMin, idxMax);
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Error deleting segment file(s) {Path}", path);
}
}
private void DeleteHlsSegmentFiles(string outputFilePath, long idxMin, long idxMax)
{
var directory = Path.GetDirectoryName(outputFilePath)
?? throw new ArgumentException("Path can't be a root directory.", nameof(outputFilePath));
var name = Path.GetFileNameWithoutExtension(outputFilePath);
var filesToDelete = _fileSystem.GetFilePaths(directory)
.Where(f => long.TryParse(Path.GetFileNameWithoutExtension(f).Replace(name, string.Empty, StringComparison.Ordinal), out var idx)
&& (idx >= idxMin && idx <= idxMax));
List<Exception>? exs = null;
foreach (var file in filesToDelete)
{
try
{
_logger.LogDebug("Deleting HLS segment file {0}", file);
_fileSystem.DeleteFile(file);
}
catch (IOException ex)
{
(exs ??= new List<Exception>()).Add(ex);
_logger.LogDebug(ex, "Error deleting HLS segment file {Path}", file);
}
}
if (exs is not null)
{
throw new AggregateException("Error deleting HLS segment files", exs);
}
}
private void DisposeTimer()
{
if (_timer is not null)
{
_timer.Dispose();
_timer = null;
}
}
}
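A quick worked example of the cleanup math in TimerCallback above, using illustrative numbers (SegmentKeepSeconds and the segment length are configuration-dependent):

// Client has downloaded up to 300 s, SegmentKeepSeconds = 180, segment length = 6 s.
long downloadPositionSeconds = 300;
long segmentKeepSeconds = 180;
int segmentLength = 6;
var idxMaxToDelete = (downloadPositionSeconds - segmentKeepSeconds) / segmentLength; // = 20
Console.WriteLine(idxMaxToDelete);
// Segment files with indices 0..20 are deleted, keeping the segments that cover roughly
// the last 180 seconds before the client's download position plus everything newer.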

View File

@ -115,7 +115,7 @@ public class TranscodingThrottler : IDisposable
var options = GetOptions();
if (options.EnableThrottling && IsThrottleAllowed(_job, options.ThrottleDelaySeconds))
if (options.EnableThrottling && IsThrottleAllowed(_job, Math.Max(options.ThrottleDelaySeconds, 60)))
{
await PauseTranscoding().ConfigureAwait(false);
}

View File

@ -78,5 +78,10 @@ namespace MediaBrowser.Controller.Providers
return filePaths;
}
public bool IsAccessible(string path)
{
return _fileSystem.GetFileSystemEntryPaths(path).Any();
}
}
}
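One nuance of the implementation above worth calling out: GetFileSystemEntryPaths(path).Any() is false both for an unreachable path and for a reachable but empty directory, which is exactly what the library folder validation earlier in this diff relies on. For reference, that call site (repeated from the Folder change above, not new code):

if (item.IsTopParent && !directoryService.IsAccessible(item.ContainingFolderPath))
{
    Logger.LogWarning("Library folder {LibraryFolderPath} is inaccessible or empty, skipping", item.ContainingFolderPath);
    return false;
}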

View File

@ -16,5 +16,7 @@ namespace MediaBrowser.Controller.Providers
IReadOnlyList<string> GetFilePaths(string path);
IReadOnlyList<string> GetFilePaths(string path, bool clearCache, bool sort = false);
bool IsAccessible(string path);
}
}

View File

@ -139,7 +139,8 @@ namespace MediaBrowser.MediaEncoding.Attachments
var processArgs = string.Format(
CultureInfo.InvariantCulture,
"-dump_attachment:t \"\" -y -i {0} -t 0 -f null null",
"-dump_attachment:t \"\" -y {0} -i {1} -t 0 -f null null",
inputPath.EndsWith(".concat\"", StringComparison.OrdinalIgnoreCase) ? "-f concat -safe 0" : string.Empty,
inputPath);
int exitCode;
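A hedged sketch of the argument strings the updated format string produces; the input paths below are illustrative and already quoted, as the EndsWith(".concat\"") check expects:

using System;
using System.Globalization;

var format = "-dump_attachment:t \"\" -y {0} -i {1} -t 0 -f null null";

var concatInput = "\"/cache/temp/abc.concat\"";
Console.WriteLine(string.Format(
    CultureInfo.InvariantCulture,
    format,
    concatInput.EndsWith(".concat\"", StringComparison.OrdinalIgnoreCase) ? "-f concat -safe 0" : string.Empty,
    concatInput));
// -dump_attachment:t "" -y -f concat -safe 0 -i "/cache/temp/abc.concat" -t 0 -f null null

var plainInput = "\"/media/movie.mkv\"";
Console.WriteLine(string.Format(
    CultureInfo.InvariantCulture,
    format,
    plainInput.EndsWith(".concat\"", StringComparison.OrdinalIgnoreCase) ? "-f concat -safe 0" : string.Empty,
    plainInput));
// -dump_attachment:t "" -y  -i "/media/movie.mkv" -t 0 -f null null  (note the doubled space where {0} is empty)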

View File

@ -128,6 +128,8 @@ namespace MediaBrowser.MediaEncoding.Encoder
"overlay_vulkan",
// videotoolbox
"yadif_videotoolbox",
"scale_vt",
"overlay_videotoolbox",
// rkrga
"scale_rkrga",
"vpp_rkrga",
@ -144,17 +146,18 @@ namespace MediaBrowser.MediaEncoding.Encoder
{ 5, new string[] { "overlay_vulkan", "Action to take when encountering EOF from secondary input" } }
};
// These are the library versions that corresponds to our minimum ffmpeg version 4.x according to the version table below
// These are the library versions that correspond to our minimum ffmpeg version 4.4 according to the version table below
// Refers to the versions in https://ffmpeg.org/download.html
private static readonly Dictionary<string, Version> _ffmpegMinimumLibraryVersions = new Dictionary<string, Version>
{
{ "libavutil", new Version(56, 14) },
{ "libavcodec", new Version(58, 18) },
{ "libavformat", new Version(58, 12) },
{ "libavdevice", new Version(58, 3) },
{ "libavfilter", new Version(7, 16) },
{ "libswscale", new Version(5, 1) },
{ "libswresample", new Version(3, 1) },
{ "libpostproc", new Version(55, 1) }
{ "libavutil", new Version(56, 70) },
{ "libavcodec", new Version(58, 134) },
{ "libavformat", new Version(58, 76) },
{ "libavdevice", new Version(58, 13) },
{ "libavfilter", new Version(7, 110) },
{ "libswscale", new Version(5, 9) },
{ "libswresample", new Version(3, 9) },
{ "libpostproc", new Version(55, 9) }
};
private readonly ILogger _logger;
@ -174,7 +177,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
// When changing this, also change the minimum library versions in _ffmpegMinimumLibraryVersions
public static Version MinVersion { get; } = new Version(4, 0);
public static Version MinVersion { get; } = new Version(4, 4);
public static Version? MaxVersion { get; } = null;

View File

@ -80,6 +80,7 @@ namespace MediaBrowser.MediaEncoding.Probing
"5/8erl in Ehr'n",
"Smith/Kotzen",
"We;Na",
"LSR/CITY",
};
/// <summary>

View File

@ -321,7 +321,7 @@ public sealed class TranscodeManager : ITranscodeManager, IDisposable
}
catch (IOException ex)
{
(exs ??= new List<Exception>(4)).Add(ex);
(exs ??= new List<Exception>()).Add(ex);
_logger.LogError(ex, "Error deleting HLS file {Path}", file);
}
}
@ -546,6 +546,7 @@ public sealed class TranscodeManager : ITranscodeManager, IDisposable
if (!transcodingJob.HasExited)
{
StartThrottler(state, transcodingJob);
StartSegmentCleaner(state, transcodingJob);
}
else if (transcodingJob.ExitCode != 0)
{
@ -573,6 +574,22 @@ public sealed class TranscodeManager : ITranscodeManager, IDisposable
&& state.IsInputVideo
&& state.VideoType == VideoType.VideoFile;
private void StartSegmentCleaner(StreamState state, TranscodingJob transcodingJob)
{
if (EnableSegmentCleaning(state))
{
transcodingJob.TranscodingSegmentCleaner = new TranscodingSegmentCleaner(transcodingJob, _loggerFactory.CreateLogger<TranscodingSegmentCleaner>(), _serverConfigurationManager, _fileSystem, _mediaEncoder, state.SegmentLength);
transcodingJob.TranscodingSegmentCleaner.Start();
}
}
private static bool EnableSegmentCleaning(StreamState state)
=> state.InputProtocol is MediaProtocol.File or MediaProtocol.Http
&& state.IsInputVideo
&& state.TranscodingType == TranscodingJobType.Hls
&& state.RunTimeTicks.HasValue
&& state.RunTimeTicks.Value >= TimeSpan.FromMinutes(5).Ticks;
private TranscodingJob OnTranscodeBeginning(
string path,
string? playSessionId,
@ -724,7 +741,14 @@ public sealed class TranscodeManager : ITranscodeManager, IDisposable
foreach (var file in _fileSystem.GetFilePaths(path, true))
{
_fileSystem.DeleteFile(file);
try
{
_fileSystem.DeleteFile(file);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error deleting encoded media cache file {Path}", path);
}
}
}

View File

@ -28,6 +28,7 @@ public class EncodingOptions
VaapiDevice = "/dev/dri/renderD128";
EnableTonemapping = false;
EnableVppTonemapping = false;
EnableVideoToolboxTonemapping = false;
TonemappingAlgorithm = "bt2390";
TonemappingMode = "auto";
TonemappingRange = "auto";
@ -146,6 +147,11 @@ public class EncodingOptions
/// </summary>
public bool EnableVppTonemapping { get; set; }
/// <summary>
/// Gets or sets a value indicating whether videotoolbox tonemapping is enabled.
/// </summary>
public bool EnableVideoToolboxTonemapping { get; set; }
/// <summary>
/// Gets or sets the tone-mapping algorithm.
/// </summary>

View File

@ -27,6 +27,8 @@ namespace MediaBrowser.Model.Configuration
SeasonZeroDisplayName = "Specials";
}
public bool Enabled { get; set; } = true;
public bool EnablePhotos { get; set; }
public bool EnableRealtimeMonitor { get; set; }

View File

@ -345,7 +345,7 @@ namespace MediaBrowser.Model.Dlna
/// <param name="profile">The <see cref="DeviceProfile"/>.</param>
/// <param name="type">The <see cref="DlnaProfileType"/>.</param>
/// <param name="playProfile">The <see cref="DirectPlayProfile"/> object to get the video stream from.</param>
/// <returns>The the normalized input container.</returns>
/// <returns>The normalized input container.</returns>
public static string? NormalizeMediaSourceFormatIntoSingleContainer(string inputContainer, DeviceProfile? profile, DlnaProfileType type, DirectPlayProfile? playProfile = null)
{
if (string.IsNullOrEmpty(inputContainer))
@ -557,7 +557,7 @@ namespace MediaBrowser.Model.Dlna
private static void SetStreamInfoOptionsFromDirectPlayProfile(MediaOptions options, MediaSourceInfo item, StreamInfo playlistItem, DirectPlayProfile? directPlayProfile)
{
var container = NormalizeMediaSourceFormatIntoSingleContainer(item.Container, options.Profile, DlnaProfileType.Video, directPlayProfile);
var protocol = MediaStreamProtocol.Http;
var protocol = MediaStreamProtocol.http;
item.TranscodingContainer = container;
item.TranscodingSubProtocol = protocol;
@ -648,7 +648,7 @@ namespace MediaBrowser.Model.Dlna
if (directPlay == PlayMethod.DirectPlay)
{
playlistItem.SubProtocol = MediaStreamProtocol.Http;
playlistItem.SubProtocol = MediaStreamProtocol.http;
var audioStreamIndex = directPlayInfo.AudioStreamIndex ?? audioStream?.Index;
if (audioStreamIndex.HasValue)
@ -803,7 +803,7 @@ namespace MediaBrowser.Model.Dlna
var videoCodecs = ContainerProfile.SplitValue(videoCodec);
// Enforce HLS video codec restrictions
if (playlistItem.SubProtocol == MediaStreamProtocol.Hls)
if (playlistItem.SubProtocol == MediaStreamProtocol.hls)
{
videoCodecs = videoCodecs.Where(codec => _supportedHlsVideoCodecs.Contains(codec)).ToArray();
}
@ -840,7 +840,7 @@ namespace MediaBrowser.Model.Dlna
var audioCodecs = ContainerProfile.SplitValue(audioCodec);
// Enforce HLS audio codec restrictions
if (playlistItem.SubProtocol == MediaStreamProtocol.Hls)
if (playlistItem.SubProtocol == MediaStreamProtocol.hls)
{
if (string.Equals(playlistItem.Container, "mp4", StringComparison.OrdinalIgnoreCase))
{
@ -1350,7 +1350,7 @@ namespace MediaBrowser.Model.Dlna
/// <param name="transcoderSupport">The <see cref="ITranscoderSupport"/>.</param>
/// <param name="outputContainer">The output container.</param>
/// <param name="transcodingSubProtocol">The subtitle transoding protocol.</param>
/// <returns>The the normalized input container.</returns>
/// <returns>The normalized input container.</returns>
public static SubtitleProfile GetSubtitleProfile(
MediaSourceInfo mediaSource,
MediaStream subtitleStream,
@ -1360,7 +1360,7 @@ namespace MediaBrowser.Model.Dlna
string? outputContainer,
MediaStreamProtocol? transcodingSubProtocol)
{
if (!subtitleStream.IsExternal && (playMethod != PlayMethod.Transcode || transcodingSubProtocol != MediaStreamProtocol.Hls))
if (!subtitleStream.IsExternal && (playMethod != PlayMethod.Transcode || transcodingSubProtocol != MediaStreamProtocol.hls))
{
// Look for supported embedded subs of the same format
foreach (var profile in subtitleProfiles)

View File

@ -670,7 +670,7 @@ namespace MediaBrowser.Model.Dlna
if (MediaType == DlnaProfileType.Audio)
{
if (SubProtocol == MediaStreamProtocol.Hls)
if (SubProtocol == MediaStreamProtocol.hls)
{
return string.Format(CultureInfo.InvariantCulture, "{0}/audio/{1}/master.m3u8?{2}", baseUrl, ItemId, queryString);
}
@ -678,7 +678,7 @@ namespace MediaBrowser.Model.Dlna
return string.Format(CultureInfo.InvariantCulture, "{0}/audio/{1}/stream{2}?{3}", baseUrl, ItemId, extension, queryString);
}
if (SubProtocol == MediaStreamProtocol.Hls)
if (SubProtocol == MediaStreamProtocol.hls)
{
return string.Format(CultureInfo.InvariantCulture, "{0}/videos/{1}/master.m3u8?{2}", baseUrl, ItemId, queryString);
}
@ -716,7 +716,7 @@ namespace MediaBrowser.Model.Dlna
long startPositionTicks = item.StartPositionTicks;
if (item.SubProtocol == MediaStreamProtocol.Hls)
if (item.SubProtocol == MediaStreamProtocol.hls)
{
list.Add(new NameValuePair("StartTimeTicks", string.Empty));
}
@ -778,7 +778,7 @@ namespace MediaBrowser.Model.Dlna
list.Add(new NameValuePair("SubtitleCodec", item.SubtitleStreamIndex.HasValue && item.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Embed ? subtitleCodecs : string.Empty));
if (item.SubProtocol == MediaStreamProtocol.Hls)
if (item.SubProtocol == MediaStreamProtocol.hls)
{
list.Add(new NameValuePair("SegmentContainer", item.Container ?? string.Empty));
@ -829,7 +829,7 @@ namespace MediaBrowser.Model.Dlna
var list = new List<SubtitleStreamInfo>();
// HLS will preserve timestamps so we can just grab the full subtitle stream
long startPositionTicks = SubProtocol == MediaStreamProtocol.Hls
long startPositionTicks = SubProtocol == MediaStreamProtocol.hls
? 0
: (PlayMethod == PlayMethod.Transcode && !CopyTimestamps ? StartPositionTicks : 0);

View File

@ -27,7 +27,7 @@ namespace MediaBrowser.Model.Dlna
public string AudioCodec { get; set; } = string.Empty;
[XmlAttribute("protocol")]
public MediaStreamProtocol Protocol { get; set; } = MediaStreamProtocol.Http;
public MediaStreamProtocol Protocol { get; set; } = MediaStreamProtocol.http;
[DefaultValue(false)]
[XmlAttribute("estimateContentLength")]

View File

@ -1,4 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@ -31,7 +30,7 @@ namespace MediaBrowser.Model.Search
/// Gets or sets the search term.
/// </summary>
/// <value>The search term.</value>
public string SearchTerm { get; set; }
public required string SearchTerm { get; set; }
/// <summary>
/// Gets or sets the start index. Used for paging.

View File

@ -1,4 +1,3 @@
#nullable disable
#pragma warning disable CS1591
using System;
@ -29,6 +28,6 @@ namespace MediaBrowser.Model.System
/// Gets or sets the name.
/// </summary>
/// <value>The name.</value>
public string Name { get; set; }
public required string Name { get; set; }
}
}

View File

@ -24,7 +24,7 @@ namespace MediaBrowser.Model.Tasks
/// <param name="lastResult">Result of the last run triggered task.</param>
/// <param name="logger">The <see cref="ILogger"/>.</param>
/// <param name="taskName">The name of the task.</param>
/// <param name="isApplicationStartup">Whether or not this is is fired during startup.</param>
/// <param name="isApplicationStartup">Whether or not this is fired during startup.</param>
void Start(TaskResult? lastResult, ILogger logger, string taskName, bool isApplicationStartup);
/// <summary>

View File

@ -773,7 +773,8 @@ namespace MediaBrowser.Providers.Manager
MergeData(metadata, temp, Array.Empty<MetadataField>(), false, false);
}
MergeData(temp, metadata, item.LockedFields, true, false);
// Always replace all metadata when "Scan for new and updated files" is used; otherwise, follow the refresh options.
MergeData(temp, metadata, item.LockedFields, options.MetadataRefreshMode == MetadataRefreshMode.Default || options.ReplaceAllMetadata, false);
}
}
}

View File

@ -229,6 +229,7 @@ namespace MediaBrowser.Providers.MediaInfo
audio.RunTimeTicks = mediaInfo.RunTimeTicks;
audio.Size = mediaInfo.Size;
audio.PremiereDate = mediaInfo.PremiereDate;
if (!audio.IsLocked)
{
@ -349,7 +350,7 @@ namespace MediaBrowser.Providers.MediaInfo
}
}
if (!audio.LockedFields.Contains(MetadataField.Name))
if (!audio.LockedFields.Contains(MetadataField.Name) && !string.IsNullOrEmpty(tags.Title))
{
audio.Name = options.ReplaceAllMetadata || string.IsNullOrEmpty(audio.Name) ? tags.Title.Trim() : audio.Name;
}
@ -371,7 +372,11 @@ namespace MediaBrowser.Providers.MediaInfo
{
var year = Convert.ToInt32(tags.Year);
audio.ProductionYear = year;
audio.PremiereDate = new DateTime(year, 01, 01);
if (!audio.PremiereDate.HasValue)
{
audio.PremiereDate = new DateTime(year, 01, 01);
}
}
if (!audio.LockedFields.Contains(MetadataField.Genres))

1
build
View File

@ -1 +0,0 @@
build.sh

114
build.sh
View File

@ -1,114 +0,0 @@
#!/usr/bin/env bash
# build.sh - Build Jellyfin binary packages
# Part of the Jellyfin Project
set -o errexit
set -o pipefail
usage() {
echo -e "build.sh - Build Jellyfin binary packages"
echo -e "Usage:"
echo -e " $0 -t/--type <BUILD_TYPE> -p/--platform <PLATFORM> [-k/--keep-artifacts] [-l/--list-platforms]"
echo -e "Notes:"
echo -e " * BUILD_TYPE can be one of: [native, docker] and must be specified"
echo -e " * native: Build using the build script in the host OS"
echo -e " * docker: Build using the build script in a standardized Docker container"
echo -e " * PLATFORM can be any platform shown by -l/--list-platforms and must be specified"
echo -e " * If -k/--keep-artifacts is specified, transient artifacts (e.g. Docker containers) will be"
echo -e " retained after the build is finished; the source directory will still be cleaned"
echo -e " * If -l/--list-platforms is specified, all other arguments are ignored; the script will print"
echo -e " the list of supported platforms and exit"
}
list_platforms() {
declare -a platforms
platforms=(
$( find deployment -maxdepth 1 -mindepth 1 -name "build.*" | awk -F'.' '{ $1=""; printf $2; if ($3 != ""){ printf "." $3; }; if ($4 != ""){ printf "." $4; }; print ""; }' | sort )
)
echo -e "Valid platforms:"
echo
for platform in ${platforms[@]}; do
echo -e "* ${platform} : $( grep '^#=' deployment/build.${platform} | sed 's/^#= //' )"
done
}
do_build_native() {
if [[ ! -f $( which dpkg ) || $( dpkg --print-architecture | head -1 ) != "${PLATFORM##*.}" ]]; then
echo "Cross-building is not supported for native builds, use 'docker' builds on amd64 for cross-building."
exit 1
fi
export IS_DOCKER=NO
deployment/build.${PLATFORM}
}
do_build_docker() {
if [[ -f $( which dpkg ) && $( dpkg --print-architecture | head -1 ) != "amd64" ]]; then
echo "Docker-based builds only support amd64-based cross-building; use a 'native' build instead."
exit 1
fi
if [[ ! -f deployment/Dockerfile.${PLATFORM} ]]; then
echo "Missing Dockerfile for platform ${PLATFORM}"
exit 1
fi
if [[ ${KEEP_ARTIFACTS} == YES ]]; then
docker_args=""
else
docker_args="--rm"
fi
docker build . -t "jellyfin-builder.${PLATFORM}" -f deployment/Dockerfile.${PLATFORM}
mkdir -p ${ARTIFACT_DIR}
docker run $docker_args -v "${SOURCE_DIR}:/jellyfin" -v "${ARTIFACT_DIR}:/dist" "jellyfin-builder.${PLATFORM}"
}
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
-t|--type)
BUILD_TYPE="$2"
shift # past argument
shift # past value
;;
-p|--platform)
PLATFORM="$2"
shift # past argument
shift # past value
;;
-k|--keep-artifacts)
KEEP_ARTIFACTS=YES
shift # past argument
;;
-l|--list-platforms)
list_platforms
exit 0
;;
-h|--help)
usage
exit 0
;;
*) # unknown option
echo "Unknown option $1"
usage
exit 1
;;
esac
done
if [[ -z ${BUILD_TYPE} || -z ${PLATFORM} ]]; then
usage
exit 1
fi
export SOURCE_DIR="$( pwd )"
export ARTIFACT_DIR="${SOURCE_DIR}/../bin/${PLATFORM}"
# Determine build type
case ${BUILD_TYPE} in
native)
do_build_native
;;
docker)
do_build_docker
;;
esac

View File

@ -1,18 +0,0 @@
---
# We just wrap `build` so this is really it
name: "jellyfin"
version: "10.9.0"
packages:
- debian.amd64
- debian.arm64
- debian.armhf
- ubuntu.amd64
- ubuntu.arm64
- ubuntu.armhf
- fedora.amd64
- centos.amd64
- linux.amd64
- windows.amd64
- macos.amd64
- macos.arm64
- portable

89
debian/changelog vendored
View File

@ -1,89 +0,0 @@
jellyfin-server (10.9.0-1) unstable; urgency=medium
* New upstream version 10.9.0; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.9.0
-- Jellyfin Packaging Team <packaging@jellyfin.org> Wed, 13 Jul 2022 20:58:08 -0600
jellyfin-server (10.8.0-1) unstable; urgency=medium
* Forthcoming stable release
-- Jellyfin Packaging Team <packaging@jellyfin.org> Fri, 04 Dec 2020 21:55:12 -0500
jellyfin-server (10.7.0-1) unstable; urgency=medium
* Forthcoming stable release
-- Jellyfin Packaging Team <packaging@jellyfin.org> Mon, 27 Jul 2020 19:09:45 -0400
jellyfin-server (10.6.0-2) unstable; urgency=medium
* Fix upgrade bug
-- Joshua Boniface <joshua@boniface.me> Sun, 19 Jul 22:47:27 -0400
jellyfin-server (10.6.0-1) unstable; urgency=medium
* Forthcoming stable release
-- Jellyfin Packaging Team <packaging@jellyfin.org> Mon, 23 Mar 2020 14:46:05 -0400
jellyfin (10.5.0-1) unstable; urgency=medium
* New upstream version 10.5.0; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.5.0
-- Jellyfin Packaging Team <packaging@jellyfin.org> Fri, 11 Oct 2019 20:12:38 -0400
jellyfin (10.4.0-1) unstable; urgency=medium
* New upstream version 10.4.0; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.4.0
-- Jellyfin Packaging Team <packaging@jellyfin.org> Sat, 31 Aug 2019 21:38:56 -0400
jellyfin (10.3.7-1) unstable; urgency=medium
* New upstream version 10.3.7; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.7
-- Jellyfin Packaging Team <packaging@jellyfin.org> Wed, 24 Jul 2019 10:48:28 -0400
jellyfin (10.3.6-1) unstable; urgency=medium
* New upstream version 10.3.6; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.6
-- Jellyfin Packaging Team <packaging@jellyfin.org> Sat, 06 Jul 2019 13:34:19 -0400
jellyfin (10.3.5-1) unstable; urgency=medium
* New upstream version 10.3.5; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.5
-- Jellyfin Packaging Team <packaging@jellyfin.org> Sun, 09 Jun 2019 21:47:35 -0400
jellyfin (10.3.4-1) unstable; urgency=medium
* New upstream version 10.3.4; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.4
-- Jellyfin Packaging Team <packaging@jellyfin.org> Thu, 06 Jun 2019 22:45:31 -0400
jellyfin (10.3.3-1) unstable; urgency=medium
* New upstream version 10.3.3; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.3
-- Jellyfin Packaging Team <packaging@jellyfin.org> Fri, 17 May 2019 23:12:08 -0400
jellyfin (10.3.2-1) unstable; urgency=medium
* New upstream version 10.3.2; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.2
-- Jellyfin Packaging Team <packaging@jellyfin.org> Tue, 30 Apr 2019 20:18:44 -0400
jellyfin (10.3.1-1) unstable; urgency=medium
* New upstream version 10.3.1; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.1
-- Jellyfin Packaging Team <packaging@jellyfin.org> Sat, 20 Apr 2019 14:24:07 -0400
jellyfin (10.3.0-1) unstable; urgency=medium
* New upstream version 10.3.0; release changelog at https://github.com/jellyfin/jellyfin/releases/tag/v10.3.0
-- Jellyfin Packaging Team <packaging@jellyfin.org> Fri, 19 Apr 2019 14:24:29 -0400

1
debian/compat vendored
View File

@ -1 +0,0 @@
8

53
debian/conf/jellyfin vendored
View File

@ -1,53 +0,0 @@
# Jellyfin default configuration options
# This is a POSIX shell fragment
# Use this file to override the default configurations; add additional
# options with JELLYFIN_ADD_OPTS.
# Under systemd, use
# /etc/systemd/system/jellyfin.service.d/jellyfin.service.conf
# to override the user or this config file's location.
#
# General options
#
# Program directories
JELLYFIN_DATA_DIR="/var/lib/jellyfin"
JELLYFIN_CONFIG_DIR="/etc/jellyfin"
JELLYFIN_LOG_DIR="/var/log/jellyfin"
JELLYFIN_CACHE_DIR="/var/cache/jellyfin"
# web client path, installed by the jellyfin-web package
JELLYFIN_WEB_OPT="--webdir=/usr/share/jellyfin/web"
# ffmpeg binary paths, overriding the system values
JELLYFIN_FFMPEG_OPT="--ffmpeg=/usr/lib/jellyfin-ffmpeg/ffmpeg"
# Disable glibc dynamic heap adjustment
MALLOC_TRIM_THRESHOLD_=131072
# [OPTIONAL] run Jellyfin as a headless service
#JELLYFIN_SERVICE_OPT="--service"
# [OPTIONAL] run Jellyfin without the web app
#JELLYFIN_NOWEBAPP_OPT="--nowebclient"
# Space to add additional command line options to jellyfin (for help see ~$ jellyfin --help)
JELLYFIN_ADDITIONAL_OPTS=""
# [OPTIONAL] run Jellyfin with ASP.NET Server Garbage Collection (uses more RAM and less CPU than Workstation GC)
# 0 = Workstation
# 1 = Server
#COMPlus_gcServer=1
#
# SysV init/Upstart options
#
# Note: These options are ignored by systemd; use /etc/systemd/system/jellyfin.d overrides instead.
#
# Application username
JELLYFIN_USER="jellyfin"
# Full application command
JELLYFIN_ARGS="$JELLYFIN_WEB_OPT $JELLYFIN_FFMPEG_OPT $JELLYFIN_SERVICE_OPT $JELLYFIN_NOWEBAPP_OPT $JELLFIN_ADDITIONAL_OPTS --datadir $JELLYFIN_DATA_DIR --configdir $JELLYFIN_CONFIG_DIR --logdir $JELLYFIN_LOG_DIR --cachedir $JELLYFIN_CACHE_DIR"

View File

@ -1,55 +0,0 @@
# Jellyfin systemd configuration options
# Use this file to override the user or environment file location.
[Service]
# Alter the user that Jellyfin runs as
#User = jellyfin
# Alter where environment variables are sourced from
#EnvironmentFile = /etc/default/jellyfin
# Service hardening options
# These were added in PR #6953 to solve issue #6952, but some combination of
# them causes "restart.sh" functionality to break with the following error:
# sudo: effective uid is not 0, is /usr/bin/sudo on a file system with the
# 'nosuid' option set or an NFS file system without root privileges?
# See issue #7503 for details on the troubleshooting that went into this.
# Since these were added for NixOS specifically and are above and beyond
# what 99% of systemd units do, they have been moved here as optional
# additional flags to set for maximum system security and can be enabled at
# the administrator's or package maintainer's discretion.
# Uncomment these only if you know what you're doing, and doing so may cause
# bugs with in-server Restart and potentially other functionality as well.
#NoNewPrivileges=true
#SystemCallArchitectures=native
#RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6 AF_NETLINK
#RestrictNamespaces=false
#RestrictRealtime=true
#RestrictSUIDSGID=true
#ProtectControlGroups=false
#ProtectHostname=true
#ProtectKernelLogs=false
#ProtectKernelModules=false
#ProtectKernelTunables=false
#LockPersonality=true
#PrivateTmp=false
#PrivateDevices=false
#PrivateUsers=true
#RemoveIPC=true
#SystemCallFilter=~@clock
#SystemCallFilter=~@aio
#SystemCallFilter=~@chown
#SystemCallFilter=~@cpu-emulation
#SystemCallFilter=~@debug
#SystemCallFilter=~@keyring
#SystemCallFilter=~@memlock
#SystemCallFilter=~@module
#SystemCallFilter=~@mount
#SystemCallFilter=~@obsolete
#SystemCallFilter=~@privileged
#SystemCallFilter=~@raw-io
#SystemCallFilter=~@reboot
#SystemCallFilter=~@setuid
#SystemCallFilter=~@swap
#SystemCallErrorNumber=EPERM

View File

@ -1,30 +0,0 @@
{
"Serilog": {
"MinimumLevel": "Information",
"WriteTo": [
{
"Name": "Console",
"Args": {
"outputTemplate": "[{Timestamp:HH:mm:ss}] [{Level:u3}] {Message:lj}{NewLine}{Exception}"
}
},
{
"Name": "Async",
"Args": {
"configure": [
{
"Name": "File",
"Args": {
"path": "%JELLYFIN_LOG_DIR%//jellyfin.log",
"fileSizeLimitBytes": 10485700,
"rollOnFileSizeLimit": true,
"retainedFileCountLimit": 10,
"outputTemplate": "[{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz}] [{Level:u3}] {Message}{NewLine}{Exception}"
}
}
]
}
}
]
}
}

27
debian/control vendored
View File

@ -1,27 +0,0 @@
Source: jellyfin-server
Section: misc
Priority: optional
Maintainer: Jellyfin Team <team@jellyfin.org>
Build-Depends: debhelper (>= 9),
dotnet-sdk-8.0,
libc6-dev,
libcurl4-openssl-dev,
libfontconfig1-dev,
libfreetype6-dev,
libssl-dev
Standards-Version: 3.9.4
Homepage: https://jellyfin.org/
Vcs-Git: https://github.org/jellyfin/jellyfin.git
Vcs-Browser: https://github.org/jellyfin/jellyfin
Package: jellyfin-server
Replaces: jellyfin (<<10.6.0)
Breaks: jellyfin (<<10.6.0)
Architecture: any
Depends: libsqlite3-0,
libfontconfig1,
libfreetype6,
libssl1.1 | libssl3
Recommends: jellyfin-web
Description: Jellyfin is the Free Software Media System.
This package provides the Jellyfin server backend and API.

29
debian/copyright vendored
View File

@ -1,29 +0,0 @@
Format: http://dep.debian.net/deps/dep5
Upstream-Name: jellyfin
Source: https://github.com/jellyfin/jellyfin
Files: *
Copyright: 2018 Jellyfin Team
License: GPL-2.0+
Files: debian/*
Copyright: 2018 Joshua Boniface <joshua@boniface.me>
Copyright: 2014 Carlos Hernandez <carlos@techbyte.ca>
License: GPL-2.0+
License: GPL-2.0+
This package is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.
This package is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
.
On Debian systems, the complete text of the GNU General
Public License version 2 can be found in "/usr/share/common-licenses/GPL-2".

6
debian/gbp.conf vendored
View File

@ -1,6 +0,0 @@
[DEFAULT]
pristine-tar = False
cleaner = fakeroot debian/rules clean
[import-orig]
filter = [ ".git*", ".hg*", ".vs*", ".vscode*" ]

4
debian/install vendored
View File

@ -1,4 +0,0 @@
usr/lib/jellyfin usr/lib/
debian/conf/jellyfin etc/default/
debian/conf/logging.json etc/jellyfin/
debian/conf/jellyfin.service.conf etc/systemd/system/jellyfin.service.d/

62
debian/jellyfin.init vendored
View File

@ -1,62 +0,0 @@
#!/bin/sh
### BEGIN INIT INFO
# Provides: Jellyfin Media Server
# Required-Start: $local_fs $network
# Required-Stop: $local_fs
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Jellyfin Media Server
# Description: Runs Jellyfin Server
### END INIT INFO
set -e
# Carry out specific functions when asked to by the system
if test -f /etc/default/jellyfin; then
. /etc/default/jellyfin
fi
. /lib/lsb/init-functions
PIDFILE="/run/jellyfin.pid"
case "$1" in
start)
log_daemon_msg "Starting Jellyfin Media Server" "jellyfin" || true
if start-stop-daemon --start --quiet --oknodo --background --pidfile $PIDFILE --make-pidfile --user $JELLYFIN_USER --chuid $JELLYFIN_USER --exec /usr/bin/jellyfin -- $JELLYFIN_ARGS; then
log_end_msg 0 || true
else
log_end_msg 1 || true
fi
;;
stop)
log_daemon_msg "Stopping Jellyfin Media Server" "jellyfin" || true
if start-stop-daemon --stop --quiet --oknodo --pidfile $PIDFILE --remove-pidfile; then
log_end_msg 0 || true
else
log_end_msg 1 || true
fi
;;
restart)
log_daemon_msg "Restarting Jellyfin Media Server" "jellyfin" || true
start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $PIDFILE --remove-pidfile
if start-stop-daemon --start --quiet --oknodo --background --pidfile $PIDFILE --make-pidfile --user $JELLYFIN_USER --chuid $JELLYFIN_USER --exec /usr/bin/jellyfin -- $JELLYFIN_ARGS; then
log_end_msg 0 || true
else
log_end_msg 1 || true
fi
;;
status)
status_of_proc -p $PIDFILE /usr/bin/jellyfin jellyfin && exit 0 || exit $?
;;
*)
echo "Usage: $0 {start|stop|restart|status}"
exit 1
;;
esac

View File

@ -1,17 +0,0 @@
[Unit]
Description = Jellyfin Media Server
After = network-online.target
[Service]
Type = simple
EnvironmentFile = /etc/default/jellyfin
User = jellyfin
Group = jellyfin
WorkingDirectory = /var/lib/jellyfin
ExecStart = /usr/bin/jellyfin $JELLYFIN_WEB_OPT $JELLYFIN_FFMPEG_OPT $JELLYFIN_SERVICE_OPT $JELLYFIN_NOWEBAPP_OPT $JELLYFIN_ADDITIONAL_OPTS
Restart = on-failure
TimeoutSec = 15
SuccessExitStatus=0 143
[Install]
WantedBy = multi-user.target

View File

@ -1,20 +0,0 @@
description "jellyfin daemon"
start on (local-filesystems and net-device-up IFACE!=lo)
stop on runlevel [!2345]
console log
respawn
respawn limit 10 5
kill timeout 20
script
set -x
echo "Starting $UPSTART_JOB"
# Log file
logger -t "$0" "DEBUG: `set`"
. /etc/default/jellyfin
exec su -s /bin/sh -c "exec /usr/bin/jellyfin $JELLYFIN_ARGS" $JELLYFIN_USER
end script


@ -1,13 +0,0 @@
Source: jellyfin
Section: misc
Priority: optional
Homepage: https://jellyfin.org
Standards-Version: 3.9.2
Package: jellyfin
Version: 10.9.0
Maintainer: Jellyfin Packaging Team <packaging@jellyfin.org>
Depends: jellyfin-server, jellyfin-web
Description: Provides the Jellyfin Free Software Media System
Provides the full Jellyfin experience, including both the server and web interface.


@ -1 +0,0 @@
[type: gettext/rfc822deb] templates


@ -1,57 +0,0 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: jellyfin-server\n"
"Report-Msgid-Bugs-To: jellyfin-server@packages.debian.org\n"
"POT-Creation-Date: 2015-06-12 20:51-0600\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Transfer-Encoding: 8bit\n"
#. Type: note
#. Description
#: ../templates:1001
msgid "Jellyfin permission info:"
msgstr ""
#. Type: note
#. Description
#: ../templates:1001
msgid ""
"Jellyfin by default runs under a user named \"jellyfin\". Please ensure that the "
"user jellyfin has read and write access to any folders you wish to add to your "
"library. Otherwise please run jellyfin under a different user."
msgstr ""
#. Type: string
#. Description
#: ../templates:2001
msgid "Username to run Jellyfin as:"
msgstr ""
#. Type: string
#. Description
#: ../templates:2001
msgid "The user that jellyfin will run as."
msgstr ""
#. Type: note
#. Description
#: ../templates:3001
msgid "Jellyfin still running"
msgstr ""
#. Type: note
#. Description
#: ../templates:3001
msgid "Jellyfin is currently running. Please close it and try again."
msgstr ""

102
debian/postinst vendored

@ -1,102 +0,0 @@
#!/bin/bash
set -e
NAME=jellyfin
DEFAULT_FILE=/etc/default/${NAME}
# Source Jellyfin default configuration
if [[ -f $DEFAULT_FILE ]]; then
. $DEFAULT_FILE
fi
JELLYFIN_USER=${JELLYFIN_USER:-jellyfin}
RENDER_GROUP=${RENDER_GROUP:-render}
VIDEO_GROUP=${VIDEO_GROUP:-video}
# Data directories for program data (cache, db), configs, and logs
PROGRAMDATA=${JELLYFIN_DATA_DIRECTORY-/var/lib/$NAME}
CONFIGDATA=${JELLYFIN_CONFIG_DIRECTORY-/etc/$NAME}
LOGDATA=${JELLYFIN_LOG_DIRECTORY-/var/log/$NAME}
CACHEDATA=${JELLYFIN_CACHE_DIRECTORY-/var/cache/$NAME}
case "$1" in
configure)
# create jellyfin group if it does not exist
if [[ -z "$(getent group ${JELLYFIN_USER})" ]]; then
addgroup --quiet --system ${JELLYFIN_USER} > /dev/null 2>&1
fi
# create jellyfin user if it does not exist
if [[ -z "$(getent passwd ${JELLYFIN_USER})" ]]; then
adduser --system --ingroup ${JELLYFIN_USER} --shell /bin/false ${JELLYFIN_USER} --no-create-home --home ${PROGRAMDATA} \
--gecos "Jellyfin default user" > /dev/null 2>&1
fi
# add jellyfin to the render group for hwa
if [[ ! -z "$(getent group ${RENDER_GROUP})" ]]; then
usermod -aG ${RENDER_GROUP} ${JELLYFIN_USER} > /dev/null 2>&1
fi
# add jellyfin to the video group for hwa
if [[ ! -z "$(getent group ${VIDEO_GROUP})" ]]; then
usermod -aG ${VIDEO_GROUP} ${JELLYFIN_USER} > /dev/null 2>&1
fi
# ensure $PROGRAMDATA exists
if [[ ! -d $PROGRAMDATA ]]; then
mkdir $PROGRAMDATA
fi
# ensure $CONFIGDATA exists
if [[ ! -d $CONFIGDATA ]]; then
mkdir $CONFIGDATA
fi
# ensure $LOGDATA exists
if [[ ! -d $LOGDATA ]]; then
mkdir $LOGDATA
fi
# ensure $CACHEDATA exists
if [[ ! -d $CACHEDATA ]]; then
mkdir $CACHEDATA
fi
# Ensure permissions are correct on all config directories
chown -R ${JELLYFIN_USER} $PROGRAMDATA $CONFIGDATA $LOGDATA $CACHEDATA
chgrp adm $PROGRAMDATA $CONFIGDATA $LOGDATA $CACHEDATA
chmod 0750 $PROGRAMDATA $CONFIGDATA $LOGDATA $CACHEDATA
# Install jellyfin symlink into /usr/bin
ln -sf /usr/lib/jellyfin/bin/jellyfin /usr/bin/jellyfin
;;
abort-upgrade|abort-remove|abort-deconfigure)
;;
*)
echo "postinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
if [[ -x "/usr/bin/deb-systemd-helper" ]]; then
# Manual init script handling
deb-systemd-helper unmask jellyfin.service >/dev/null || true
# was-enabled defaults to true, so new installations run enable.
if deb-systemd-helper --quiet was-enabled jellyfin.service; then
# Enables the unit on first installation, creates new
# symlinks on upgrades if the unit file has changed.
deb-systemd-helper enable jellyfin.service >/dev/null || true
else
# Update the statefile to add new symlinks (if any), which need to be
# cleaned up on purge. Also remove old symlinks.
deb-systemd-helper update-state jellyfin.service >/dev/null || true
fi
fi
# End automatically added section
# Automatically added by dh_installinit
if [[ "$1" == "configure" ]] || [[ "$1" == "abort-upgrade" ]]; then
if [[ -d "/run/systemd/system" ]]; then
systemctl --system daemon-reload >/dev/null || true
deb-systemd-invoke start jellyfin >/dev/null || true
elif [[ -x "/etc/init.d/jellyfin" ]] || [[ -e "/etc/init/jellyfin.conf" ]]; then
update-rc.d jellyfin defaults >/dev/null
invoke-rc.d jellyfin start || exit $?
fi
fi
exit 0

81
debian/postrm vendored

@ -1,81 +0,0 @@
#!/bin/bash
set -e
NAME=jellyfin
DEFAULT_FILE=/etc/default/${NAME}
# Source Jellyfin default configuration
if [[ -f $DEFAULT_FILE ]]; then
. $DEFAULT_FILE
fi
# Data directories for program data (cache, db), configs, and logs
PROGRAMDATA=${JELLYFIN_DATA_DIRECTORY-/var/lib/$NAME}
CONFIGDATA=${JELLYFIN_CONFIG_DIRECTORY-/etc/$NAME}
LOGDATA=${JELLYFIN_LOG_DIRECTORY-/var/log/$NAME}
CACHEDATA=${JELLYFIN_CACHE_DIRECTORY-/var/cache/$NAME}
# In case this system is running systemd, we make systemd reload the unit files
# to pick up changes.
if [[ -d /run/systemd/system ]] ; then
systemctl --system daemon-reload >/dev/null || true
fi
case "$1" in
purge)
echo PURGE | debconf-communicate $NAME > /dev/null 2>&1 || true
if [[ -x "/etc/init.d/jellyfin" ]] || [[ -e "/etc/init/jellyfin.conf" ]]; then
update-rc.d jellyfin remove >/dev/null 2>&1 || true
fi
if [[ -x "/usr/bin/deb-systemd-helper" ]]; then
deb-systemd-helper purge jellyfin.service >/dev/null
deb-systemd-helper unmask jellyfin.service >/dev/null
fi
# Remove user and group
userdel jellyfin > /dev/null 2>&1 || true
delgroup --quiet jellyfin > /dev/null 2>&1 || true
# Remove config dir
if [[ -d $CONFIGDATA ]]; then
rm -rf $CONFIGDATA
fi
# Remove log dir
if [[ -d $LOGDATA ]]; then
rm -rf $LOGDATA
fi
# Remove cache dir
if [[ -d $CACHEDATA ]]; then
rm -rf $CACHEDATA
fi
# Remove program data dir
if [[ -d $PROGRAMDATA ]]; then
rm -rf $PROGRAMDATA
fi
# Remove binary symlink
rm -f /usr/bin/jellyfin
# Remove sudoers config
[[ -f /etc/sudoers.d/jellyfin-sudoers ]] && rm /etc/sudoers.d/jellyfin-sudoers
# Remove anything at the default locations; catches situations where the user moved the defaults
[[ -e /etc/jellyfin ]] && rm -rf /etc/jellyfin
[[ -e /var/log/jellyfin ]] && rm -rf /var/log/jellyfin
[[ -e /var/cache/jellyfin ]] && rm -rf /var/cache/jellyfin
[[ -e /var/lib/jellyfin ]] && rm -rf /var/lib/jellyfin
;;
remove)
if [[ -x "/usr/bin/deb-systemd-helper" ]]; then
deb-systemd-helper mask jellyfin.service >/dev/null
fi
;;
upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
;;
*)
echo "postrm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

78
debian/preinst vendored

@ -1,78 +0,0 @@
#!/bin/bash
set -e
NAME=jellyfin
DEFAULT_FILE=/etc/default/${NAME}
# Source Jellyfin default configuration
if [[ -f $DEFAULT_FILE ]]; then
. $DEFAULT_FILE
fi
# Data directories for program data (cache, db), configs, and logs
PROGRAMDATA=${JELLYFIN_DATA_DIRECTORY-/var/lib/$NAME}
CONFIGDATA=${JELLYFIN_CONFIG_DIRECTORY-/etc/$NAME}
LOGDATA=${JELLYFIN_LOG_DIRECTORY-/var/log/$NAME}
CACHEDATA=${JELLYFIN_CACHE_DIRECTORY-/var/cache/$NAME}
# In case this system is running systemd, we make systemd reload the unit files
# to pick up changes.
if [[ -d /run/systemd/system ]] ; then
systemctl --system daemon-reload >/dev/null || true
fi
case "$1" in
install|upgrade)
# try graceful termination;
if [[ -d /run/systemd/system ]]; then
deb-systemd-invoke stop ${NAME}.service > /dev/null 2>&1 || true
elif [ -x "/etc/init.d/${NAME}" ] || [ -e "/etc/init/${NAME}.conf" ]; then
invoke-rc.d ${NAME} stop > /dev/null 2>&1 || true
fi
# try and figure out if jellyfin is running
PIDFILE=$(find /var/run/ -maxdepth 1 -mindepth 1 -name "jellyfin*.pid" -print -quit)
[[ -n "$PIDFILE" ]] && [[ -s "$PIDFILE" ]] && JELLYFIN_PID=$(cat ${PIDFILE})
# if its running, let's stop it
if [[ -n "$JELLYFIN_PID" ]]; then
echo "Stopping Jellyfin!"
# if jellyfin is still running, kill it
if [[ -n "$(ps -p $JELLYFIN_PID -o pid=)" ]]; then
CPIDS=$(pgrep -P $JELLYFIN_PID)
sleep 2 && kill -KILL $CPIDS
kill -TERM $CPIDS > /dev/null 2>&1
fi
sleep 1
# if it's still running, show error
if [[ -n "$(ps -p $JELLYFIN_PID -o pid=)" ]]; then
echo "Could not successfully stop JellyfinServer, please do so before uninstalling."
exit 1
else
[[ -f $PIDFILE ]] && rm $PIDFILE
fi
fi
# Clean up old Emby cruft that can break the user's system
[[ -f /etc/sudoers.d/emby ]] && rm -f /etc/sudoers.d/emby
# If we have existing config, log, or cache dirs in /var/lib/jellyfin, move them into the right place
if [[ -d $PROGRAMDATA/config ]]; then
mv $PROGRAMDATA/config $CONFIGDATA
fi
if [[ -d $PROGRAMDATA/logs ]]; then
mv $PROGRAMDATA/logs $LOGDATA
fi
if [[ -d $PROGRAMDATA/cache ]]; then
mv $PROGRAMDATA/cache $CACHEDATA
fi
;;
abort-upgrade)
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

61
debian/prerm vendored

@ -1,61 +0,0 @@
#!/bin/bash
set -e
NAME=jellyfin
DEFAULT_FILE=/etc/default/${NAME}
# Source Jellyfin default configuration
if [[ -f $DEFAULT_FILE ]]; then
. $DEFAULT_FILE
fi
# Data directories for program data (cache, db), configs, and logs
PROGRAMDATA=${JELLYFIN_DATA_DIRECTORY-/var/lib/$NAME}
CONFIGDATA=${JELLYFIN_CONFIG_DIRECTORY-/etc/$NAME}
LOGDATA=${JELLYFIN_LOG_DIRECTORY-/var/log/$NAME}
CACHEDATA=${JELLYFIN_CACHE_DIRECTORY-/var/cache/$NAME}
case "$1" in
remove|upgrade|deconfigure)
echo "Stopping Jellyfin!"
# try graceful termination;
if [[ -d /run/systemd/system ]]; then
deb-systemd-invoke stop ${NAME}.service > /dev/null 2>&1 || true
elif [ -x "/etc/init.d/${NAME}" ] || [ -e "/etc/init/${NAME}.conf" ]; then
invoke-rc.d ${NAME} stop > /dev/null 2>&1 || true
fi
# Ensure that it is shutdown
PIDFILE=$(find /var/run/ -maxdepth 1 -mindepth 1 -name "jellyfin*.pid" -print -quit)
[[ -n "$PIDFILE" ]] && [[ -s "$PIDFILE" ]] && JELLYFIN_PID=$(cat ${PIDFILE})
# if its running, let's stop it
if [[ -n "$JELLYFIN_PID" ]]; then
# if jellyfin is still running, kill it
if [[ -n "$(ps -p $JELLYFIN_PID -o pid=)" ]]; then
CPIDS=$(pgrep -P $JELLYFIN_PID)
sleep 2 && kill -KILL $CPIDS
kill -TERM $CPIDS > /dev/null 2>&1
fi
sleep 1
# if it's still running, show error
if [[ -n "$(ps -p $JELLYFIN_PID -o pid=)" ]]; then
echo "Could not successfully stop Jellyfin, please do so before uninstalling."
exit 1
else
[[ -f $PIDFILE ]] && rm $PIDFILE
fi
fi
if [[ -f /usr/lib/jellyfin/bin/MediaBrowser.Server.Mono.exe.so ]]; then
rm /usr/lib/jellyfin/bin/MediaBrowser.Server.Mono.exe.so
fi
;;
failed-upgrade)
;;
*)
echo "prerm called with unknown argument \`$1'" >&2
exit 1
;;
esac
#DEBHELPER#
exit 0

55
debian/rules vendored

@ -1,55 +0,0 @@
#! /usr/bin/make -f
CONFIG := Release
TERM := xterm
SHELL := /bin/bash
HOST_ARCH := $(shell arch)
BUILD_ARCH := ${DEB_HOST_MULTIARCH}
ifeq ($(HOST_ARCH),x86_64)
# Building AMD64
DOTNETRUNTIME := linux-x64
ifeq ($(BUILD_ARCH),arm-linux-gnueabihf)
# Cross-building ARM on AMD64
DOTNETRUNTIME := linux-arm
endif
ifeq ($(BUILD_ARCH),aarch64-linux-gnu)
# Cross-building ARM on AMD64
DOTNETRUNTIME := linux-arm64
endif
endif
ifeq ($(HOST_ARCH),armv7l)
# Building ARM
DOTNETRUNTIME := linux-arm
endif
ifeq ($(HOST_ARCH),arm64)
# Building ARM
DOTNETRUNTIME := linux-arm64
endif
ifeq ($(HOST_ARCH),aarch64)
# Building ARM
DOTNETRUNTIME := linux-arm64
endif
export DH_VERBOSE=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
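# Catch-all rule: hand every target to debhelper; specific steps are overridden below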
%:
dh $@
# disable "make check"
override_dh_auto_test:
# disable stripping debugging symbols
override_dh_clistrip:
override_dh_auto_build:
dotnet publish -maxcpucount:1 --configuration $(CONFIG) --output='$(CURDIR)/usr/lib/jellyfin/bin' --self-contained --runtime $(DOTNETRUNTIME) \
-p:DebugSymbols=false -p:DebugType=none Jellyfin.Server
override_dh_auto_clean:
dotnet clean -maxcpucount:1 --configuration $(CONFIG) Jellyfin.Server || true
rm -rf '$(CURDIR)/usr'
# Force the service name to jellyfin even if we're building jellyfin-nightly
override_dh_installinit:
dh_installinit --name=jellyfin


@ -1,3 +0,0 @@
# This is an override for the following lintian errors:
jellyfin source: license-problem-md5sum-non-free-file Emby.Drawing/ImageMagick/fonts/webdings.ttf*
jellyfin source: source-is-missing


@ -1 +0,0 @@
1.0

11
debian/source/options vendored

@ -1,11 +0,0 @@
tar-ignore='.git*'
tar-ignore='**/.git'
tar-ignore='**/.hg'
tar-ignore='**/.vs'
tar-ignore='**/.vscode'
tar-ignore='deployment'
tar-ignore='**/bin'
tar-ignore='**/obj'
tar-ignore='**/.nuget'
tar-ignore='*.deb'
tar-ignore='ThirdParty'


@ -1,39 +0,0 @@
FROM quay.io/centos/centos:stream9
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV IS_DOCKER=YES
# Prepare CentOS environment
RUN dnf update -yq \
&& dnf install -yq \
@buildsys-build rpmdevtools git \
dnf-plugins-core libcurl-devel fontconfig-devel \
freetype-devel openssl-devel glibc-devel \
libicu-devel systemd wget make \
&& dnf clean all \
&& rm -rf /var/cache/dnf
# Install DotNET SDK
RUN wget -q https://download.visualstudio.microsoft.com/download/pr/85bcc525-4e9c-471e-9c1d-96259aa1a315/930833ef34f66fe9ee2643b0ba21621a/dotnet-sdk-8.0.201-linux-x64.tar.gz -O dotnet-sdk.tar.gz \
&& mkdir -p dotnet-sdk \
&& tar -xzf dotnet-sdk.tar.gz -C dotnet-sdk \
&& ln -s $( pwd )/dotnet-sdk/dotnet /usr/bin/dotnet
# Create symlinks and directories
RUN ln -sf ${SOURCE_DIR}/deployment/build.centos.amd64 /build.sh \
&& mkdir -p ${SOURCE_DIR}/SPECS \
&& ln -s ${SOURCE_DIR}/fedora/jellyfin.spec ${SOURCE_DIR}/SPECS/jellyfin.spec \
&& mkdir -p ${SOURCE_DIR}/SOURCES \
&& ln -s ${SOURCE_DIR}/fedora ${SOURCE_DIR}/SOURCES
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yq \
&& apt-get install --no-install-recommends -yq \
debhelper gnupg devscripts build-essential mmv \
libcurl4-openssl-dev libfontconfig1-dev libfreetype6-dev \
libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yq \
&& apt-get autoremove -yq \
&& rm -rf /var/lib/apt/lists/*
# Link to build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.debian.amd64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,46 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts build-essential mmv
# Prepare the cross-toolchain
RUN dpkg --add-architecture arm64 \
&& apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq cross-gcc-dev \
&& TARGET_LIST="arm64" cross-gcc-gensource 12 \
&& cd cross-gcc-packages-amd64/cross-gcc-12-arm64 \
&& apt-get install --no-install-recommends -yqq \
gcc-12-source libstdc++-12-dev-arm64-cross \
binutils-aarch64-linux-gnu bison flex libtool \
gdb sharutils netbase libmpc-dev libmpfr-dev libgmp-dev \
systemtap-sdt-dev autogen expect chrpath zlib1g-dev zip \
libc6-dev:arm64 linux-libc-dev:arm64 libgcc1:arm64 \
libcurl4-openssl-dev:arm64 libfontconfig1-dev:arm64 \
libfreetype6-dev:arm64 libssl-dev:arm64 liblttng-ust1:arm64 libstdc++-12-dev:arm64 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.debian.arm64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,47 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts build-essential mmv
# Prepare the cross-toolchain
RUN dpkg --add-architecture armhf \
&& apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq cross-gcc-dev \
&& TARGET_LIST="armhf" cross-gcc-gensource 12 \
&& cd cross-gcc-packages-amd64/cross-gcc-12-armhf \
&& apt-get install --no-install-recommends -yqq \
gcc-12-source libstdc++-12-dev-armhf-cross \
binutils-aarch64-linux-gnu bison flex libtool gdb \
sharutils netbase libmpc-dev libmpfr-dev libgmp-dev \
systemtap-sdt-dev autogen expect chrpath zlib1g-dev \
zip binutils-arm-linux-gnueabihf libc6-dev:armhf \
linux-libc-dev:armhf libgcc1:armhf libcurl4-openssl-dev:armhf \
libfontconfig1-dev:armhf libfreetype6-dev:armhf libssl-dev:armhf \
liblttng-ust1:armhf libstdc++-12-dev:armhf \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.debian.armhf /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,12 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
ARG SOURCE_DIR=/src
ARG ARTIFACT_DIR=/jellyfin
WORKDIR ${SOURCE_DIR}
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="${ARTIFACT_DIR}" --self-contained --runtime linux-x64 -p:DebugSymbols=false -p:DebugType=none


@ -1,12 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
ARG SOURCE_DIR=/src
ARG ARTIFACT_DIR=/jellyfin
WORKDIR ${SOURCE_DIR}
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="${ARTIFACT_DIR}" --self-contained --runtime linux-arm64 -p:DebugSymbols=false -p:DebugType=none


@ -1,12 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
ARG SOURCE_DIR=/src
ARG ARTIFACT_DIR=/jellyfin
WORKDIR ${SOURCE_DIR}
COPY . .
ENV DOTNET_CLI_TELEMETRY_OPTOUT=1
RUN dotnet publish Jellyfin.Server --configuration Release --output="${ARTIFACT_DIR}" --self-contained --runtime linux-arm -p:DebugSymbols=false -p:DebugType=none


@ -1,39 +0,0 @@
FROM fedora:39
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV IS_DOCKER=YES
# Prepare Fedora environment
RUN dnf update -yq \
&& dnf install -yq \
@buildsys-build rpmdevtools git \
dnf-plugins-core libcurl-devel fontconfig-devel \
freetype-devel openssl-devel glibc-devel \
libicu-devel systemd wget make \
&& dnf clean all \
&& rm -rf /var/cache/dnf
# Install DotNET SDK
RUN wget -q https://download.visualstudio.microsoft.com/download/pr/85bcc525-4e9c-471e-9c1d-96259aa1a315/930833ef34f66fe9ee2643b0ba21621a/dotnet-sdk-8.0.201-linux-x64.tar.gz -O dotnet-sdk.tar.gz \
&& mkdir -p dotnet-sdk \
&& tar -xzf dotnet-sdk.tar.gz -C dotnet-sdk \
&& ln -s $( pwd )/dotnet-sdk/dotnet /usr/bin/dotnet
# Create symlinks and directories
RUN ln -sf ${SOURCE_DIR}/deployment/build.fedora.amd64 /build.sh \
&& mkdir -p ${SOURCE_DIR}/SPECS \
&& ln -s ${SOURCE_DIR}/fedora/jellyfin.spec ${SOURCE_DIR}/SPECS/jellyfin.spec \
&& mkdir -p ${SOURCE_DIR}/SOURCES \
&& ln -s ${SOURCE_DIR}/fedora ${SOURCE_DIR}/SOURCES
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts unzip \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.linux.amd64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts unzip \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.linux.amd64-musl /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=arm64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts unzip \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.linux.arm64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=armhf
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts unzip \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.linux.armhf /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=arm64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts unzip \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.linux.musl-linux-arm64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.macos.amd64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,33 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV ARCH=amd64
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.macos.arm64 /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]


@ -1,32 +0,0 @@
ARG DOTNET_VERSION=8.0
FROM mcr.microsoft.com/dotnet/sdk:${DOTNET_VERSION}-bookworm-slim
# Docker build arguments
ARG SOURCE_DIR=/jellyfin
ARG ARTIFACT_DIR=/dist
# Docker run environment
ENV SOURCE_DIR=/jellyfin
ENV ARTIFACT_DIR=/dist
ENV DEB_BUILD_OPTIONS=noddebs
ENV IS_DOCKER=YES
# Prepare Debian build environment
RUN apt-get update -yqq \
&& apt-get install --no-install-recommends -yqq \
debhelper gnupg devscripts \
mmv libcurl4-openssl-dev libfontconfig1-dev \
libfreetype6-dev libssl-dev libssl3 liblttng-ust1 \
&& apt-get clean autoclean -yqq \
&& apt-get autoremove -yqq \
&& rm -rf /var/lib/apt/lists/*
# Link to docker-build script
RUN ln -sf ${SOURCE_DIR}/deployment/build.portable /build.sh
VOLUME ${SOURCE_DIR}/
VOLUME ${ARTIFACT_DIR}/
ENTRYPOINT ["/build.sh"]

Some files were not shown because too many files have changed in this diff.