Skip to content

Instantly share code, notes, and snippets.

@vredesbyyrd
Created August 21, 2025 02:34
Show Gist options
  • Select an option

  • Save vredesbyyrd/a01ec39c0ed82854bf6f3f2a64e395b5 to your computer and use it in GitHub Desktop.

Select an option

Save vredesbyyrd/a01ec39c0ed82854bf6f3f2a64e395b5 to your computer and use it in GitHub Desktop.
jellyfin_daemon.go
package main
import (
	"bytes"
	"context"
	"crypto/md5"
	"encoding/json"
	"fmt"
	"image"
	"image/draw"
	"image/jpeg"
	_ "image/png" // register PNG decoder so image.Decode handles PNG artwork
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"os/signal"
	"path/filepath"
	"strconv"
	"strings"
	"sync"
	"syscall"
	"time"

	"github.com/nfnt/resize"
	"nhooyr.io/websocket"
	"nhooyr.io/websocket/wsjson"
)
// Configuration constants
const (
	// Jellyfin connection settings.
	// SECURITY NOTE(review): the API key and password are committed in
	// source; consider loading them from the environment or a config file.
	JellyfinAPIKey   = "07a9962817324bf5872e97ae28141fa3"
	JellyfinUsername = "clu"
	JellyfinPassword = "rinzler"
	JellyfinServer   = "http://192.168.1.175:8096"
	// On-disk cache locations for metadata and processed artwork.
	CacheDir      = "/home/clu/scripts/rofi_jellyfin/cache"
	ImageCacheDir = "/home/clu/scripts/rofi_jellyfin/image_cache"
	// Target dimensions (pixels) for cached images.
	PrimaryImageWidth  = 170
	PrimaryImageHeight = 240
	ScreenshotWidth    = 170
	ScreenshotHeight   = 240
	// Client identity reported to the Jellyfin server.
	DeviceID   = "rofi-jellyfin-daemon"
	ClientName = "Rofi Jellyfin Daemon"
	Version    = "1.0.0"
)
var (
	// Library filtering configuration
	IncludedLibraries = []string{"Movies", "Shows"} // Set to nil to include all libraries
	// Global state
	// daemonState holds all of the daemon's mutable state; its cacheLock
	// guards cacheData (see DaemonState).
	daemonState = &DaemonState{
		cacheLock:          &sync.RWMutex{},
		lastPlaybackUpdate: make(map[string]time.Time),
	}
	// logger writes timestamped daemon log lines to stdout.
	logger = log.New(os.Stdout, "[RT-DAEMON] ", log.LstdFlags)
)
// Data structures

// DaemonState bundles the daemon's global mutable state: the WebSocket
// connection, authentication details, the in-memory metadata cache and
// bookkeeping counters.
type DaemonState struct {
	wsConn               *websocket.Conn // active Jellyfin WebSocket connection, if any
	running              bool
	reconnectAttempts    int
	maxReconnectAttempts int
	reconnectDelay       time.Duration
	userID               string // Jellyfin user ID resolved during authentication
	accessToken          string // session token or static API key
	cacheData            *CacheData
	cacheLock            *sync.RWMutex // guards cacheData
	httpClient           *http.Client
	failedUpdates        int
	totalUpdates         int
	lastPlaybackUpdate   map[string]time.Time // per-item timestamps; presumably throttles playback updates — TODO confirm
}
// CacheData is the top-level snapshot written to the metadata cache.
type CacheData struct {
	Timestamp     int64         `json:"timestamp"` // Unix seconds when the snapshot was built
	TotalItems    int           `json:"total_items"`
	LibraryFilter LibraryFilter `json:"library_filter"`
	AllItems      AllItems      `json:"all_items"`
}

// LibraryFilter records how library filtering was configured and what it
// resolved to when the cache was built.
type LibraryFilter struct {
	Enabled            bool     `json:"enabled"`
	IncludedLibraries  []string `json:"included_libraries"`
	AvailableLibraries []string `json:"available_libraries"`
	FilteredLibraryIDs []string `json:"filtered_library_ids"`
}

// AllItems groups every cached entity. Episodes are keyed by series ID and
// PlaylistItems by playlist ID; the *IDs slices reference items stored in
// the other collections.
type AllItems struct {
	Movies              []MediaItem               `json:"movies"`
	Series              []MediaItem               `json:"series"`
	Episodes            map[string][]MediaItem    `json:"episodes"`
	Playlists           []MediaItem               `json:"playlists"`
	PlaylistItems       map[string][]PlaylistItem `json:"playlist_items"`
	FavoritesIDs        []string                  `json:"favorites_ids"`
	ContinueWatchingIDs []string                  `json:"continue_watching_ids"`
	RecentlyAddedIDs    []string                  `json:"recently_added_ids"`
	NextUpIDs           []string                  `json:"next_up_ids"`
}
// MediaItem is the daemon's flattened representation of a Jellyfin item
// (movie, series, episode or playlist). Pointer fields distinguish
// "absent in the payload" from a genuine zero; list-like source fields
// (genres, people) are pre-joined into comma-separated strings.
type MediaItem struct {
	ID   string `json:"id"`
	Name string `json:"name"`
	Type string `json:"type"` // Jellyfin item type: "Movie", "Series", "Episode", "Playlist"
	Year *int   `json:"year,omitempty"`
	// Ratings and descriptive metadata.
	CommunityRating *float64 `json:"community_rating,omitempty"`
	CriticRating    *float64 `json:"critic_rating,omitempty"`
	DateAdded       string   `json:"date_added,omitempty"`
	Overview        string   `json:"overview,omitempty"`
	PremiereDate    string   `json:"premiere_date,omitempty"`
	RuntimeTicks    *int64   `json:"runtime_ticks,omitempty"` // Jellyfin ticks (100ns units)
	RuntimeMinutes  *int     `json:"runtime_minutes,omitempty"`
	Genres          string   `json:"genres,omitempty"` // comma-separated
	// Episode-specific linkage back to the series/season.
	SeriesName    string `json:"series_name,omitempty"`
	SeriesID      string `json:"series_id,omitempty"`
	SeasonNumber  *int   `json:"season_number,omitempty"`
	EpisodeNumber *int   `json:"episode_number,omitempty"`
	SeasonID      string `json:"season_id,omitempty"`
	ParentID      string `json:"parent_id,omitempty"`
	// Media source / stream details.
	Container     string `json:"container,omitempty"`
	VideoCodec    string `json:"video_codec,omitempty"`
	VideoWidth    *int   `json:"video_width,omitempty"`
	FileSize      *int64 `json:"file_size,omitempty"`
	HasSubtitles  bool   `json:"has_subtitles"`
	AudioLanguage string `json:"audio_language,omitempty"`
	// External links and provider identifiers.
	TrailerURL string `json:"trailer_url,omitempty"`
	ImdbURL    string `json:"imdb_url,omitempty"`
	ImdbID     string `json:"imdb_id,omitempty"`
	TmdbID     string `json:"tmdb_id,omitempty"`
	// Per-user playback state.
	Played                bool    `json:"played"`
	IsFavorite            bool    `json:"is_favorite"`
	PlaybackPositionTicks int64   `json:"playback_position_ticks"`
	PlayedPercentage      float64 `json:"played_percentage"`
	LastPlayedDate        *int64  `json:"last_played_date,omitempty"` // Unix seconds
	// People, pre-joined as comma-separated strings.
	Directors string `json:"directors,omitempty"`
	Writers   string `json:"writers,omitempty"`
	Cast      string `json:"cast,omitempty"`
	// Locally cached artwork paths and availability flags.
	PrimaryImagePath         string `json:"primary_image_path,omitempty"`
	PrimaryImagePathLarge    string `json:"primary_image_path_large,omitempty"`
	HasPrimaryImage          bool   `json:"has_primary_image"`
	HasPrimaryImageLarge     bool   `json:"has_primary_image_large"`
	ScreenshotImagePath      string `json:"screenshot_image_path,omitempty"`
	ScreenshotImagePathLarge string `json:"screenshot_image_path_large,omitempty"`
	HasScreenshotImage       bool   `json:"has_screenshot_image"`
	HasScreenshotImageLarge  bool   `json:"has_screenshot_image_large"`
	ThumbImagePath           string `json:"thumb_image_path,omitempty"`
	ThumbImagePathLarge      string `json:"thumb_image_path_large,omitempty"`
	HasThumbImage            bool   `json:"has_thumb_image"`
	HasThumbImageLarge       bool   `json:"has_thumb_image_large"`
	// Playlist specific fields
	PlaylistType             string `json:"playlist_type,omitempty"`
	ChildCount               int    `json:"child_count,omitempty"`
	CumulativeRuntimeTicks   *int64 `json:"cumulative_runtime_ticks,omitempty"`
	CumulativeRuntimeMinutes *int   `json:"cumulative_runtime_minutes,omitempty"`
	IsFolder                 bool   `json:"is_folder,omitempty"`
	MediaType                string `json:"media_type,omitempty"`
}
// PlaylistItem is a lightweight link from a playlist to one of its
// entries, preserving the entry's position within the playlist.
type PlaylistItem struct {
	ID            string `json:"id"`
	PlaylistID    string `json:"playlist_id"`
	PlaylistIndex int    `json:"playlist_index"`
}

// WebSocketMessage is the generic envelope of Jellyfin WebSocket frames;
// Data is decoded further based on MessageType.
type WebSocketMessage struct {
	MessageType string      `json:"MessageType"`
	Data        interface{} `json:"Data"`
}

// UserDataChange mirrors the per-item user-state payload Jellyfin sends
// when played/favorite/resume state changes for an item.
type UserDataChange struct {
	ItemID                string  `json:"ItemId"`
	PlayedPercentage      float64 `json:"PlayedPercentage"`
	PlaybackPositionTicks int64   `json:"PlaybackPositionTicks"`
	Played                bool    `json:"Played"`
	IsFavorite            bool    `json:"IsFavorite"`
	LastPlayedDate        string  `json:"LastPlayedDate,omitempty"`
	PlayCount             int     `json:"PlayCount"`
}

// LibraryChange mirrors Jellyfin's LibraryChanged notification payload.
type LibraryChange struct {
	ItemsAdded   []string `json:"ItemsAdded"`
	ItemsRemoved []string `json:"ItemsRemoved"`
	ItemsUpdated []string `json:"ItemsUpdated"`
}

// Authentication responses

// AuthResponse is the body returned by /Users/AuthenticateByName.
type AuthResponse struct {
	AccessToken string `json:"AccessToken"`
	User        struct {
		ID   string `json:"Id"`
		Name string `json:"Name"`
	} `json:"User"`
}
func debugDo(client *http.Client, req *http.Request) (*http.Response, error) {
// Dump request
if req.Body != nil {
bodyBytes, _ := io.ReadAll(req.Body)
fmt.Printf("\n>>> DEBUG REQUEST %s %s\n", req.Method, req.URL.String())
fmt.Printf(">>> HEADERS:\n")
for k, v := range req.Header {
fmt.Printf("%s: %s\n", k, strings.Join(v, ","))
}
fmt.Printf(">>> BODY: %s\n\n", string(bodyBytes))
// Reset body so request still works
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
}
// Perform request
resp, err := client.Do(req)
if err != nil {
return resp, err
}
// Dump response status + body (non-destructive)
bodyBytes, _ := io.ReadAll(resp.Body)
fmt.Printf("<<< DEBUG RESPONSE %d %s\n", resp.StatusCode, resp.Status)
fmt.Printf("<<< BODY: %s\n\n", string(bodyBytes))
resp.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) // reset for caller
return resp, err
}
// mustJSON marshals v to JSON, discarding any marshal error; it is meant
// for values that are known to be encodable (e.g. maps of strings).
func mustJSON(v interface{}) []byte {
	encoded, _ := json.Marshal(v)
	return encoded
}
// makeJellyfinRequest performs an authenticated HTTP call against the
// Jellyfin server and decodes the JSON response into a generic value, so
// callers can handle both object and array payloads.
func makeJellyfinRequest(method, endpoint string, body interface{}) (interface{}, error) {
	var payload io.Reader
	if body != nil {
		encoded, err := json.Marshal(body)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal request body: %w", err)
		}
		payload = bytes.NewBuffer(encoded)
	}
	req, err := http.NewRequest(method, JellyfinServer+endpoint, payload)
	if err != nil {
		return nil, fmt.Errorf("failed to create request: %w", err)
	}
	// Attach the session token when we have one; unauthenticated calls go out bare.
	if token := daemonState.accessToken; token != "" {
		req.Header.Set("Authorization", fmt.Sprintf("MediaBrowser Token=\"%s\"", token))
	}
	for header, value := range map[string]string{
		"Content-Type":          "application/json",
		"X-Emby-Client":         ClientName,
		"X-Emby-Device-Id":      DeviceID,
		"X-Emby-Client-Version": Version,
	} {
		req.Header.Set(header, value)
	}
	resp, err := debugDo(daemonState.httpClient, req)
	if err != nil {
		return nil, fmt.Errorf("request failed: %w", err)
	}
	defer resp.Body.Close()
	raw, readErr := io.ReadAll(resp.Body)
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(raw))
	}
	if readErr != nil {
		return nil, fmt.Errorf("failed to read response: %w", readErr)
	}
	// Decode as interface{} to support both object and array responses.
	var decoded interface{}
	if err := json.Unmarshal(raw, &decoded); err != nil {
		return nil, fmt.Errorf("failed to decode response: %w", err)
	}
	return decoded, nil
}
// makeJellyfinObjectRequest wraps makeJellyfinRequest for endpoints whose
// payload is a single JSON object rather than an array.
func makeJellyfinObjectRequest(method, endpoint string, body interface{}) (map[string]interface{}, error) {
	raw, err := makeJellyfinRequest(method, endpoint, body)
	if err != nil {
		return nil, err
	}
	obj, ok := raw.(map[string]interface{})
	if !ok {
		return nil, fmt.Errorf("expected object response, got %T", raw)
	}
	return obj, nil
}
// Authentication functions
// authenticateWithJellyfin establishes a session with the Jellyfin server.
// It prefers username/password authentication (which yields a proper access
// token and user ID) and falls back to the static API key when no password
// is configured. On success daemonState.accessToken — and, when it can be
// resolved, daemonState.userID — are populated.
//
// Fixes over the previous version: the API-key validation response body is
// now closed (it was leaked), the request-creation error is checked, and a
// non-200 status reports the status code instead of formatting a nil error.
func authenticateWithJellyfin() error {
	logger.Println("Authenticating with Jellyfin server...")
	daemonState.httpClient = &http.Client{Timeout: 30 * time.Second}
	// Username/password authentication
	if JellyfinPassword != "" {
		logger.Println("Attempting username/password authentication...")
		authBody := map[string]string{
			"Username": JellyfinUsername,
			"Pw":       JellyfinPassword, // Jellyfin expects the password under "Pw"
		}
		req, err := http.NewRequest("POST", JellyfinServer+"/Users/AuthenticateByName", bytes.NewBuffer(mustJSON(authBody)))
		if err != nil {
			return fmt.Errorf("failed to create auth request: %w", err)
		}
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("X-Emby-Authorization",
			fmt.Sprintf(`MediaBrowser Client="%s", Device="%s", DeviceId="%s", Version="%s"`,
				ClientName, ClientName, DeviceID, Version))
		resp, err := debugDo(daemonState.httpClient, req)
		if err != nil {
			return fmt.Errorf("auth request failed: %w", err)
		}
		defer resp.Body.Close()
		if resp.StatusCode != 200 {
			body, _ := io.ReadAll(resp.Body)
			return fmt.Errorf("username/password authentication failed: HTTP %d: %s", resp.StatusCode, body)
		}
		var authResp AuthResponse
		if err := json.NewDecoder(resp.Body).Decode(&authResp); err != nil {
			return fmt.Errorf("failed to decode auth response: %w", err)
		}
		daemonState.accessToken = authResp.AccessToken
		daemonState.userID = authResp.User.ID
		logger.Println("βœ… Username/password authentication successful")
		return nil
	}
	// API key fallback
	if JellyfinAPIKey != "" {
		logger.Println("Attempting API key authentication...")
		daemonState.accessToken = JellyfinAPIKey
		req, err := http.NewRequest("GET", JellyfinServer+"/System/Info", nil)
		if err != nil {
			return fmt.Errorf("failed to create API key request: %w", err)
		}
		req.Header.Set("X-Emby-Authorization",
			fmt.Sprintf(`MediaBrowser Client="%s", Device="%s", DeviceId="%s", Version="%s"`,
				ClientName, ClientName, DeviceID, Version))
		req.Header.Set("X-MediaBrowser-Token", JellyfinAPIKey)
		resp, err := debugDo(daemonState.httpClient, req)
		if err != nil {
			return fmt.Errorf("API key authentication failed: %v", err)
		}
		resp.Body.Close() // BUG FIX: body was previously leaked
		if resp.StatusCode != 200 {
			return fmt.Errorf("API key authentication failed: HTTP %d", resp.StatusCode)
		}
		logger.Println("βœ… API key authentication successful")
		// Fix user lookup: /Users returns an array, not {Items: []}
		users, err := makeJellyfinRequest("GET", "/Users", nil)
		if err == nil {
			if arr, ok := users.([]interface{}); ok {
				for _, u := range arr {
					if umap, ok := u.(map[string]interface{}); ok {
						if umap["Name"] == JellyfinUsername {
							if id, ok := umap["Id"].(string); ok {
								daemonState.userID = id
								logger.Printf("User ID: %s", id)
							}
						}
					}
				}
			}
		}
		return nil
	}
	return fmt.Errorf("no authentication method configured")
}
// Utility functions
// parseTimestamp converts a Jellyfin timestamp string into a Unix epoch
// value, returning nil when the string is empty or unparseable.
func parseTimestamp(timestamp string) *int64 {
	if timestamp == "" {
		return nil
	}
	parsed, err := time.Parse(time.RFC3339, timestamp)
	if err != nil {
		// Retry with an explicit layout for Jellyfin's seven-digit
		// fractional-second format.
		parsed, err = time.Parse("2006-01-02T15:04:05.0000000Z", timestamp)
		if err != nil {
			logger.Printf("Failed to parse timestamp %s: %v", timestamp, err)
			return nil
		}
	}
	epoch := parsed.Unix()
	return &epoch
}
// getImageCachePath builds the on-disk cache location for an item's image.
// The item ID is hashed so the filename is filesystem-safe; the "large"
// variant gets a distinct suffix.
func getImageCachePath(itemID, imageType, size string) string {
	hash := fmt.Sprintf("%x", md5.Sum([]byte(itemID)))
	suffix := ".jpg"
	if size == "large" {
		suffix = "_large.jpg"
	}
	return filepath.Join(ImageCacheDir, fmt.Sprintf("%s_%s%s", hash, imageType, suffix))
}
// resizeAndPadImage fits img into a targetWidth x targetHeight frame.
//
// When preserveOriginalSize is true the pixels are not rescaled: the image
// is only padded (top/bottom or left/right) so its canvas matches the
// target aspect ratio. Otherwise the image is scaled to fit inside the
// target box and centered on a targetWidth x targetHeight canvas.
//
// The padding background is the RGBA zero value, which renders as black
// once JPEG-encoded (JPEG has no alpha channel).
//
// BUG FIX: the previous manual pixel loops indexed the source from (0,0),
// which silently misplaces pixels for images whose Bounds().Min is not the
// origin; draw.Draw handles arbitrary source origins correctly.
func resizeAndPadImage(img image.Image, targetWidth, targetHeight int, preserveOriginalSize bool) image.Image {
	srcBounds := img.Bounds()
	srcW, srcH := srcBounds.Dx(), srcBounds.Dy()
	if preserveOriginalSize {
		// For large version: don't resize, just pad to maintain aspect ratio
		targetRatio := float64(targetWidth) / float64(targetHeight)
		imageRatio := float64(srcW) / float64(srcH)
		if imageRatio > targetRatio {
			// Image is wider than target ratio - pad top/bottom
			newHeight := int(float64(srcW) / targetRatio)
			padded := image.NewRGBA(image.Rect(0, 0, srcW, newHeight))
			yOffset := (newHeight - srcH) / 2
			draw.Draw(padded, image.Rect(0, yOffset, srcW, yOffset+srcH), img, srcBounds.Min, draw.Src)
			return padded
		}
		// Image is taller than target ratio - pad left/right
		newWidth := int(float64(srcH) * targetRatio)
		padded := image.NewRGBA(image.Rect(0, 0, newWidth, srcH))
		xOffset := (newWidth - srcW) / 2
		draw.Draw(padded, image.Rect(xOffset, 0, xOffset+srcW, srcH), img, srcBounds.Min, draw.Src)
		return padded
	}
	// Small version: scale to fit entirely inside the target box...
	scaleFactor := float64(targetWidth) / float64(srcW)
	if scaleHeight := float64(targetHeight) / float64(srcH); scaleHeight < scaleFactor {
		scaleFactor = scaleHeight
	}
	newWidth := uint(float64(srcW) * scaleFactor)
	newHeight := uint(float64(srcH) * scaleFactor)
	resized := resize.Resize(newWidth, newHeight, img, resize.Lanczos3)
	// ...then center it on the padded canvas.
	padded := image.NewRGBA(image.Rect(0, 0, targetWidth, targetHeight))
	xOffset := (targetWidth - int(newWidth)) / 2
	yOffset := (targetHeight - int(newHeight)) / 2
	draw.Draw(padded,
		image.Rect(xOffset, yOffset, xOffset+int(newWidth), yOffset+int(newHeight)),
		resized, resized.Bounds().Min, draw.Src)
	return padded
}
// downloadAndProcessImage fetches an image from the Jellyfin server and
// writes two processed JPEG variants to the cache: a resized/padded small
// version and an aspect-padded large version. It reports whether each
// variant was written successfully.
//
// Fixes over the previous version: os.MkdirAll, os.Create and jpeg.Encode
// failures are now logged instead of being silently swallowed.
func downloadAndProcessImage(imageURL, cachePathSmall, cachePathLarge, imageType string) (bool, bool) {
	req, err := http.NewRequest("GET", imageURL, nil)
	if err != nil {
		logger.Printf("Failed to create request for %s: %v", imageURL, err)
		return false, false
	}
	req.Header.Set("Authorization", fmt.Sprintf("MediaBrowser Token=\"%s\"", daemonState.accessToken))
	resp, err := debugDo(daemonState.httpClient, req)
	if err != nil {
		logger.Printf("Failed to download image %s: %v", imageURL, err)
		return false, false
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		logger.Printf("Failed to download image %s: status %d", imageURL, resp.StatusCode)
		return false, false
	}
	img, _, err := image.Decode(resp.Body)
	if err != nil {
		logger.Printf("Failed to decode image %s: %v", imageURL, err)
		return false, false
	}
	// Ensure cache directories exist; a failure will also surface as a
	// create error below, but log it here for diagnosis.
	if err := os.MkdirAll(filepath.Dir(cachePathSmall), 0755); err != nil {
		logger.Printf("Failed to create cache dir for %s: %v", cachePathSmall, err)
	}
	if err := os.MkdirAll(filepath.Dir(cachePathLarge), 0755); err != nil {
		logger.Printf("Failed to create cache dir for %s: %v", cachePathLarge, err)
	}
	smallSuccess := writeCachedJPEG(cachePathSmall, processImageVariant(img, imageType, false), 90)
	largeSuccess := writeCachedJPEG(cachePathLarge, processImageVariant(img, imageType, true), 95)
	return smallSuccess, largeSuccess
}

// processImageVariant picks the target dimensions for the image type and
// delegates to resizeAndPadImage; large=true preserves the original size.
func processImageVariant(img image.Image, imageType string, large bool) image.Image {
	if imageType == "primary" {
		return resizeAndPadImage(img, PrimaryImageWidth, PrimaryImageHeight, large)
	}
	return resizeAndPadImage(img, ScreenshotWidth, ScreenshotHeight, large)
}

// writeCachedJPEG encodes img to path at the given JPEG quality, logging
// and reporting failure instead of silently dropping it.
func writeCachedJPEG(path string, img image.Image, quality int) bool {
	f, err := os.Create(path)
	if err != nil {
		logger.Printf("Failed to create cached image %s: %v", path, err)
		return false
	}
	defer f.Close()
	if err := jpeg.Encode(f, img, &jpeg.Options{Quality: quality}); err != nil {
		logger.Printf("Failed to encode cached image %s: %v", path, err)
		return false
	}
	return true
}
// cacheItemImages ensures the item's artwork exists in the local image
// cache and records the resulting paths/flags on the item.
//
// Movies, series and playlists use the Primary (poster) image; episodes
// use the Primary image as a screenshot and fall back to the Thumb image
// when no primary could be fetched.
//
// BUG FIX: the cache check previously only verified the small variant, so
// a missing large file was reported as cached; both variants are checked
// now, matching the original comment's intent.
func cacheItemImages(item *MediaItem) {
	if item.ID == "" || item.Type == "" {
		return
	}
	// bothExist reports whether both cached variants are already on disk.
	bothExist := func(small, large string) bool {
		if _, err := os.Stat(small); err != nil {
			return false
		}
		_, err := os.Stat(large)
		return err == nil
	}
	switch item.Type {
	case "Playlist", "Movie", "Series":
		imageURL := fmt.Sprintf("%s/Items/%s/Images/Primary?width=765", JellyfinServer, item.ID)
		cachePathSmall := getImageCachePath(item.ID, "primary", "small")
		cachePathLarge := getImageCachePath(item.ID, "primary", "large")
		if bothExist(cachePathSmall, cachePathLarge) {
			// Both versions already cached
			item.PrimaryImagePath = cachePathSmall
			item.PrimaryImagePathLarge = cachePathLarge
			item.HasPrimaryImage = true
			item.HasPrimaryImageLarge = true
			return
		}
		smallSuccess, largeSuccess := downloadAndProcessImage(imageURL, cachePathSmall, cachePathLarge, "primary")
		if smallSuccess {
			item.PrimaryImagePath = cachePathSmall
			item.HasPrimaryImage = true
		}
		if largeSuccess {
			item.PrimaryImagePathLarge = cachePathLarge
			item.HasPrimaryImageLarge = true
		}
	case "Episode":
		// Try the primary image first (episode screenshot).
		imageURL := fmt.Sprintf("%s/Items/%s/Images/Primary?width=765", JellyfinServer, item.ID)
		cachePathSmall := getImageCachePath(item.ID, "screenshot", "small")
		cachePathLarge := getImageCachePath(item.ID, "screenshot", "large")
		if bothExist(cachePathSmall, cachePathLarge) {
			item.ScreenshotImagePath = cachePathSmall
			item.ScreenshotImagePathLarge = cachePathLarge
			item.HasScreenshotImage = true
			item.HasScreenshotImageLarge = true
			return
		}
		smallSuccess, largeSuccess := downloadAndProcessImage(imageURL, cachePathSmall, cachePathLarge, "screenshot")
		if smallSuccess {
			item.ScreenshotImagePath = cachePathSmall
			item.HasScreenshotImage = true
		}
		if largeSuccess {
			item.ScreenshotImagePathLarge = cachePathLarge
			item.HasScreenshotImageLarge = true
		}
		if smallSuccess {
			return
		}
		// No primary screenshot available: fall back to the Thumb image.
		thumbURL := fmt.Sprintf("%s/Items/%s/Images/Thumb?width=765", JellyfinServer, item.ID)
		thumbSmall := getImageCachePath(item.ID, "thumb", "small")
		thumbLarge := getImageCachePath(item.ID, "thumb", "large")
		thumbSmallOK, thumbLargeOK := downloadAndProcessImage(thumbURL, thumbSmall, thumbLarge, "screenshot")
		if thumbSmallOK {
			item.ThumbImagePath = thumbSmall
			item.HasThumbImage = true
		}
		if thumbLargeOK {
			item.ThumbImagePathLarge = thumbLarge
			item.HasThumbImageLarge = true
		}
	}
}
// Data processing functions
// processJellyfinItem converts a raw Jellyfin item (a decoded JSON object)
// into the daemon's MediaItem representation and caches its artwork.
//
// Every extraction is a best-effort type assertion: fields absent from the
// payload, or of an unexpected JSON type, simply remain at their zero
// value. Numeric JSON values arrive as float64 and are narrowed here.
func processJellyfinItem(item map[string]interface{}) *MediaItem {
	processed := &MediaItem{}
	// Extract basic fields
	if id, ok := item["Id"].(string); ok {
		processed.ID = id
	}
	if name, ok := item["Name"].(string); ok {
		processed.Name = name
	}
	if itemType, ok := item["Type"].(string); ok {
		processed.Type = itemType
	}
	// Extract year
	if year, ok := item["ProductionYear"].(float64); ok {
		yearInt := int(year)
		processed.Year = &yearInt
	}
	// Extract ratings
	if rating, ok := item["CommunityRating"].(float64); ok {
		processed.CommunityRating = &rating
	}
	if criticRating, ok := item["CriticRating"].(float64); ok {
		processed.CriticRating = &criticRating
	}
	// Extract dates
	if dateCreated, ok := item["DateCreated"].(string); ok {
		processed.DateAdded = dateCreated
	}
	if premiereDate, ok := item["PremiereDate"].(string); ok {
		processed.PremiereDate = premiereDate
	}
	// Extract overview
	if overview, ok := item["Overview"].(string); ok {
		processed.Overview = overview
	}
	// Extract runtime (ticks are 100ns units; 600,000,000 ticks = 1 minute)
	if runtimeTicks, ok := item["RunTimeTicks"].(float64); ok {
		ticks := int64(runtimeTicks)
		processed.RuntimeTicks = &ticks
		minutes := int(ticks / 600000000)
		processed.RuntimeMinutes = &minutes
	}
	// Extract genres (joined into a single display string)
	if genres, ok := item["Genres"].([]interface{}); ok {
		var genreStrings []string
		for _, genre := range genres {
			if genreStr, ok := genre.(string); ok {
				genreStrings = append(genreStrings, genreStr)
			}
		}
		processed.Genres = strings.Join(genreStrings, ", ")
	}
	// Extract episode-specific fields
	if processed.Type == "Episode" {
		if seriesName, ok := item["SeriesName"].(string); ok {
			processed.SeriesName = seriesName
		}
		if seriesID, ok := item["SeriesId"].(string); ok {
			processed.SeriesID = seriesID
		}
		// Jellyfin reports the season as ParentIndexNumber and the
		// episode as IndexNumber.
		if seasonNum, ok := item["ParentIndexNumber"].(float64); ok {
			num := int(seasonNum)
			processed.SeasonNumber = &num
		}
		if episodeNum, ok := item["IndexNumber"].(float64); ok {
			num := int(episodeNum)
			processed.EpisodeNumber = &num
		}
		if seasonID, ok := item["SeasonId"].(string); ok {
			processed.SeasonID = seasonID
		}
		if parentID, ok := item["ParentId"].(string); ok {
			processed.ParentID = parentID
		}
	}
	// Extract container
	if container, ok := item["Container"].(string); ok {
		processed.Container = container
	}
	// Extract media metadata (only the first media source is inspected)
	if mediaSources, ok := item["MediaSources"].([]interface{}); ok && len(mediaSources) > 0 {
		if source, ok := mediaSources[0].(map[string]interface{}); ok {
			if size, ok := source["Size"].(float64); ok {
				sizeInt := int64(size)
				processed.FileSize = &sizeInt
			}
			if width, ok := source["Width"].(float64); ok {
				widthInt := int(width)
				processed.VideoWidth = &widthInt
			}
			// Extract stream information
			if streams, ok := source["MediaStreams"].([]interface{}); ok {
				for _, stream := range streams {
					if streamMap, ok := stream.(map[string]interface{}); ok {
						streamType, _ := streamMap["Type"].(string)
						switch streamType {
						case "Video":
							if codec, ok := streamMap["Codec"].(string); ok {
								processed.VideoCodec = codec
							}
							// Fall back to the stream width when the
							// media source did not provide one.
							if processed.VideoWidth == nil {
								if width, ok := streamMap["Width"].(float64); ok {
									widthInt := int(width)
									processed.VideoWidth = &widthInt
								}
							}
						case "Audio":
							// NOTE(review): later audio streams overwrite
							// earlier ones, so this ends up as the last
							// stream's language — confirm intended.
							if language, ok := streamMap["Language"].(string); ok {
								processed.AudioLanguage = language
							}
						case "Subtitle":
							processed.HasSubtitles = true
						}
					}
				}
			}
		}
	}
	// Extract provider IDs
	if providerIDs, ok := item["ProviderIds"].(map[string]interface{}); ok {
		if imdbID, ok := providerIDs["Imdb"].(string); ok {
			processed.ImdbID = imdbID
			processed.ImdbURL = fmt.Sprintf("https://www.imdb.com/title/%s/", imdbID)
		}
		if tmdbID, ok := providerIDs["Tmdb"].(string); ok {
			processed.TmdbID = tmdbID
		}
	}
	// Extract trailer information (first remote trailer only)
	if trailers, ok := item["RemoteTrailers"].([]interface{}); ok && len(trailers) > 0 {
		if trailer, ok := trailers[0].(map[string]interface{}); ok {
			if url, ok := trailer["Url"].(string); ok {
				processed.TrailerURL = url
			}
		}
	}
	// Extract user data (per-user playback/favorite state)
	if userData, ok := item["UserData"].(map[string]interface{}); ok {
		if played, ok := userData["Played"].(bool); ok {
			processed.Played = played
		}
		if favorite, ok := userData["IsFavorite"].(bool); ok {
			processed.IsFavorite = favorite
		}
		if position, ok := userData["PlaybackPositionTicks"].(float64); ok {
			processed.PlaybackPositionTicks = int64(position)
		}
		if percentage, ok := userData["PlayedPercentage"].(float64); ok {
			processed.PlayedPercentage = percentage
		}
		if lastPlayedStr, ok := userData["LastPlayedDate"].(string); ok {
			parsed := parseTimestamp(lastPlayedStr)
			processed.LastPlayedDate = parsed
		}
	}
	// Extract people information, grouped by role
	if people, ok := item["People"].([]interface{}); ok {
		var directors, writers, cast []string
		for _, person := range people {
			if personMap, ok := person.(map[string]interface{}); ok {
				name, _ := personMap["Name"].(string)
				personType, _ := personMap["Type"].(string)
				switch personType {
				case "Director":
					directors = append(directors, name)
				case "Writer":
					writers = append(writers, name)
				case "Actor":
					cast = append(cast, name)
				}
			}
		}
		if len(directors) > 0 {
			processed.Directors = strings.Join(directors, ", ")
		}
		if len(writers) > 0 {
			processed.Writers = strings.Join(writers, ", ")
		}
		if len(cast) > 0 {
			processed.Cast = strings.Join(cast, ", ")
		}
	}
	// Handle playlist-specific fields
	if processed.Type == "Playlist" {
		if mediaType, ok := item["MediaType"].(string); ok {
			processed.PlaylistType = mediaType
			processed.MediaType = mediaType
		}
		if childCount, ok := item["ChildCount"].(float64); ok {
			processed.ChildCount = int(childCount)
		}
		if cumulativeRuntime, ok := item["CumulativeRunTimeTicks"].(float64); ok {
			ticks := int64(cumulativeRuntime)
			processed.CumulativeRuntimeTicks = &ticks
			minutes := int(ticks / 600000000)
			processed.CumulativeRuntimeMinutes = &minutes
		}
		if isFolder, ok := item["IsFolder"].(bool); ok {
			processed.IsFolder = isFolder
		}
	}
	// Cache images (may download from the server on a cache miss)
	cacheItemImages(processed)
	return processed
}
// API data fetching functions
// fetchRecentlyAddedIDs returns the IDs of the user's most recently added
// items, up to limit. The /Items/Latest endpoint returns a bare JSON array
// rather than an {Items: ...} object.
func fetchRecentlyAddedIDs(limit int) ([]string, error) {
	query := url.Values{}
	query.Set("Limit", strconv.Itoa(limit))
	query.Set("Fields", "Id")
	query.Set("EnableImages", "false")
	query.Set("ImageTypeLimit", "0")
	endpoint := fmt.Sprintf("/Users/%s/Items/Latest?%s", daemonState.userID, query.Encode())
	response, err := makeJellyfinRequest("GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch recently added items: %w", err)
	}
	entries, ok := response.([]interface{})
	if !ok {
		return nil, fmt.Errorf("expected array response from Latest endpoint, got %T", response)
	}
	var ids []string
	for _, entry := range entries {
		entryMap, ok := entry.(map[string]interface{})
		if !ok {
			continue
		}
		if id, ok := entryMap["Id"].(string); ok {
			ids = append(ids, id)
		}
	}
	logger.Printf("βœ… Fetched %d recently added item IDs", len(ids))
	return ids, nil
}
// fetchNextUpIDs returns the IDs of the user's "Next Up" episodes, up to limit.
func fetchNextUpIDs(limit int) ([]string, error) {
	query := url.Values{}
	query.Set("Limit", strconv.Itoa(limit))
	query.Set("Fields", "Id")
	query.Set("EnableImages", "false")
	query.Set("ImageTypeLimit", "0")
	endpoint := fmt.Sprintf("/Shows/NextUp?UserId=%s&%s", daemonState.userID, query.Encode())
	response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch next up episodes: %w", err)
	}
	var ids []string
	entries, _ := response["Items"].([]interface{})
	for _, entry := range entries {
		if entryMap, ok := entry.(map[string]interface{}); ok {
			if id, ok := entryMap["Id"].(string); ok {
				ids = append(ids, id)
			}
		}
	}
	logger.Printf("βœ… Fetched %d next up episode IDs", len(ids))
	return ids, nil
}
// fetchAvailableLibraries returns the user's library views keyed by display
// name; each value carries the view's "id" and "collection_type".
func fetchAvailableLibraries() (map[string]map[string]interface{}, error) {
	endpoint := fmt.Sprintf("/Users/%s/Views", daemonState.userID)
	response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch libraries: %w", err)
	}
	libraries := make(map[string]map[string]interface{})
	views, _ := response["Items"].([]interface{})
	for _, raw := range views {
		viewMap, ok := raw.(map[string]interface{})
		if !ok {
			continue
		}
		id, ok := viewMap["Id"].(string)
		if !ok {
			continue
		}
		name, ok := viewMap["Name"].(string)
		if !ok {
			continue
		}
		collectionType, _ := viewMap["CollectionType"].(string)
		libraries[name] = map[string]interface{}{
			"id":              id,
			"collection_type": collectionType,
		}
		logger.Printf("Found library: '%s' (ID: %s, Type: %s)", name, id, collectionType)
	}
	logger.Printf("Found %d available libraries: %v", len(libraries), getLibraryNames(libraries))
	return libraries, nil
}
// getLibraryNames returns the display names of the given libraries in map
// iteration (i.e. unspecified) order; an empty input yields nil.
func getLibraryNames(libraries map[string]map[string]interface{}) []string {
	var names []string
	for libraryName := range libraries {
		names = append(names, libraryName)
	}
	return names
}
// getFilteredLibraryIDs maps the configured IncludedLibraries names onto
// library IDs. An empty result means "no filtering" (include everything).
func getFilteredLibraryIDs(libraries map[string]map[string]interface{}) []string {
	if IncludedLibraries == nil {
		logger.Println("Including all libraries (no filter configured)")
		return []string{} // Empty list means no filtering
	}
	var filteredIDs []string
	var notFound []string
	for _, libraryName := range IncludedLibraries {
		libraryInfo, exists := libraries[libraryName]
		if !exists {
			notFound = append(notFound, libraryName)
			continue
		}
		if id, ok := libraryInfo["id"].(string); ok {
			filteredIDs = append(filteredIDs, id)
			logger.Printf("Including library: '%s' (ID: %s)", libraryName, id)
		}
	}
	if len(notFound) > 0 {
		logger.Printf("Configured libraries not found: %v", notFound)
		logger.Printf("Available libraries: %v", getLibraryNames(libraries))
	}
	if len(filteredIDs) == 0 {
		logger.Println("No valid libraries found in configuration - will include all libraries")
		return filteredIDs
	}
	// Report which configured names actually matched a library.
	var includedNames []string
	for _, name := range IncludedLibraries {
		if _, exists := libraries[name]; exists {
			includedNames = append(includedNames, name)
		}
	}
	logger.Printf("Library filtering enabled - including %d libraries: %v", len(filteredIDs), includedNames)
	return filteredIDs
}
// fetchEpisodesForSeries returns every episode of the given series, sorted
// by season then episode number, fully processed into MediaItems.
func fetchEpisodesForSeries(seriesID string) ([]MediaItem, error) {
	query := url.Values{}
	query.Set("Recursive", "true")
	query.Set("Fields", "Overview,Genres,CommunityRating,OfficialRating,ProductionYear,PremiereDate,MediaSources,MediaStreams,Path,Genres,Studios,Tags,Taglines,ProviderIds,UserData,People,Overview")
	query.Set("ImageTypeLimit", "1")
	query.Set("EnableImageTypes", "Primary,Backdrop,Screenshot,Thumb")
	query.Set("IncludeItemTypes", "Episode")
	query.Set("ParentId", seriesID)
	query.Set("SortBy", "ParentIndexNumber,IndexNumber")
	query.Set("SortOrder", "Ascending")
	endpoint := fmt.Sprintf("/Users/%s/Items?%s", daemonState.userID, query.Encode())
	response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch episodes for series %s: %w", seriesID, err)
	}
	var episodes []MediaItem
	entries, _ := response["Items"].([]interface{})
	for _, raw := range entries {
		if itemMap, ok := raw.(map[string]interface{}); ok {
			episodes = append(episodes, *processJellyfinItem(itemMap))
		}
	}
	logger.Printf("Fetched %d episodes for series %s", len(episodes), seriesID)
	return episodes, nil
}
// fetchPlaylistItems returns the direct children of a playlist as
// lightweight (item ID, playlist ID, index) records.
//
// NOTE(review): items are requested with SortBy=SortName, so PlaylistIndex
// reflects alphabetical order rather than the playlist's own ordering —
// confirm this is intended.
func fetchPlaylistItems(playlistID string) ([]PlaylistItem, error) {
	query := url.Values{}
	query.Set("ParentId", playlistID)
	query.Set("Recursive", "false")
	query.Set("Fields", "Etag")
	query.Set("SortBy", "SortName")
	query.Set("SortOrder", "Ascending")
	endpoint := fmt.Sprintf("/Users/%s/Items?%s", daemonState.userID, query.Encode())
	response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch playlist items for %s: %w", playlistID, err)
	}
	var playlistItems []PlaylistItem
	entries, _ := response["Items"].([]interface{})
	for index, raw := range entries {
		itemMap, ok := raw.(map[string]interface{})
		if !ok {
			continue
		}
		id, ok := itemMap["Id"].(string)
		if !ok {
			continue
		}
		playlistItems = append(playlistItems, PlaylistItem{
			ID:            id,
			PlaylistID:    playlistID,
			PlaylistIndex: index,
		})
	}
	logger.Printf("Fetched %d items for playlist %s", len(playlistItems), playlistID)
	return playlistItems, nil
}
// fetchAllJellyfinData rebuilds the complete cache structure from the server:
// movies, series and playlists (optionally restricted to IncludedLibraries),
// per-series episodes, per-playlist items, and the derived ID lists
// (favorites, continue-watching, recently-added, next-up).
// On success the new structure is installed into daemonState.cacheData under
// the cache lock and also returned to the caller.
func fetchAllJellyfinData() (*CacheData, error) {
	logger.Println("Fetching all Jellyfin data with optimized structure and library filtering...")
	if daemonState.httpClient == nil || daemonState.userID == "" {
		return nil, fmt.Errorf("client not authenticated")
	}
	// Get available libraries and apply filtering
	availableLibraries, err := fetchAvailableLibraries()
	if err != nil {
		return nil, fmt.Errorf("failed to get available libraries: %w", err)
	}
	filteredLibraryIDs := getFilteredLibraryIDs(availableLibraries)
	if IncludedLibraries != nil {
		if len(filteredLibraryIDs) == 0 {
			// No configured library matched the server's libraries; the fetch
			// loop below falls through to the unfiltered branch in that case.
			logger.Println("No valid libraries found for filtering - falling back to all libraries")
		} else {
			// Log only the configured names that actually exist on the server.
			var includedNames []string
			for _, name := range IncludedLibraries {
				if _, exists := availableLibraries[name]; exists {
					includedNames = append(includedNames, name)
				}
			}
			logger.Printf("Using library filtering: %v", includedNames)
		}
	}
	// Non-nil empty slices/maps keep the JSON output as [] / {} rather than null.
	allData := &CacheData{
		Timestamp: time.Now().Unix(),
		LibraryFilter: LibraryFilter{
			Enabled:            IncludedLibraries != nil,
			IncludedLibraries:  IncludedLibraries,
			AvailableLibraries: getLibraryNames(availableLibraries),
			FilteredLibraryIDs: filteredLibraryIDs,
		},
		AllItems: AllItems{
			Movies:              []MediaItem{},
			Series:              []MediaItem{},
			Episodes:            make(map[string][]MediaItem),
			Playlists:           []MediaItem{},
			PlaylistItems:       make(map[string][]PlaylistItem),
			FavoritesIDs:        []string{},
			ContinueWatchingIDs: []string{},
			RecentlyAddedIDs:    []string{},
			NextUpIDs:           []string{},
		},
	}
	// Common query parameters reused for every item-type fetch below.
	baseParams := url.Values{}
	baseParams.Set("Recursive", "true")
	baseParams.Set("Fields", "Overview,Genres,CommunityRating,OfficialRating,ProductionYear,PremiereDate,MediaSources,MediaStreams,Path,Genres,Studios,Tags,Taglines,ProviderIds,UserData,People,Overview")
	baseParams.Set("ImageTypeLimit", "1")
	baseParams.Set("EnableImageTypes", "Primary,Backdrop,Screenshot,Thumb")
	// Fetch movies, series, and playlists with library filtering
	categories := []struct {
		name     string
		itemType string
		target   *[]MediaItem
	}{
		{"movies", "Movie", &allData.AllItems.Movies},
		{"series", "Series", &allData.AllItems.Series},
		{"playlists", "Playlist", &allData.AllItems.Playlists},
	}
	for _, category := range categories {
		logger.Printf("Fetching %s...", category.name)
		if len(filteredLibraryIDs) > 0 && category.name != "playlists" {
			// For movies and series, apply library filtering: one request per
			// included library, scoped via ParentId.
			for _, libraryID := range filteredLibraryIDs {
				// Reverse-lookup the library name for logging only.
				libraryName := ""
				for name, info := range availableLibraries {
					if id, ok := info["id"].(string); ok && id == libraryID {
						libraryName = name
						break
					}
				}
				logger.Printf("Fetching %s from library: %s", category.name, libraryName)
				// Copy baseParams so per-library settings don't leak between requests.
				params := url.Values{}
				for k, v := range baseParams {
					params[k] = v
				}
				params.Set("IncludeItemTypes", category.itemType)
				params.Set("ParentId", libraryID)
				endpoint := fmt.Sprintf("/Users/%s/Items?%s", daemonState.userID, params.Encode())
				response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
				if err != nil {
					// Best-effort: skip this library and keep fetching the rest.
					logger.Printf("Failed to fetch %s from library %s: %v", category.name, libraryName, err)
					continue
				}
				if items, ok := response["Items"].([]interface{}); ok {
					for _, item := range items {
						if itemMap, ok := item.(map[string]interface{}); ok {
							processed := processJellyfinItem(itemMap)
							*category.target = append(*category.target, *processed)
						}
					}
					logger.Printf("Fetched %d %s from %s", len(items), category.name, libraryName)
				}
			}
		} else {
			// For playlists or when no filtering, fetch normally
			params := url.Values{}
			for k, v := range baseParams {
				params[k] = v
			}
			params.Set("IncludeItemTypes", category.itemType)
			endpoint := fmt.Sprintf("/Users/%s/Items?%s", daemonState.userID, params.Encode())
			response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
			if err != nil {
				logger.Printf("Failed to fetch %s: %v", category.name, err)
				continue
			}
			if items, ok := response["Items"].([]interface{}); ok {
				for _, item := range items {
					if itemMap, ok := item.(map[string]interface{}); ok {
						processed := processJellyfinItem(itemMap)
						*category.target = append(*category.target, *processed)
					}
				}
			}
		}
		logger.Printf("βœ… Loaded %d %s", len(*category.target), category.name)
	}
	// Fetch episodes for all series
	logger.Println("Fetching episodes for all series...")
	episodeCount := 0
	for _, series := range allData.AllItems.Series {
		episodes, err := fetchEpisodesForSeries(series.ID)
		if err != nil {
			logger.Printf("Failed to fetch episodes for series %s: %v", series.ID, err)
			continue
		}
		if len(episodes) > 0 {
			allData.AllItems.Episodes[series.ID] = episodes
			episodeCount += len(episodes)
		}
	}
	logger.Printf("βœ… Loaded %d episodes across %d series", episodeCount, len(allData.AllItems.Episodes))
	// Fetch playlist items for all playlists
	logger.Println("Fetching items for all playlists (optimized)...")
	playlistItemsCount := 0
	for _, playlist := range allData.AllItems.Playlists {
		playlistItems, err := fetchPlaylistItems(playlist.ID)
		if err != nil {
			logger.Printf("Failed to fetch items for playlist %s: %v", playlist.ID, err)
			continue
		}
		if len(playlistItems) > 0 {
			allData.AllItems.PlaylistItems[playlist.ID] = playlistItems
			playlistItemsCount += len(playlistItems)
		}
	}
	logger.Printf("βœ… Loaded %d items across %d playlists", playlistItemsCount, len(allData.AllItems.Playlists))
	// Collect IDs for filtered views efficiently: flatten movies, series and
	// episodes into one slice so both ID extractions below share one pass shape.
	var allItemsForFiltering []MediaItem
	allItemsForFiltering = append(allItemsForFiltering, allData.AllItems.Movies...)
	allItemsForFiltering = append(allItemsForFiltering, allData.AllItems.Series...)
	// Add all episodes
	for _, episodesList := range allData.AllItems.Episodes {
		allItemsForFiltering = append(allItemsForFiltering, episodesList...)
	}
	// Extract favorites IDs
	for _, item := range allItemsForFiltering {
		if item.IsFavorite {
			allData.AllItems.FavoritesIDs = append(allData.AllItems.FavoritesIDs, item.ID)
		}
	}
	// Extract continue watching IDs (items with progress > 5% and < 95%)
	for _, item := range allItemsForFiltering {
		if item.PlaybackPositionTicks > 0 && !item.Played &&
			item.PlayedPercentage > 5 && item.PlayedPercentage < 95 {
			allData.AllItems.ContinueWatchingIDs = append(allData.AllItems.ContinueWatchingIDs, item.ID)
		}
	}
	// Fetch recently added and next up (server-computed lists, capped at 50).
	recentlyAddedIDs, err := fetchRecentlyAddedIDs(50)
	if err != nil {
		logger.Printf("Failed to fetch recently added: %v", err)
	} else {
		allData.AllItems.RecentlyAddedIDs = recentlyAddedIDs
	}
	nextUpIDs, err := fetchNextUpIDs(50)
	if err != nil {
		logger.Printf("Failed to fetch next up: %v", err)
	} else {
		allData.AllItems.NextUpIDs = nextUpIDs
	}
	// Calculate total items
	allData.TotalItems = len(allData.AllItems.Movies) + len(allData.AllItems.Series) +
		len(allData.AllItems.Playlists) + episodeCount
	// Log filtering results
	if IncludedLibraries != nil {
		var includedNames []string
		for _, name := range IncludedLibraries {
			if _, exists := availableLibraries[name]; exists {
				includedNames = append(includedNames, name)
			}
		}
		logger.Printf("πŸ“š Library filtering applied - included: %v", includedNames)
	}
	logger.Printf("βœ… Optimized structure created:")
	logger.Printf("   Total items: %d", allData.TotalItems)
	logger.Printf("   Movies: %d", len(allData.AllItems.Movies))
	logger.Printf("   Series: %d", len(allData.AllItems.Series))
	logger.Printf("   Playlists: %d", len(allData.AllItems.Playlists))
	logger.Printf("   Episodes: %d", episodeCount)
	logger.Printf("   Favorites: %d items", len(allData.AllItems.FavoritesIDs))
	logger.Printf("   Continue watching: %d items", len(allData.AllItems.ContinueWatchingIDs))
	logger.Printf("   Recently added: %d items", len(allData.AllItems.RecentlyAddedIDs))
	logger.Printf("   Next up: %d episodes", len(allData.AllItems.NextUpIDs))
	// Publish the freshly built structure; writers/readers elsewhere use the
	// same cacheLock.
	daemonState.cacheLock.Lock()
	daemonState.cacheData = allData
	daemonState.cacheLock.Unlock()
	return allData, nil
}
// Cache management functions
// saveCache atomically persists cache data to CacheDir/jellyfin_data.json.
// When cacheData is nil, the current in-memory cache (daemonState.cacheData)
// is saved instead. The JSON is written to a ".tmp" sibling first and then
// renamed into place so readers never observe a partially written file.
func saveCache(cacheData *CacheData) error {
	if err := os.MkdirAll(CacheDir, 0755); err != nil {
		return fmt.Errorf("failed to create cache directory: %w", err)
	}
	cacheFile := filepath.Join(CacheDir, "jellyfin_data.json")
	tempFile := cacheFile + ".tmp"
	dataToSave := cacheData
	if dataToSave == nil {
		daemonState.cacheLock.RLock()
		dataToSave = daemonState.cacheData
		daemonState.cacheLock.RUnlock()
	}
	if dataToSave == nil {
		return fmt.Errorf("no cache data to save")
	}
	// Hold a read lock while encoding so concurrent delta updates cannot
	// mutate the structure mid-serialization.
	daemonState.cacheLock.RLock()
	file, err := os.Create(tempFile)
	if err != nil {
		daemonState.cacheLock.RUnlock()
		return fmt.Errorf("failed to create temp file: %w", err)
	}
	encoder := json.NewEncoder(file)
	encoder.SetIndent("", " ")
	encodeErr := encoder.Encode(dataToSave)
	// FIX: the Close error was previously discarded; a failed close (e.g.
	// flush failure on a full disk) would have let the rename below publish
	// a truncated cache file as if the save had succeeded.
	closeErr := file.Close()
	daemonState.cacheLock.RUnlock()
	if encodeErr != nil {
		os.Remove(tempFile)
		return fmt.Errorf("failed to encode cache data: %w", encodeErr)
	}
	if closeErr != nil {
		os.Remove(tempFile)
		return fmt.Errorf("failed to close temp file: %w", closeErr)
	}
	if err := os.Rename(tempFile, cacheFile); err != nil {
		os.Remove(tempFile)
		return fmt.Errorf("failed to rename temp file: %w", err)
	}
	logger.Printf("βœ… Cache updated successfully (%d items)", dataToSave.TotalItems)
	return nil
}
// loadCache reads the persisted JSON cache from disk and installs it as the
// current in-memory cache. Returns an error if the file is missing,
// unreadable, or not valid JSON.
func loadCache() error {
	path := filepath.Join(CacheDir, "jellyfin_data.json")
	if _, statErr := os.Stat(path); os.IsNotExist(statErr) {
		return fmt.Errorf("cache file does not exist")
	}
	f, err := os.Open(path)
	if err != nil {
		return fmt.Errorf("failed to open cache file: %w", err)
	}
	defer f.Close()
	loaded := &CacheData{}
	if err := json.NewDecoder(f).Decode(loaded); err != nil {
		return fmt.Errorf("failed to decode cache data: %w", err)
	}
	// Swap the freshly decoded structure in under the cache lock.
	daemonState.cacheLock.Lock()
	daemonState.cacheData = loaded
	daemonState.cacheLock.Unlock()
	logger.Printf("βœ… Loaded existing cache (%d items)", loaded.TotalItems)
	return nil
}
// WebSocket handling
func setupWebSocket() error {
if daemonState.httpClient == nil {
return fmt.Errorf("cannot setup WebSocket: client not authenticated")
}
logger.Println("Setting up WebSocket connection...")
// Build WebSocket URL
wsURL := strings.Replace(JellyfinServer, "http://", "ws://", 1)
wsURL = strings.Replace(wsURL, "https://", "wss://", 1)
wsURL += "/socket"
// Add authentication parameters
params := url.Values{}
params.Set("api_key", daemonState.accessToken)
params.Set("deviceId", DeviceID)
wsURL += "?" + params.Encode()
ctx := context.Background()
conn, _, err := websocket.Dial(ctx, wsURL, nil)
if err != nil {
return fmt.Errorf("failed to connect to WebSocket: %w", err)
}
daemonState.wsConn = conn
logger.Println("βœ… WebSocket connected")
// Start WebSocket message handler
go handleWebSocketMessages(ctx)
return nil
}
// handleWebSocketMessages reads messages from the connection that was current
// when the goroutine started and dispatches each one to handleWebSocketMessage.
// On a read error it attempts a single reconnect (which spawns a new handler
// goroutine) and returns.
func handleWebSocketMessages(ctx context.Context) {
	// FIX: capture the connection this handler owns. The previous version
	// deferred a close of daemonState.wsConn, which after a successful
	// reconnect inside this function points at the NEW connection - so the
	// defer immediately killed the connection the replacement handler was
	// reading from. Closing (and reading) only the captured conn avoids that.
	conn := daemonState.wsConn
	if conn == nil {
		return
	}
	defer conn.Close(websocket.StatusNormalClosure, "")
	for daemonState.running {
		var message WebSocketMessage
		err := wsjson.Read(ctx, conn, &message)
		if err != nil {
			logger.Printf("WebSocket read error: %v", err)
			// Try to reconnect; setupWebSocket starts a fresh handler
			// goroutine, so this one returns either way.
			if daemonState.running {
				time.Sleep(5 * time.Second)
				if err := setupWebSocket(); err != nil {
					logger.Printf("Failed to reconnect WebSocket: %v", err)
				}
			}
			return
		}
		logger.Printf("πŸ“‘ WebSocket message: %s", message.MessageType)
		handleWebSocketMessage(message.MessageType, message.Data)
	}
}
// handleWebSocketMessage dispatches a single Jellyfin WebSocket message by
// type. User-data and library changes are applied as in-place cache deltas
// (followed by a cache save); a completed library refresh triggers a full
// cache rebuild; server-shutdown messages stop the daemon loop.
func handleWebSocketMessage(messageType string, data interface{}) {
	switch messageType {
	case "UserDataChanged":
		logger.Println("πŸ”„ User data changed - performing delta update")
		if dataMap, ok := data.(map[string]interface{}); ok {
			if userDataList, ok := dataMap["UserDataList"].([]interface{}); ok {
				// Convert the loosely-typed JSON payload into UserDataChange
				// records; missing fields keep their zero values.
				var changes []UserDataChange
				for _, userData := range userDataList {
					if userDataMap, ok := userData.(map[string]interface{}); ok {
						change := UserDataChange{}
						if itemID, ok := userDataMap["ItemId"].(string); ok {
							change.ItemID = itemID
						}
						if percentage, ok := userDataMap["PlayedPercentage"].(float64); ok {
							change.PlayedPercentage = percentage
						}
						if position, ok := userDataMap["PlaybackPositionTicks"].(float64); ok {
							change.PlaybackPositionTicks = int64(position)
						}
						if played, ok := userDataMap["Played"].(bool); ok {
							change.Played = played
						}
						if favorite, ok := userDataMap["IsFavorite"].(bool); ok {
							change.IsFavorite = favorite
						}
						if lastPlayed, ok := userDataMap["LastPlayedDate"].(string); ok {
							change.LastPlayedDate = lastPlayed
						}
						if playCount, ok := userDataMap["PlayCount"].(float64); ok {
							change.PlayCount = int(playCount)
						}
						changes = append(changes, change)
					}
				}
				// Persist only if the delta actually touched cached items.
				if updateUserDataInCache(changes) {
					if err := saveCache(nil); err != nil {
						logger.Printf("Failed to save cache after user data update: %v", err)
					} else {
						logger.Println("βœ… User data updated successfully")
					}
				}
			}
		}
	case "LibraryChanged":
		logger.Println("πŸ”„ Library changed - performing delta update")
		if dataMap, ok := data.(map[string]interface{}); ok {
			// Collect added/removed/updated item ID lists from the payload.
			change := LibraryChange{}
			if added, ok := dataMap["ItemsAdded"].([]interface{}); ok {
				for _, item := range added {
					if id, ok := item.(string); ok {
						change.ItemsAdded = append(change.ItemsAdded, id)
					}
				}
			}
			if removed, ok := dataMap["ItemsRemoved"].([]interface{}); ok {
				for _, item := range removed {
					if id, ok := item.(string); ok {
						change.ItemsRemoved = append(change.ItemsRemoved, id)
					}
				}
			}
			if updated, ok := dataMap["ItemsUpdated"].([]interface{}); ok {
				for _, item := range updated {
					if id, ok := item.(string); ok {
						change.ItemsUpdated = append(change.ItemsUpdated, id)
					}
				}
			}
			if handleLibraryChanges(change) {
				if err := saveCache(nil); err != nil {
					logger.Printf("Failed to save cache after library change: %v", err)
				} else {
					logger.Println("βœ… Library changes applied successfully")
				}
			}
		}
	case "PlaybackStart", "PlaybackStopped", "PlaybackProgress":
		logger.Printf("πŸ”„ %s - performing delta playback update", messageType)
		if handlePlaybackEvents(messageType, data) {
			if err := saveCache(nil); err != nil {
				logger.Printf("Failed to save cache after playback update: %v", err)
			}
		}
	case "RefreshProgress":
		// Only a 100% progress report triggers work; partial progress is noise.
		if dataMap, ok := data.(map[string]interface{}); ok {
			if progress, ok := dataMap["Progress"].(float64); ok && progress == 100 {
				logger.Println("πŸ”„ Library refresh completed - performing full cache refresh")
				fallbackFullRefresh()
				return
			}
		}
		logger.Println("πŸ’‘ Refresh in progress - skipping update")
	case "ForceKeepAlive":
		logger.Println("πŸ’‘ Keep-alive message")
	case "RestartRequired", "ServerShuttingDown", "ServerRestarting":
		logger.Printf("⚠️ Server message: %s", messageType)
		if messageType == "ServerShuttingDown" {
			// Stop the read loop and the daemon's main loop health checks.
			daemonState.running = false
		}
	default:
		logger.Printf("❓ Message type '%s' not handled", messageType)
	}
}
// Delta update functions
// Delta update functions

// updateUserDataInCache applies per-item user-data changes (played state,
// progress, favorite flag) to the in-memory cache, and keeps the derived
// FavoritesIDs / ContinueWatchingIDs lists consistent with the new state.
// Returns true if at least one cached item was modified (signalling the
// caller to persist the cache).
func updateUserDataInCache(userDataList []UserDataChange) bool {
	// FIX: take the lock BEFORE reading daemonState.cacheData. The previous
	// version checked the pointer for nil without synchronization, which is
	// a data race with writers (fetchAllJellyfinData / loadCache).
	daemonState.cacheLock.Lock()
	defer daemonState.cacheLock.Unlock()
	if daemonState.cacheData == nil {
		logger.Println("No cache data available for user data update")
		return false
	}
	updatedItems := 0
	var favoritesChanges, continueWatchingChanges []string
	// Get current ID sets for easy lookup
	currentFavorites := make(map[string]bool)
	for _, id := range daemonState.cacheData.AllItems.FavoritesIDs {
		currentFavorites[id] = true
	}
	currentContinueWatching := make(map[string]bool)
	for _, id := range daemonState.cacheData.AllItems.ContinueWatchingIDs {
		currentContinueWatching[id] = true
	}
	for _, userData := range userDataList {
		if userData.ItemID == "" {
			continue
		}
		// An item belongs in continue-watching when partially played
		// (progress strictly between 5% and 95%) and not marked played.
		shouldBeInContinueWatching := userData.PlaybackPositionTicks > 0 &&
			!userData.Played &&
			userData.PlayedPercentage > 5 &&
			userData.PlayedPercentage < 95
		itemFound := false
		// Update in main data structures (movies and series).
		categories := []*[]MediaItem{
			&daemonState.cacheData.AllItems.Movies,
			&daemonState.cacheData.AllItems.Series,
		}
		for _, items := range categories {
			for i := range *items {
				if (*items)[i].ID == userData.ItemID {
					(*items)[i].PlayedPercentage = userData.PlayedPercentage
					(*items)[i].PlaybackPositionTicks = userData.PlaybackPositionTicks
					(*items)[i].IsFavorite = userData.IsFavorite
					(*items)[i].Played = userData.Played
					if userData.LastPlayedDate != "" {
						timestamp := parseTimestamp(userData.LastPlayedDate)
						(*items)[i].LastPlayedDate = timestamp
					}
					updatedItems++
					itemFound = true
					logger.Printf("Updated item %s: played=%v, percentage=%.1f%%, position=%d",
						userData.ItemID, userData.Played, userData.PlayedPercentage, userData.PlaybackPositionTicks)
					break
				}
			}
			if itemFound {
				break
			}
		}
		// Fall back to searching every series' episode list.
		if !itemFound {
			for seriesID, episodes := range daemonState.cacheData.AllItems.Episodes {
				for i := range episodes {
					if episodes[i].ID == userData.ItemID {
						episodes[i].PlayedPercentage = userData.PlayedPercentage
						episodes[i].PlaybackPositionTicks = userData.PlaybackPositionTicks
						episodes[i].IsFavorite = userData.IsFavorite
						episodes[i].Played = userData.Played
						if userData.LastPlayedDate != "" {
							timestamp := parseTimestamp(userData.LastPlayedDate)
							episodes[i].LastPlayedDate = timestamp
						}
						daemonState.cacheData.AllItems.Episodes[seriesID] = episodes
						updatedItems++
						itemFound = true
						logger.Printf("Updated episode %s: played=%v, percentage=%.1f%%, position=%d",
							userData.ItemID, userData.Played, userData.PlayedPercentage, userData.PlaybackPositionTicks)
						break
					}
				}
				if itemFound {
					break
				}
			}
		}
		if itemFound {
			// Update favorites ID list
			if userData.IsFavorite && !currentFavorites[userData.ItemID] {
				currentFavorites[userData.ItemID] = true
				favoritesChanges = append(favoritesChanges, fmt.Sprintf("Added %s", userData.ItemID))
			} else if !userData.IsFavorite && currentFavorites[userData.ItemID] {
				delete(currentFavorites, userData.ItemID)
				favoritesChanges = append(favoritesChanges, fmt.Sprintf("Removed %s", userData.ItemID))
			}
			// Update continue watching ID list
			if shouldBeInContinueWatching && !currentContinueWatching[userData.ItemID] {
				currentContinueWatching[userData.ItemID] = true
				logger.Printf("Added item %s to continue watching (progress: %.1f%%)", userData.ItemID, userData.PlayedPercentage)
				continueWatchingChanges = append(continueWatchingChanges, fmt.Sprintf("Added %s", userData.ItemID))
			} else if !shouldBeInContinueWatching && currentContinueWatching[userData.ItemID] {
				delete(currentContinueWatching, userData.ItemID)
				reason := "played"
				if !userData.Played {
					reason = fmt.Sprintf("progress %.1f%% outside range", userData.PlayedPercentage)
				}
				logger.Printf("Removed item %s from continue watching (%s)", userData.ItemID, reason)
				continueWatchingChanges = append(continueWatchingChanges, fmt.Sprintf("Removed %s", userData.ItemID))
			}
		}
	}
	// Rebuild the ID lists in the cache from the updated sets.
	daemonState.cacheData.AllItems.FavoritesIDs = make([]string, 0, len(currentFavorites))
	for id := range currentFavorites {
		daemonState.cacheData.AllItems.FavoritesIDs = append(daemonState.cacheData.AllItems.FavoritesIDs, id)
	}
	daemonState.cacheData.AllItems.ContinueWatchingIDs = make([]string, 0, len(currentContinueWatching))
	for id := range currentContinueWatching {
		daemonState.cacheData.AllItems.ContinueWatchingIDs = append(daemonState.cacheData.AllItems.ContinueWatchingIDs, id)
	}
	if updatedItems > 0 {
		daemonState.cacheData.Timestamp = time.Now().Unix()
		logMsg := fmt.Sprintf("Updated user data for %d items (%d cache entries)", len(userDataList), updatedItems)
		if len(favoritesChanges) > 0 {
			logMsg += fmt.Sprintf(" - Favorites: %s", strings.Join(favoritesChanges, ", "))
		}
		if len(continueWatchingChanges) > 0 {
			logMsg += fmt.Sprintf(" - Continue watching: %s", strings.Join(continueWatchingChanges, ", "))
		}
		logger.Println(logMsg)
		return true
	}
	logger.Printf("No cache entries found for user data updates")
	return false
}
// handleLibraryChanges applies a LibraryChanged delta (added / removed /
// updated item IDs) to the in-memory cache: removed items are purged from
// every structure, added/updated items are fetched from the server and
// re-inserted into the appropriate category, and the derived ID lists are
// kept consistent. Returns true when changes were applied with an acceptable
// success rate (>= 50% of fetch operations succeeded); false signals the
// caller that a full refresh may be needed.
//
// NOTE(review): this function performs network fetches while holding the
// cache write lock, blocking readers for the duration - acceptable for a
// single-user daemon, but worth revisiting if contention appears.
func handleLibraryChanges(libraryData LibraryChange) bool {
	itemsAdded := libraryData.ItemsAdded
	itemsRemoved := libraryData.ItemsRemoved
	itemsUpdated := libraryData.ItemsUpdated
	// FIX: take the lock BEFORE reading daemonState.cacheData. The previous
	// version checked the pointer for nil without synchronization, which is
	// a data race with writers (fetchAllJellyfinData / loadCache).
	daemonState.cacheLock.Lock()
	defer daemonState.cacheLock.Unlock()
	if daemonState.cacheData == nil {
		logger.Println("No cache data available for library updates")
		return false
	}
	changesMade := false
	failedOperations := 0
	totalOperations := len(itemsAdded) + len(itemsRemoved) + len(itemsUpdated)
	// Handle removed items first
	if len(itemsRemoved) > 0 {
		logger.Printf("Removing %d items from cache", len(itemsRemoved))
		for _, itemID := range itemsRemoved {
			removedCount := 0
			// Remove from main data structures
			categories := []*[]MediaItem{
				&daemonState.cacheData.AllItems.Movies,
				&daemonState.cacheData.AllItems.Series,
				&daemonState.cacheData.AllItems.Playlists,
			}
			for _, items := range categories {
				originalCount := len(*items)
				filtered := make([]MediaItem, 0, originalCount)
				for _, item := range *items {
					if item.ID != itemID {
						filtered = append(filtered, item)
					}
				}
				*items = filtered
				removedCount += originalCount - len(*items)
			}
			// Remove from playlist items (the ID may be a playlist's own ID).
			if _, exists := daemonState.cacheData.AllItems.PlaylistItems[itemID]; exists {
				delete(daemonState.cacheData.AllItems.PlaylistItems, itemID)
				removedCount++
			}
			// Remove from ID-based lists
			idLists := []*[]string{
				&daemonState.cacheData.AllItems.FavoritesIDs,
				&daemonState.cacheData.AllItems.ContinueWatchingIDs,
				&daemonState.cacheData.AllItems.RecentlyAddedIDs,
			}
			for _, idList := range idLists {
				for i, id := range *idList {
					if id == itemID {
						*idList = append((*idList)[:i], (*idList)[i+1:]...)
						removedCount++
						break
					}
				}
			}
			// Remove from episodes
			for seriesID, episodes := range daemonState.cacheData.AllItems.Episodes {
				originalCount := len(episodes)
				filtered := make([]MediaItem, 0, originalCount)
				for _, episode := range episodes {
					if episode.ID != itemID {
						filtered = append(filtered, episode)
					}
				}
				daemonState.cacheData.AllItems.Episodes[seriesID] = filtered
				removedCount += originalCount - len(filtered)
				// Remove empty series from episodes
				if len(filtered) == 0 {
					delete(daemonState.cacheData.AllItems.Episodes, seriesID)
				}
			}
			if removedCount > 0 {
				logger.Printf("Removed item %s from %d locations", itemID, removedCount)
				changesMade = true
			}
		}
	}
	// Handle added and updated items: both need a fresh fetch from the server.
	itemsToFetch := append(itemsAdded, itemsUpdated...)
	if len(itemsToFetch) > 0 {
		logger.Printf("Processing %d items (added: %d, updated: %d)", len(itemsToFetch), len(itemsAdded), len(itemsUpdated))
		for _, itemID := range itemsToFetch {
			// Fetch the item data
			endpoint := fmt.Sprintf("/Items/%s?Fields=Overview,Genres,CommunityRating,OfficialRating,ProductionYear,PremiereDate,MediaSources,MediaStreams,Path,Genres,Studios,Tags,Taglines,ProviderIds,UserData,People,Overview", itemID)
			response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
			if err != nil {
				logger.Printf("Failed to fetch item %s: %v", itemID, err)
				failedOperations++
				continue
			}
			itemData := processJellyfinItem(response)
			// For updated items, remove old versions first
			if contains(itemsUpdated, itemID) {
				categories := []*[]MediaItem{
					&daemonState.cacheData.AllItems.Movies,
					&daemonState.cacheData.AllItems.Series,
					&daemonState.cacheData.AllItems.Playlists,
				}
				for _, items := range categories {
					filtered := make([]MediaItem, 0, len(*items))
					for _, item := range *items {
						if item.ID != itemID {
							filtered = append(filtered, item)
						}
					}
					*items = filtered
				}
				// Remove from ID-based lists
				idLists := []*[]string{
					&daemonState.cacheData.AllItems.FavoritesIDs,
					&daemonState.cacheData.AllItems.ContinueWatchingIDs,
					&daemonState.cacheData.AllItems.RecentlyAddedIDs,
				}
				for _, idList := range idLists {
					filtered := make([]string, 0, len(*idList))
					for _, id := range *idList {
						if id != itemID {
							filtered = append(filtered, id)
						}
					}
					*idList = filtered
				}
				// Remove from episodes if it's an episode
				if itemData.Type == "Episode" {
					for seriesID, episodes := range daemonState.cacheData.AllItems.Episodes {
						filtered := make([]MediaItem, 0, len(episodes))
						for _, episode := range episodes {
							if episode.ID != itemID {
								filtered = append(filtered, episode)
							}
						}
						daemonState.cacheData.AllItems.Episodes[seriesID] = filtered
					}
				}
			}
			// Add to appropriate categories based on type
			switch itemData.Type {
			case "Movie":
				daemonState.cacheData.AllItems.Movies = append(daemonState.cacheData.AllItems.Movies, *itemData)
				changesMade = true
			case "Series":
				daemonState.cacheData.AllItems.Series = append(daemonState.cacheData.AllItems.Series, *itemData)
				// For new series, fetch all episodes
				if contains(itemsAdded, itemID) {
					episodes, err := fetchEpisodesForSeries(itemID)
					if err == nil && len(episodes) > 0 {
						daemonState.cacheData.AllItems.Episodes[itemID] = episodes
					}
				}
				changesMade = true
			case "Episode":
				seriesID := itemData.SeriesID
				if seriesID != "" {
					if _, exists := daemonState.cacheData.AllItems.Episodes[seriesID]; !exists {
						daemonState.cacheData.AllItems.Episodes[seriesID] = []MediaItem{}
					}
					episodes := daemonState.cacheData.AllItems.Episodes[seriesID]
					episodes = append(episodes, *itemData)
					daemonState.cacheData.AllItems.Episodes[seriesID] = episodes
					changesMade = true
				}
			case "Playlist":
				// Remove old version if updating
				if contains(itemsUpdated, itemID) {
					filtered := make([]MediaItem, 0, len(daemonState.cacheData.AllItems.Playlists))
					for _, playlist := range daemonState.cacheData.AllItems.Playlists {
						if playlist.ID != itemID {
							filtered = append(filtered, playlist)
						}
					}
					daemonState.cacheData.AllItems.Playlists = filtered
					// Remove old playlist items
					if _, exists := daemonState.cacheData.AllItems.PlaylistItems[itemID]; exists {
						delete(daemonState.cacheData.AllItems.PlaylistItems, itemID)
					}
				}
				// Add new/updated playlist
				daemonState.cacheData.AllItems.Playlists = append(daemonState.cacheData.AllItems.Playlists, *itemData)
				// Fetch playlist items
				playlistItems, err := fetchPlaylistItems(itemID)
				if err == nil && len(playlistItems) > 0 {
					daemonState.cacheData.AllItems.PlaylistItems[itemID] = playlistItems
				}
				changesMade = true
				logger.Printf("Processed playlist %s with %d items", itemID, len(playlistItems))
			}
			// Update ID-based lists based on item properties
			if itemData.IsFavorite {
				if !contains(daemonState.cacheData.AllItems.FavoritesIDs, itemID) {
					daemonState.cacheData.AllItems.FavoritesIDs = append(daemonState.cacheData.AllItems.FavoritesIDs, itemID)
				}
			}
			// Add to recently_added_ids if it's a new item
			if contains(itemsAdded, itemID) && (itemData.Type == "Movie" || itemData.Type == "Series" || itemData.Type == "Playlist") {
				if !contains(daemonState.cacheData.AllItems.RecentlyAddedIDs, itemID) {
					// Add to the beginning (newest first)
					newRecentlyAdded := append([]string{itemID}, daemonState.cacheData.AllItems.RecentlyAddedIDs...)
					// Keep only the most recent 50 items
					if len(newRecentlyAdded) > 50 {
						newRecentlyAdded = newRecentlyAdded[:50]
					}
					daemonState.cacheData.AllItems.RecentlyAddedIDs = newRecentlyAdded
					logger.Printf("Added item %s to recently_added_ids", itemID)
				}
			}
			// Add to continue_watching_ids if applicable
			if (itemData.Type == "Movie" || itemData.Type == "Episode") &&
				itemData.PlaybackPositionTicks > 0 && !itemData.Played &&
				itemData.PlayedPercentage > 5 && itemData.PlayedPercentage < 95 {
				if !contains(daemonState.cacheData.AllItems.ContinueWatchingIDs, itemID) {
					daemonState.cacheData.AllItems.ContinueWatchingIDs = append(daemonState.cacheData.AllItems.ContinueWatchingIDs, itemID)
				}
			}
			logger.Printf("Processed item %s (%s)", itemID, itemData.Type)
		}
	}
	// Calculate success rate
	successRate := 1.0 - (float64(failedOperations) / float64(max(totalOperations, 1)))
	if changesMade && successRate >= 0.5 {
		// Update total count and timestamp
		episodeCount := 0
		for _, episodes := range daemonState.cacheData.AllItems.Episodes {
			episodeCount += len(episodes)
		}
		daemonState.cacheData.TotalItems = len(daemonState.cacheData.AllItems.Movies) +
			len(daemonState.cacheData.AllItems.Series) +
			len(daemonState.cacheData.AllItems.Playlists) +
			episodeCount
		daemonState.cacheData.Timestamp = time.Now().Unix()
		logger.Printf("Library changes applied successfully: +%d -%d ~%d items (success rate: %.1f%%)",
			len(itemsAdded), len(itemsRemoved), len(itemsUpdated), successRate*100)
		daemonState.totalUpdates++
		if failedOperations > 0 {
			daemonState.failedUpdates++
		}
		return true
	}
	logger.Printf("Library update had low success rate (%.1f%%), recommending full refresh", successRate*100)
	daemonState.failedUpdates++
	daemonState.totalUpdates++
	return false
}
// handlePlaybackEvents reacts to PlaybackStart / PlaybackStopped /
// PlaybackProgress messages by refreshing the affected item's user data.
// Progress events for the same item are throttled to one cache update per
// 30 seconds. Returns true when the cache should be saved (or when the
// event required no work), false when no usable payload was present or the
// item update failed.
func handlePlaybackEvents(messageType string, data interface{}) bool {
	if data == nil {
		return false
	}
	var itemID, userID string
	if payload, isMap := data.(map[string]interface{}); isMap {
		itemID, _ = payload["ItemId"].(string)
		userID, _ = payload["UserId"].(string)
		// Throttle progress spam: skip if we updated this item <30s ago.
		if messageType == "PlaybackProgress" {
			now := time.Now()
			last, seen := daemonState.lastPlaybackUpdate[itemID]
			if seen && now.Sub(last) < 30*time.Second {
				logger.Printf("Skipping frequent progress update for item %s", itemID)
				return true
			}
			daemonState.lastPlaybackUpdate[itemID] = now
		}
	}
	// Only react to events for this daemon's authenticated user.
	if itemID != "" && userID == daemonState.userID {
		logger.Printf("Handling %s for item %s", messageType, itemID)
		return updateSingleItemUserData(itemID)
	}
	return true
}
// updateSingleItemUserData fetches fresh user data for one item from the
// server and merges it into the cache via updateUserDataInCache. Returns
// false when the fetch fails or the item is not cached.
func updateSingleItemUserData(itemID string) bool {
	endpoint := fmt.Sprintf("/Users/%s/Items/%s/UserData", daemonState.userID, itemID)
	response, err := makeJellyfinObjectRequest("GET", endpoint, nil)
	if err != nil {
		logger.Printf("Failed to update user data for item %s: %v", itemID, err)
		return false
	}
	// Translate the loosely-typed JSON response into a UserDataChange;
	// absent fields keep their zero values.
	change := UserDataChange{ItemID: itemID}
	if v, ok := response["PlayedPercentage"].(float64); ok {
		change.PlayedPercentage = v
	}
	if v, ok := response["PlaybackPositionTicks"].(float64); ok {
		change.PlaybackPositionTicks = int64(v)
	}
	if v, ok := response["Played"].(bool); ok {
		change.Played = v
	}
	if v, ok := response["IsFavorite"].(bool); ok {
		change.IsFavorite = v
	}
	if v, ok := response["LastPlayedDate"].(string); ok {
		change.LastPlayedDate = v
	}
	if v, ok := response["PlayCount"].(float64); ok {
		change.PlayCount = int(v)
	}
	logger.Printf("Updating user data for item %s: played_percentage=%.1f%%, position_ticks=%d",
		itemID, change.PlayedPercentage, change.PlaybackPositionTicks)
	return updateUserDataInCache([]UserDataChange{change})
}
// fallbackFullRefresh rebuilds the entire cache from the server and saves
// it. Used when delta updates are unreliable (e.g. after a library scan
// completes). On success the delta-failure statistics are reset.
func fallbackFullRefresh() {
	// Small grace period so the server settles after the triggering event.
	time.Sleep(500 * time.Millisecond)
	logger.Println("Performing full cache refresh...")
	data, fetchErr := fetchAllJellyfinData()
	if fetchErr != nil {
		logger.Printf("❌ Full cache refresh failed: %v", fetchErr)
		return
	}
	if saveErr := saveCache(data); saveErr != nil {
		logger.Printf("❌ Failed to save full cache refresh: %v", saveErr)
		return
	}
	logger.Println("βœ… Full cache refresh completed successfully")
	// Reset failure counters after successful full refresh
	daemonState.failedUpdates = 0
	daemonState.totalUpdates = 1
}
// Utility functions
// Utility functions

// contains reports whether item is present in slice.
func contains(slice []string, item string) bool {
	for i := range slice {
		if slice[i] == item {
			return true
		}
	}
	return false
}
// max returns the larger of a and b.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// Daemon management functions
func writePIDFile() error {
if err := os.MkdirAll(CacheDir, 0755); err != nil {
return fmt.Errorf("failed to create cache directory: %w", err)
}
pidFile := filepath.Join(CacheDir, "jellyfin_daemon.pid")
return os.WriteFile(pidFile, []byte(fmt.Sprintf("%d", os.Getpid())), 0644)
}
// removePIDFile deletes the daemon's PID file; removal is best-effort and
// a missing file is not an error.
func removePIDFile() {
	os.Remove(filepath.Join(CacheDir, "jellyfin_daemon.pid"))
}
// isDaemonRunning reports whether a daemon process recorded in the PID file
// is still alive. Stale or malformed PID files are cleaned up along the way.
func isDaemonRunning() bool {
	raw, err := os.ReadFile(filepath.Join(CacheDir, "jellyfin_daemon.pid"))
	if err != nil {
		return false
	}
	pid, convErr := strconv.Atoi(strings.TrimSpace(string(raw)))
	if convErr != nil {
		removePIDFile()
		return false
	}
	proc, findErr := os.FindProcess(pid)
	if findErr != nil {
		removePIDFile()
		return false
	}
	// Signal 0 probes process liveness without delivering a signal.
	if proc.Signal(syscall.Signal(0)) != nil {
		removePIDFile()
		return false
	}
	return true
}
// runDaemon is the daemon entry point: it writes the PID file, authenticates
// with Jellyfin, loads or builds the initial cache, opens the WebSocket, and
// then loops until SIGTERM/SIGINT, reconnecting the WebSocket if it drops.
// Returns an error only for startup failures; a clean signal-driven shutdown
// returns nil.
func runDaemon() error {
	if isDaemonRunning() {
		return fmt.Errorf("daemon is already running")
	}
	if err := writePIDFile(); err != nil {
		return fmt.Errorf("could not write PID file: %w", err)
	}
	// Setup signal handlers
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGTERM, syscall.SIGINT)
	daemonState.running = true
	logger.Printf("πŸš€ Jellyfin real-time cache daemon started (PID: %d)", os.Getpid())
	// Cleanup runs on every exit path below, including startup failures
	// after this point: stop the loop flag, close the socket, drop the PID file.
	defer func() {
		logger.Println("πŸ›‘ Daemon shutting down...")
		daemonState.running = false
		if daemonState.wsConn != nil {
			daemonState.wsConn.Close(websocket.StatusNormalClosure, "")
		}
		removePIDFile()
	}()
	// Create image cache directory
	if err := os.MkdirAll(ImageCacheDir, 0755); err != nil {
		return fmt.Errorf("failed to create image cache directory: %w", err)
	}
	// Authenticate with Jellyfin
	if err := authenticateWithJellyfin(); err != nil {
		return fmt.Errorf("❌ Authentication failed: %w", err)
	}
	// Try to load existing cache first; fall back to a full initial fetch.
	if err := loadCache(); err != nil {
		logger.Println("πŸ“Š No existing cache found, performing initial cache population...")
		initialCache, err := fetchAllJellyfinData()
		if err != nil {
			return fmt.Errorf("❌ Failed to fetch initial cache: %w", err)
		}
		if err := saveCache(initialCache); err != nil {
			return fmt.Errorf("❌ Failed to save initial cache: %w", err)
		}
	} else {
		logger.Println("πŸ“Š Using existing cache data")
	}
	// Setup WebSocket connection
	if err := setupWebSocket(); err != nil {
		return fmt.Errorf("❌ Failed to setup WebSocket: %w", err)
	}
	// Main daemon loop
	logger.Println("βœ… Daemon fully operational - monitoring for changes...")
	for {
		select {
		case sig := <-sigChan:
			logger.Printf("Received signal %v, shutting down...", sig)
			return nil
		case <-time.After(5 * time.Second):
			// Health check - ensure WebSocket is still connected.
			// NOTE(review): wsConn is only set (never cleared) by
			// setupWebSocket, so this nil branch may be unreachable in
			// practice - verify against the reconnect path.
			if daemonState.wsConn == nil && daemonState.running {
				logger.Println("WebSocket disconnected, attempting reconnect...")
				if err := setupWebSocket(); err != nil {
					logger.Printf("Failed to reconnect WebSocket: %v", err)
				}
			}
		}
	}
}
// Command line interface
// main dispatches the command-line interface. With no argument the
// daemon defaults to the "start" command.
func main() {
	if len(os.Args) < 2 {
		os.Args = append(os.Args, "start")
	}
	switch os.Args[1] {
	case "start":
		cmdStart()
	case "stop":
		cmdStop()
	case "status":
		cmdStatus()
	case "test":
		cmdTest()
	case "refresh":
		cmdRefresh()
	case "restart":
		cmdRestart()
	case "images":
		cmdImages()
	case "cleanup":
		cmdCleanup()
	default:
		fmt.Printf("Usage: %s {start|stop|status|restart|test|refresh|images|cleanup}\n", os.Args[0])
		os.Exit(1)
	}
}

// cmdStart launches the daemon in the foreground, refusing to start a
// second instance. Exits non-zero on any failure.
func cmdStart() {
	if isDaemonRunning() {
		fmt.Println("❌ Daemon is already running")
		os.Exit(1)
	}
	if err := runDaemon(); err != nil {
		fmt.Printf("❌ Failed to start daemon: %v\n", err)
		os.Exit(1)
	}
}

// cmdStop reads the daemon's PID file, sends SIGTERM, and waits up to
// ten seconds for the process to exit. Exits non-zero if the PID file is
// missing/invalid or the signal cannot be delivered.
func cmdStop() {
	pidFile := filepath.Join(CacheDir, "jellyfin_daemon.pid")
	data, err := os.ReadFile(pidFile)
	if err != nil {
		// No PID file means no daemon to stop.
		fmt.Println("❌ Daemon is not running")
		os.Exit(1)
	}
	pid, err := strconv.Atoi(strings.TrimSpace(string(data)))
	if err != nil {
		fmt.Printf("❌ Invalid PID file: %v\n", err)
		os.Exit(1)
	}
	process, err := os.FindProcess(pid)
	if err != nil {
		fmt.Printf("❌ Failed to find process: %v\n", err)
		os.Exit(1)
	}
	if err := process.Signal(syscall.SIGTERM); err != nil {
		fmt.Printf("❌ Failed to stop daemon: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("πŸ“€ Stop signal sent to daemon")
	// Poll for up to 10 seconds for the daemon to shut down cleanly.
	for i := 0; i < 10; i++ {
		if !isDaemonRunning() {
			fmt.Println("βœ… Daemon stopped successfully")
			return
		}
		time.Sleep(1 * time.Second)
	}
	fmt.Println("⚠️ Daemon may still be running")
}

// cmdStatus reports whether the daemon is running and, if the cache file
// is readable, prints cache freshness, item counts, library-filter
// configuration, and delta-update statistics. Cache-read failures are
// silently skipped (the running/not-running report is still useful).
func cmdStatus() {
	if !isDaemonRunning() {
		fmt.Println("❌ Daemon is not running")
		return
	}
	fmt.Println("βœ… Daemon is running")
	cacheFile := filepath.Join(CacheDir, "jellyfin_data.json")
	if _, err := os.Stat(cacheFile); err != nil {
		return
	}
	file, err := os.Open(cacheFile)
	if err != nil {
		return
	}
	defer file.Close()
	var cacheData CacheData
	if err := json.NewDecoder(file).Decode(&cacheData); err != nil {
		return
	}
	age := time.Now().Unix() - cacheData.Timestamp
	fmt.Printf("πŸ“Š Cache last updated: %d seconds ago\n", age)
	fmt.Printf("πŸ“ˆ Total items: %d\n", cacheData.TotalItems)
	fmt.Printf(" Movies: %d\n", len(cacheData.AllItems.Movies))
	fmt.Printf(" Series: %d\n", len(cacheData.AllItems.Series))
	fmt.Printf(" Playlists: %d\n", len(cacheData.AllItems.Playlists))
	fmt.Printf(" Favorites: %d\n", len(cacheData.AllItems.FavoritesIDs))
	fmt.Printf(" Recently Added: %d\n", len(cacheData.AllItems.RecentlyAddedIDs))
	fmt.Printf(" Continue Watching: %d\n", len(cacheData.AllItems.ContinueWatchingIDs))
	fmt.Printf(" Next Up: %d\n", len(cacheData.AllItems.NextUpIDs))
	totalEpisodes := 0
	for _, episodes := range cacheData.AllItems.Episodes {
		totalEpisodes += len(episodes)
	}
	fmt.Printf(" Episodes: %d across %d series\n", totalEpisodes, len(cacheData.AllItems.Episodes))
	// Show library filtering status
	fmt.Println("\nπŸ“š Library Filtering:")
	if !cacheData.LibraryFilter.Enabled {
		fmt.Println(" Status: Disabled (all libraries included)")
	} else {
		fmt.Println(" Status: Enabled")
		fmt.Printf(" Configured libraries: %v\n", cacheData.LibraryFilter.IncludedLibraries)
		fmt.Printf(" Available libraries: %v\n", cacheData.LibraryFilter.AvailableLibraries)
	}
	// NOTE(review): these counters live in *this* CLI process, not the
	// daemon process, so they appear to always read zero here — confirm
	// whether they should be persisted to the cache file instead.
	fmt.Printf("\n⚑ Delta Update Stats:\n")
	if daemonState.totalUpdates > 0 {
		successRate := 1.0 - (float64(daemonState.failedUpdates) / float64(daemonState.totalUpdates))
		fmt.Printf(" Success rate: %.1f%%\n", successRate*100)
		fmt.Printf(" Total updates: %d\n", daemonState.totalUpdates)
		fmt.Printf(" Failed updates: %d\n", daemonState.failedUpdates)
	} else {
		fmt.Println(" No updates processed yet")
	}
}

// cmdTest exercises each subsystem in sequence — authentication, library
// filtering, full data fetch, cache save, WebSocket setup — reporting the
// result of each step. Fatal steps exit non-zero; optional steps only print.
func cmdTest() {
	fmt.Println("πŸ§ͺ Testing Jellyfin connection...")
	if err := os.MkdirAll(ImageCacheDir, 0755); err != nil {
		fmt.Printf("❌ Failed to create image cache directory: %v\n", err)
		os.Exit(1)
	}
	if err := authenticateWithJellyfin(); err != nil {
		fmt.Printf("❌ Authentication failed: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("βœ… Authentication successful")
	fmt.Println("πŸ“š Testing library filtering...")
	availableLibs, err := fetchAvailableLibraries()
	if err != nil {
		fmt.Printf("❌ Failed to get libraries: %v\n", err)
	} else {
		fmt.Printf("Available libraries: %v\n", getLibraryNames(availableLibs))
		if IncludedLibraries != nil {
			filteredIDs := getFilteredLibraryIDs(availableLibs)
			fmt.Printf("Filtered library IDs: %v\n", filteredIDs)
		}
	}
	fmt.Println("πŸ“Š Testing comprehensive data fetch...")
	cacheData, err := fetchAllJellyfinData()
	if err != nil {
		fmt.Printf("❌ Data fetch failed: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("βœ… Fetched %d total items\n", cacheData.TotalItems)
	totalEpisodes := 0
	for _, episodes := range cacheData.AllItems.Episodes {
		totalEpisodes += len(episodes)
	}
	fmt.Printf("βœ… Fetched %d episodes across %d series\n", totalEpisodes, len(cacheData.AllItems.Episodes))
	if err := saveCache(cacheData); err != nil {
		fmt.Printf("❌ Failed to save cache: %v\n", err)
	} else {
		fmt.Println("βœ… Cache saved successfully")
	}
	fmt.Println("πŸ”Œ Testing WebSocket setup...")
	if err := setupWebSocket(); err != nil {
		fmt.Printf("❌ WebSocket setup failed: %v\n", err)
	} else {
		fmt.Println("βœ… WebSocket setup successful")
	}
}

// cmdRefresh forces a full re-fetch of all Jellyfin data, saves it, and
// prints a summary of the enhanced metadata that was collected.
func cmdRefresh() {
	fmt.Println("πŸ”„ Forcing full cache refresh...")
	if err := os.MkdirAll(ImageCacheDir, 0755); err != nil {
		fmt.Printf("❌ Failed to create image cache directory: %v\n", err)
		os.Exit(1)
	}
	if err := authenticateWithJellyfin(); err != nil {
		fmt.Printf("❌ Authentication failed: %v\n", err)
		os.Exit(1)
	}
	cacheData, err := fetchAllJellyfinData()
	if err != nil {
		fmt.Printf("❌ Cache refresh failed: %v\n", err)
		os.Exit(1)
	}
	if err := saveCache(cacheData); err != nil {
		fmt.Printf("❌ Failed to save refreshed cache: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("βœ… Cache refreshed successfully (%d items)\n", cacheData.TotalItems)
	totalEpisodes := 0
	for _, episodes := range cacheData.AllItems.Episodes {
		totalEpisodes += len(episodes)
	}
	fmt.Printf("βœ… Refreshed %d episodes across %d series\n", totalEpisodes, len(cacheData.AllItems.Episodes))
	// Show enhanced metadata stats across movies, series, and episodes.
	var allItems []MediaItem
	allItems = append(allItems, cacheData.AllItems.Movies...)
	allItems = append(allItems, cacheData.AllItems.Series...)
	for _, episodes := range cacheData.AllItems.Episodes {
		allItems = append(allItems, episodes...)
	}
	if len(allItems) > 0 {
		itemsWithVideoCodec := 0
		itemsWithSubtitles := 0
		itemsWithImdb := 0
		itemsWithTrailers := 0
		itemsWithImages := 0
		for _, item := range allItems {
			if item.VideoCodec != "" {
				itemsWithVideoCodec++
			}
			if item.HasSubtitles {
				itemsWithSubtitles++
			}
			if item.ImdbURL != "" {
				itemsWithImdb++
			}
			if item.TrailerURL != "" {
				itemsWithTrailers++
			}
			if item.HasPrimaryImage || item.HasScreenshotImage {
				itemsWithImages++
			}
		}
		fmt.Println("🎬 Enhanced metadata collected:")
		fmt.Printf(" Items with video codec: %d\n", itemsWithVideoCodec)
		fmt.Printf(" Items with subtitles: %d\n", itemsWithSubtitles)
		fmt.Printf(" Items with IMDB URLs: %d\n", itemsWithImdb)
		fmt.Printf(" Items with trailers: %d\n", itemsWithTrailers)
		fmt.Printf(" Items with cached images: %d\n", itemsWithImages)
	}
	// Reset delta update counters after successful full refresh
	daemonState.failedUpdates = 0
	daemonState.totalUpdates = 1
	fmt.Println("πŸ”„ Delta update counters reset")
}

// cmdRestart performs a best-effort stop of any running daemon (errors
// during the stop phase are ignored — the subsequent start detects a
// lingering instance) and then starts the daemon in the foreground.
func cmdRestart() {
	if isDaemonRunning() {
		fmt.Println("πŸ›‘ Stopping daemon...")
		pidFile := filepath.Join(CacheDir, "jellyfin_daemon.pid")
		if data, err := os.ReadFile(pidFile); err == nil {
			if pid, err := strconv.Atoi(strings.TrimSpace(string(data))); err == nil {
				if process, err := os.FindProcess(pid); err == nil {
					// Best effort: the wait loop below detects whether
					// the daemon actually exited, so a signal failure
					// here is deliberately ignored.
					_ = process.Signal(syscall.SIGTERM)
					for i := 0; i < 10; i++ {
						if !isDaemonRunning() {
							break
						}
						time.Sleep(1 * time.Second)
					}
				}
			}
		}
	}
	fmt.Println("πŸš€ Starting daemon...")
	if err := runDaemon(); err != nil {
		fmt.Printf("❌ Failed to start daemon: %v\n", err)
		os.Exit(1)
	}
}

// cmdImages walks the image cache directory and prints per-category
// counts plus the total size of all cached .jpg files.
func cmdImages() {
	fmt.Println("πŸ–ΌοΈ Image Cache Analysis:")
	if _, err := os.Stat(ImageCacheDir); os.IsNotExist(err) {
		fmt.Printf("Cache directory: %s\n", ImageCacheDir)
		fmt.Println("Cache directory exists: ❌")
		return
	}
	fmt.Printf("Cache directory: %s\n", ImageCacheDir)
	fmt.Println("Cache directory exists: βœ…")
	totalImages := 0
	primaryImages := 0
	primaryImagesLarge := 0
	screenshotImages := 0
	screenshotImagesLarge := 0
	thumbImages := 0
	thumbImagesLarge := 0
	var totalSize int64
	err := filepath.Walk(ImageCacheDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if strings.HasSuffix(info.Name(), ".jpg") {
			totalImages++
			totalSize += info.Size()
			// Order matters: check "_large" variants first, since e.g.
			// "x_primary_large.jpg" does not contain "_primary.jpg" but the
			// large check must still take precedence conceptually.
			if strings.Contains(info.Name(), "_primary_large.jpg") {
				primaryImagesLarge++
			} else if strings.Contains(info.Name(), "_primary.jpg") {
				primaryImages++
			} else if strings.Contains(info.Name(), "_screenshot_large.jpg") {
				screenshotImagesLarge++
			} else if strings.Contains(info.Name(), "_screenshot.jpg") {
				screenshotImages++
			} else if strings.Contains(info.Name(), "_thumb_large.jpg") {
				thumbImagesLarge++
			} else if strings.Contains(info.Name(), "_thumb.jpg") {
				thumbImages++
			}
		}
		return nil
	})
	if err != nil {
		fmt.Printf("❌ Failed to analyze image cache: %v\n", err)
		return
	}
	fmt.Printf("Total images: %d\n", totalImages)
	fmt.Printf("Primary images (small): %d\n", primaryImages)
	fmt.Printf("Primary images (large): %d\n", primaryImagesLarge)
	fmt.Printf("Screenshot images (small): %d\n", screenshotImages)
	fmt.Printf("Screenshot images (large): %d\n", screenshotImagesLarge)
	fmt.Printf("Thumbnail images (small): %d\n", thumbImages)
	fmt.Printf("Thumbnail images (large): %d\n", thumbImagesLarge)
	fmt.Printf("Total cache size: %.2f MB\n", float64(totalSize)/(1024*1024))
}

// cmdCleanup deletes cached images whose filename hash does not match the
// MD5 of any item ID present in the current cache file. The hash set is
// built once up front so the walk is O(files + items) rather than the
// previous O(files Γ— items) rescan of every item per file.
func cmdCleanup() {
	fmt.Println("🧹 Cleaning up orphaned images...")
	cacheFile := filepath.Join(CacheDir, "jellyfin_data.json")
	if _, err := os.Stat(cacheFile); os.IsNotExist(err) {
		fmt.Println("❌ No cache file found - cannot determine which images are orphaned")
		return
	}
	file, err := os.Open(cacheFile)
	if err != nil {
		fmt.Printf("❌ Failed to open cache file: %v\n", err)
		return
	}
	defer file.Close()
	var cacheData CacheData
	if err := json.NewDecoder(file).Decode(&cacheData); err != nil {
		fmt.Printf("❌ Failed to decode cache file: %v\n", err)
		return
	}
	// Precompute the MD5 hex digest of every current item ID; cached image
	// filenames are prefixed with this digest.
	currentHashes := make(map[string]bool)
	addID := func(id string) {
		currentHashes[fmt.Sprintf("%x", md5.Sum([]byte(id)))] = true
	}
	for _, item := range cacheData.AllItems.Movies {
		addID(item.ID)
	}
	for _, item := range cacheData.AllItems.Series {
		addID(item.ID)
	}
	for _, item := range cacheData.AllItems.Playlists {
		addID(item.ID)
	}
	for _, episodes := range cacheData.AllItems.Episodes {
		for _, episode := range episodes {
			addID(episode.ID)
		}
	}
	removedCount := 0
	err = filepath.Walk(ImageCacheDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if strings.HasSuffix(info.Name(), ".jpg") {
			// Filename layout: <md5(itemID)>_<variant>.jpg
			parts := strings.Split(info.Name(), "_")
			if len(parts) >= 2 && !currentHashes[parts[0]] {
				if err := os.Remove(path); err == nil {
					removedCount++
					fmt.Printf("Removed orphaned image: %s\n", info.Name())
				}
			}
		}
		return nil
	})
	if err != nil {
		fmt.Printf("❌ Cleanup failed: %v\n", err)
		return
	}
	fmt.Printf("βœ… Removed %d orphaned images\n", removedCount)
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment