diff --git a/api.go b/api.go
index f4a1a68..b127335 100644
--- a/api.go
+++ b/api.go
@@ -1,6 +1,7 @@
 package main
 
 import (
+	"encoding/base64"
 	"encoding/json"
 	"fmt"
 	"io"
@@ -48,20 +49,26 @@ type APIGroupInfo struct {
 	GroupID int64 `json:"groupId"`
 }
 
-type cachedImage struct {
-	data        []byte
-	contentType string
-	expiresAt   time.Time
+type APIImageBatchRequest struct {
+	Images []APIImageRequest `json:"images"`
 }
 
-var imageCache = struct {
-	sync.RWMutex
-	images map[string]*cachedImage
-}{
-	images: make(map[string]*cachedImage),
+type APIImageRequest struct {
+	TypeID int64 `json:"typeId"`
+	Size   int   `json:"size"`
 }
 
-const imageCacheTTL = 24 * time.Hour
+type APIImageBatchResponse struct {
+	Images map[string]APIImageData `json:"images"`
+}
+
+type APIImageData struct {
+	Data        string `json:"data"` // base64 encoded
+	ContentType string `json:"contentType"`
+	NotFound    bool   `json:"notFound,omitempty"`
+}
+
+const cacheTTL = 24 * time.Hour
 
 func convertFitStatistics(stats *FitStatistics) *APIFitStatistics {
 	api := &APIFitStatistics{
@@ -144,10 +151,11 @@ func handleStatistics(w http.ResponseWriter, r *http.Request) {
 	}
 	flog.Trace("Database connection obtained")
 
-	cacheKey, _ := json.Marshal(req)
-	flog.Debug("Checking cache for key: %s", string(cacheKey))
+	cacheKeyBytes, _ := json.Marshal(req)
+	cacheKey := string(cacheKeyBytes)
+	flog.Debug("Checking cache for key: %s", cacheKey)
 
-	cachedData, found := db.GetCachedStatistics(string(cacheKey))
+	cachedData, found := db.GetCachedStatistics(cacheKey)
 	if found {
 		flog.Info("Serving from cache")
 		w.Header().Set("Content-Type", "application/json")
@@ -178,7 +186,7 @@ func handleStatistics(w http.ResponseWriter, r *http.Request) {
 	}
 
 	flog.Debug("Storing in cache")
-	if err := db.CacheStatistics(string(cacheKey), responseData); err != nil {
+	if err := db.CacheStatistics(cacheKey, responseData); err != nil {
 		flog.Error("Failed to cache statistics: %v", err)
 	}
 
@@ -477,23 +485,32 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	cacheKey := fmt.Sprintf("%d_%d", typeID, size)
+	cacheKey := fmt.Sprintf("image:%d_%d", typeID, size)
 	flog.Debug("Image request: typeID=%d, size=%d, cacheKey=%s", typeID, size, cacheKey)
 
-	imageCache.RLock()
-	cached, exists := imageCache.images[cacheKey]
-	imageCache.RUnlock()
-
-	if exists && time.Now().Before(cached.expiresAt) {
+	// Check database cache
+	cachedData, found := db.GetCacheEntry(cacheKey, cacheTTL)
+	if found {
 		flog.Trace("Serving from cache")
-		if cached.data == nil {
+		// If found but data is nil, it means 404 was cached
+		if cachedData == nil {
 			flog.Trace("Cached 404, returning 404")
 			http.Error(w, "Image not found", http.StatusNotFound)
 			return
 		}
-		w.Header().Set("Content-Type", cached.contentType)
+		// Determine content type from data
+		contentType := "image/png"
+		if len(cachedData) > 0 {
+			// Try to detect from magic bytes
+			if len(cachedData) >= 2 && cachedData[0] == 0xFF && cachedData[1] == 0xD8 {
+				contentType = "image/jpeg"
+			} else if len(cachedData) >= 8 && string(cachedData[0:8]) == "\x89PNG\r\n\x1a\n" {
+				contentType = "image/png"
+			}
+		}
+		w.Header().Set("Content-Type", contentType)
 		w.Header().Set("Cache-Control", "public, max-age=86400")
-		w.Write(cached.data)
+		w.Write(cachedData)
 		return
 	}
 
@@ -511,14 +528,10 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
 	flog.Debug("ESI response status: %d for typeID=%d, size=%d", resp.StatusCode, typeID, size)
 	if resp.StatusCode == http.StatusNotFound {
 		flog.Debug("Image not found on ESI: typeID=%d, size=%d", typeID, size)
-		cached = &cachedImage{
-			data:        nil,
-			contentType: "",
-			expiresAt:   time.Now().Add(imageCacheTTL),
+		// Cache 404 as nil blob in database
+		if err := db.CacheEntry(cacheKey, nil); err != nil {
+			flog.Error("Failed to cache 404: %v", err)
 		}
-		imageCache.Lock()
-		imageCache.images[cacheKey] = cached
-		imageCache.Unlock()
 		http.Error(w, "Image not found", http.StatusNotFound)
 		return
 	}
@@ -541,16 +554,11 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
 		contentType = "image/png"
 	}
 
-	cached = &cachedImage{
-		data:        data,
-		contentType: contentType,
-		expiresAt:   time.Now().Add(imageCacheTTL),
+	// Store as blob in database cache
+	if err := db.CacheEntry(cacheKey, data); err != nil {
+		flog.Error("Failed to cache image: %v", err)
 	}
 
-	imageCache.Lock()
-	imageCache.images[cacheKey] = cached
-	imageCache.Unlock()
-
 	flog.Info("Cached image: typeID=%d, size=%d, size=%d bytes", typeID, size, len(data))
 
 	w.Header().Set("Content-Type", contentType)
@@ -558,6 +566,138 @@ func handleImage(w http.ResponseWriter, r *http.Request) {
 	w.Write(data)
 }
 
+func handleImageBatch(w http.ResponseWriter, r *http.Request) {
+	flog := logger.Default.WithPrefix("handleImageBatch")
+	flog.Trace("Request received: %s %s", r.Method, r.URL.Path)
+
+	if r.Method != http.MethodPost {
+		flog.Debug("Method not allowed: %s", r.Method)
+		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
+		return
+	}
+
+	var req APIImageBatchRequest
+	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
+		flog.Error("Failed to decode request body: %v", err)
+		http.Error(w, "Invalid request body", http.StatusBadRequest)
+		return
+	}
+
+	if len(req.Images) == 0 {
+		w.Header().Set("Content-Type", "application/json")
+		json.NewEncoder(w).Encode(APIImageBatchResponse{Images: make(map[string]APIImageData)})
+		return
+	}
+
+	type imageResult struct {
+		key  string
+		data APIImageData
+	}
+
+	results := make(map[string]APIImageData)
+	resultChan := make(chan imageResult, len(req.Images))
+	var wg sync.WaitGroup
+
+	for _, imgReq := range req.Images {
+		wg.Add(1)
+		go func(img APIImageRequest) {
+			defer wg.Done()
+			cacheKey := fmt.Sprintf("image:%d_%d", img.TypeID, img.Size)
+
+			// Check database cache
+			cachedData, found := db.GetCacheEntry(cacheKey, cacheTTL)
+			if found {
+				if cachedData == nil {
+					resultChan <- imageResult{
+						key:  fmt.Sprintf("%d_%d", img.TypeID, img.Size), // Return without prefix for response
+						data: APIImageData{NotFound: true},
+					}
+					return
+				}
+				// Determine content type from data
+				contentType := "image/png"
+				if len(cachedData) > 0 {
+					if len(cachedData) >= 2 && cachedData[0] == 0xFF && cachedData[1] == 0xD8 {
+						contentType = "image/jpeg"
+					} else if len(cachedData) >= 8 && string(cachedData[0:8]) == "\x89PNG\r\n\x1a\n" {
+						contentType = "image/png"
+					}
+				}
+				resultChan <- imageResult{
+					key: fmt.Sprintf("%d_%d", img.TypeID, img.Size), // Return without prefix for response
+					data: APIImageData{
+						Data:        base64.StdEncoding.EncodeToString(cachedData),
+						ContentType: contentType,
+					},
+				}
+				return
+			}
+
+			esiURL := fmt.Sprintf("https://images.evetech.net/types/%d/icon?size=%d", img.TypeID, img.Size)
+			resp, err := http.Get(esiURL)
+			if err != nil {
+				flog.Error("Failed to fetch from ESI: %v", err)
+				return
+			}
+			defer resp.Body.Close()
+
+			if resp.StatusCode == http.StatusNotFound {
+				// Cache 404 as nil blob in database
+				if err := db.CacheEntry(cacheKey, nil); err != nil {
+					flog.Error("Failed to cache 404: %v", err)
+				}
+				resultChan <- imageResult{
+					key:  fmt.Sprintf("%d_%d", img.TypeID, img.Size), // Return without prefix for response
+					data: APIImageData{NotFound: true},
+				}
+				return
+			}
+
+			if resp.StatusCode != http.StatusOK {
+				flog.Error("ESI returned status %d for typeID=%d, size=%d", resp.StatusCode, img.TypeID, img.Size)
+				return
+			}
+
+			data, err := io.ReadAll(resp.Body)
+			if err != nil {
+				flog.Error("Failed to read image data: %v", err)
+				return
+			}
+
+			contentType := resp.Header.Get("Content-Type")
+			if contentType == "" {
+				contentType = "image/png"
+			}
+
+			// Store as blob in database cache
+			if err := db.CacheEntry(cacheKey, data); err != nil {
+				flog.Error("Failed to cache image: %v", err)
+			}
+
+			encodedData := base64.StdEncoding.EncodeToString(data)
+			resultChan <- imageResult{
+				key: fmt.Sprintf("%d_%d", img.TypeID, img.Size), // Return without prefix for response
+				data: APIImageData{
+					Data:        encodedData,
+					ContentType: contentType,
+				},
+			}
+		}(imgReq)
+	}
+
+	go func() {
+		wg.Wait()
+		close(resultChan)
+	}()
+
+	for result := range resultChan {
+		results[result.key] = result.data
+	}
+
+	w.Header().Set("Content-Type", "application/json")
+	json.NewEncoder(w).Encode(APIImageBatchResponse{Images: results})
+}
+
 func StartAPIServer(port string) {
 	flog := logger.Default.WithPrefix("StartAPIServer")
 	flog.Info("Initializing API server")
@@ -567,6 +707,7 @@ func StartAPIServer(port string) {
 	mux.HandleFunc("/api/search", corsMiddleware(handleSearch))
 	mux.HandleFunc("/api/items/names", corsMiddleware(handleItemNames))
 	mux.HandleFunc("/api/items/", corsMiddleware(handleItemGroup))
+	mux.HandleFunc("/api/images/batch", corsMiddleware(handleImageBatch))
 	mux.HandleFunc("/api/images/", corsMiddleware(handleImage))
 
 	flog.Debug("Registered routes:")
@@ -574,6 +715,7 @@ func StartAPIServer(port string) {
 	flog.Debug("  GET  /api/search")
 	flog.Debug("  GET  /api/items/names")
 	flog.Debug("  GET  /api/items/{id}/group")
+	flog.Debug("  POST /api/images/batch")
 	flog.Debug("  GET  /api/images/{typeId}/{size}")
 
 	flog.Info("Starting API server on port %s", port)
diff --git a/db.go b/db.go
index ae05ce7..96fdc4f 100644
--- a/db.go
+++ b/db.go
@@ -24,17 +24,22 @@ type QueryParams struct {
 	Groups []int64
 }
 
-type StatisticsCache struct {
-	ID           int64     `gorm:"primaryKey;autoIncrement"`
-	CacheKey     string    `gorm:"column:cache_key;not null;index:idx_cache_key_created"`
-	ResponseData []byte    `gorm:"column:response_data;type:BLOB;not null"`
-	CreatedAt    time.Time `gorm:"column:created_at;not null;index:idx_cache_key_created"`
+// CacheEntry stores both statistics (JSON) and images (blobs) in unified cache
+// For 404s, we store a special marker: []byte{0xFF, 0xFE, 0xFD} (NOT_FOUND_MARKER)
+type CacheEntry struct {
+	ID        int64     `gorm:"primaryKey;autoIncrement"`
+	CacheKey  string    `gorm:"column:cache_key;not null"`
+	Data      []byte    `gorm:"column:data;type:BLOB;not null"`
+	CreatedAt time.Time `gorm:"column:created_at;not null"`
 }
 
-func (StatisticsCache) TableName() string {
-	return "statistics_cache"
+var notFoundMarker = []byte{0xFF, 0xFE, 0xFD} // Special marker for cached 404s
+
+func (CacheEntry) TableName() string {
+	return "cache_entries"
 }
+
 type FitStatistics struct {
 	TotalKillmails int64
 	ShipBreakdown  map[int64]SystemStats // shipTypeID -> {Count, Percentage}
@@ -62,6 +67,8 @@ type DB interface {
 	QueryFits(params QueryParams) (*FitStatistics, error)
 	GetCachedStatistics(cacheKey string) ([]byte, bool)
 	CacheStatistics(cacheKey string, data []byte) error
+	GetCacheEntry(cacheKey string, maxAge time.Duration) ([]byte, bool)
+	CacheEntry(cacheKey string, data []byte) error
 	SearchShips(query string, limit int) ([]models.InvType, error)
 	SearchSystems(query string, limit int) ([]models.MapSolarSystem, error)
 	SearchModules(query string, limit int) ([]models.InvType, error)
@@ -120,10 +127,23 @@ func GetDB() (DB, error) {
 func (db *DBWrapper) InitTables() error {
 	ctx := context.Background()
 
-	if err := db.gormDB.AutoMigrate(&StatisticsCache{}); err != nil {
-		return fmt.Errorf("failed to migrate cache table: %w", err)
+	// Migrate unified cache table
+	// Use raw SQL to create table and index with IF NOT EXISTS to avoid errors
+	// For 404s, we store a special marker byte sequence instead of NULL
+	if err := db.gormDB.Exec(`
+		CREATE TABLE IF NOT EXISTS cache_entries (
+			id INTEGER PRIMARY KEY AUTOINCREMENT,
+			cache_key TEXT NOT NULL,
+			data BLOB NOT NULL,
+			created_at DATETIME NOT NULL
+		)
+	`).Error; err != nil {
+		return fmt.Errorf("failed to create cache_entries table: %w", err)
 	}
 
+	// Create index if it doesn't exist
+	db.gormDB.Exec(`CREATE INDEX IF NOT EXISTS idx_cache_key_created ON cache_entries(cache_key, created_at)`)
+
 	// Create flat_killmails table
 	createFlatKillmails := `
 	CREATE TABLE IF NOT EXISTS flat_killmails (
@@ -560,24 +580,63 @@ func (db *DBWrapper) QueryFits(params QueryParams) (*FitStatistics, error) {
 }
 
 func (db *DBWrapper) GetCachedStatistics(cacheKey string) ([]byte, bool) {
-	var cached StatisticsCache
+	// Use unified cache with "stats:" prefix
+	return db.GetCacheEntry("stats:"+cacheKey, 3*24*time.Hour)
+}
+
+func (db *DBWrapper) CacheStatistics(cacheKey string, data []byte) error {
+	// Store in unified cache with "stats:" prefix
+	return db.CacheEntry("stats:"+cacheKey, data)
+}
+
+func (db *DBWrapper) GetCacheEntry(cacheKey string, maxAge time.Duration) ([]byte, bool) {
+	var cached CacheEntry
 	err := db.gormDB.
-		Where("cache_key = ? AND created_at > ?", cacheKey, time.Now().Add(-3*24*time.Hour)).
+		Where("cache_key = ? AND created_at > ?", cacheKey, time.Now().Add(-maxAge)).
 		Order("created_at DESC").
 		Limit(1).
 		First(&cached).Error
-	if err != nil || len(cached.ResponseData) == 0 {
+	if err != nil {
 		return nil, false
 	}
-	return cached.ResponseData, true
+
+	// Check if this is a 404 marker
+	if len(cached.Data) == len(notFoundMarker) {
+		isNotFound := true
+		for i, b := range notFoundMarker {
+			if cached.Data[i] != b {
+				isNotFound = false
+				break
+			}
+		}
+		if isNotFound {
+			return nil, true // Cached 404
+		}
+	}
+
+	// If Data is empty, treat as not found
+	if len(cached.Data) == 0 {
+		return nil, false
+	}
+
+	return cached.Data, true
 }
 
-func (db *DBWrapper) CacheStatistics(cacheKey string, data []byte) error {
-	return db.gormDB.Create(&StatisticsCache{
-		CacheKey:     cacheKey,
-		ResponseData: data,
-		CreatedAt:    time.Now(),
+func (db *DBWrapper) CacheEntry(cacheKey string, data []byte) error {
+	// Delete old entries with same key to avoid duplicates
+	db.gormDB.Where("cache_key = ?", cacheKey).Delete(&CacheEntry{})
+
+	// If data is nil (404), store the special marker
+	cacheData := data
+	if data == nil {
+		cacheData = notFoundMarker
+	}
+
+	return db.gormDB.Create(&CacheEntry{
+		CacheKey:  cacheKey,
+		Data:      cacheData,
+		CreatedAt: time.Now(),
 	}).Error
 }
diff --git a/frontend b/frontend
index 9ecbcab..c448bc6 160000
--- a/frontend
+++ b/frontend
@@ -1 +1 @@
-Subproject commit 9ecbcab96c6ec5894df6976ddb90ebb0b215bbb8
+Subproject commit c448bc6b9d210733d8142a4693bd8a9711bb5e10
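
For reference, below is a minimal client-side sketch of the new POST /api/images/batch endpoint introduced by this diff. The request and response shapes mirror APIImageBatchRequest, APIImageBatchResponse, and APIImageData from api.go; the base URL (http://localhost:8080) and the example type IDs are assumptions for illustration only, not part of the change. Note that response map keys come back as "<typeId>_<size>", without the internal "image:" cache-key prefix, and that Data is base64-encoded image bytes.

// batch_client_example.go - illustrative sketch, not part of the patch.
// Assumes the API server is reachable at http://localhost:8080 (the real
// port is whatever StartAPIServer was given).
package main

import (
	"bytes"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"net/http"
)

// Mirrors APIImageRequest on the server side.
type imageRequest struct {
	TypeID int64 `json:"typeId"`
	Size   int   `json:"size"`
}

// Mirrors APIImageData on the server side.
type imageData struct {
	Data        string `json:"data"`
	ContentType string `json:"contentType"`
	NotFound    bool   `json:"notFound,omitempty"`
}

func main() {
	// Ask for two icons in one round trip (example type IDs).
	reqBody, _ := json.Marshal(map[string][]imageRequest{
		"images": {{TypeID: 587, Size: 64}, {TypeID: 603, Size: 64}},
	})

	resp, err := http.Post("http://localhost:8080/api/images/batch", "application/json", bytes.NewReader(reqBody))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Mirrors APIImageBatchResponse: a map keyed by "<typeId>_<size>".
	var out struct {
		Images map[string]imageData `json:"images"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}

	for key, img := range out.Images {
		if img.NotFound {
			fmt.Printf("%s: not found (cached 404)\n", key)
			continue
		}
		// Data is base64-encoded; decode to get the raw image bytes.
		raw, _ := base64.StdEncoding.DecodeString(img.Data)
		fmt.Printf("%s: %d bytes, %s\n", key, len(raw), img.ContentType)
	}
}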