Files
event-driven-shoppinglist/merging.go
PhatPhuckDave 5f21d144c0 Deretardify the fucking events
Good job claude complicate everything why don't you
2025-09-29 09:36:42 +02:00

147 lines
4.1 KiB
Go

package main
import (
	"crypto/rand"
	"fmt"
	"time"

	"github.com/pocketbase/pocketbase"
	"github.com/pocketbase/pocketbase/core"
)
// MergeEventLog merges old events by resolving them into current state
// and creating new create events with resolved data.
//
// Events with a timestamp older than cutoffDays are collapsed: for every
// collection referenced by those events, one consolidated event per
// surviving item is appended to the log (preserving sequence-number and
// hash-chain continuity), after which the old events are archived and
// deleted.
//
// NOTE(review): the consolidated events currently carry placeholder data
// (Operation "add", Path "/", Value "consolidated") — see the TODO below.
func (es *SimpleEventStore) MergeEventLog(cutoffDays int) error {
	cutoffTime := time.Now().AddDate(0, 0, -cutoffDays)

	// Get all events older than the cutoff, in sequence order.
	oldEvents, err := es.app.FindRecordsByFilter("events", "timestamp < {:cutoff}", "seq", 10000, 0, map[string]any{"cutoff": cutoffTime})
	if err != nil {
		return fmt.Errorf("failed to get old events: %w", err)
	}
	if len(oldEvents) == 0 {
		return nil // Nothing to merge
	}

	// Collect the distinct collections referenced by the old events.
	collections := make(map[string]bool)
	for _, event := range oldEvents {
		collections[event.GetString("collection")] = true
	}

	// Get latest event to preserve sequence and hash-chain continuity.
	latestEvent, err := es.GetLatestEvent()
	if err != nil {
		return fmt.Errorf("failed to get latest event: %w", err)
	}
	nextSeq := 1
	prevHash := ""
	if latestEvent != nil {
		nextSeq = latestEvent.Seq + 1
		prevHash = latestEvent.Hash
	}

	// For each collection, resolve current state and append one
	// consolidated create event per existing item.
	for collectionName := range collections {
		items, err := es.GetAllItems(collectionName)
		if err != nil {
			return fmt.Errorf("failed to get items for collection %s: %w", collectionName, err)
		}
		for _, item := range items {
			itemID, ok := item["id"].(string)
			if !ok {
				continue // item without a string id cannot be addressed
			}
			// TODO: Rethink merging for single operations. The event
			// below is a placeholder; a real implementation should emit
			// JSON Patch "add" operations for each non-system field of
			// the item instead of the fixed Value below.
			consolidatedEvent := &Event{
				Seq:        nextSeq,
				ItemID:     itemID,
				Collection: collectionName,
				Operation:  "add", // Placeholder - merging needs redesign
				Path:       "/",
				Value:      "consolidated",
				Timestamp:  time.Now(),
			}
			// Generate new event ID and chain the hash off the previous event.
			consolidatedEvent.EventID = generateEventID()
			consolidatedEvent.Hash = consolidatedEvent.calculateHash(prevHash)
			if err := es.saveEvent(consolidatedEvent); err != nil {
				return fmt.Errorf("failed to save consolidated event: %w", err)
			}
			nextSeq++
			prevHash = consolidatedEvent.Hash
		}
	}

	// Archive old events before removing them from the live log.
	if err := es.archiveEvents(oldEvents); err != nil {
		return fmt.Errorf("failed to archive old events: %w", err)
	}
	// Delete old events now that they are (nominally) archived.
	for _, event := range oldEvents {
		if err := es.app.Delete(event); err != nil {
			return fmt.Errorf("failed to delete old event: %w", err)
		}
	}
	return nil
}
// archiveEvents saves old events to a backup file.
// Stub: it currently only reports what would be archived; a real
// implementation would write the events out to a timestamped file.
func (es *SimpleEventStore) archiveEvents(events []*core.Record) error {
	stamp := time.Now().Format("2006-01-02_15-04-05")
	fmt.Printf("Would archive %d events to backup file with timestamp %s\n", len(events), stamp)
	return nil
}
// ScheduleEventMerging sets up periodic event log merging.
//
// Stub: no scheduler is wired up yet. In production this would be driven
// by cron or a job queue; the intended wiring looks like:
//
//	go func() {
//		ticker := time.NewTicker(24 * time.Hour)
//		defer ticker.Stop()
//		for range ticker.C {
//			if err := es.MergeEventLog(2); err != nil {
//				// log the error
//			}
//		}
//	}()
func (es *SimpleEventStore) ScheduleEventMerging(app *pocketbase.PocketBase) {
	// Intentionally empty — see the doc comment for the intended wiring.
}
// generateEventID generates a new RFC 4122 version 4 (random) UUID for
// events, formatted as the canonical 36-character hex string.
//
// It builds the UUID from crypto/rand directly so no third-party uuid
// dependency is required. It panics if the system's secure random source
// is unreadable, since silently returning a constant (the previous
// behavior) would make every event share the same ID.
func generateEventID() string {
	var b [16]byte
	if _, err := rand.Read(b[:]); err != nil {
		panic(fmt.Sprintf("generateEventID: reading random bytes: %v", err))
	}
	b[6] = (b[6] & 0x0f) | 0x40 // set version nibble to 4
	b[8] = (b[8] & 0x3f) | 0x80 // set RFC 4122 variant bits
	return fmt.Sprintf("%x-%x-%x-%x-%x", b[0:4], b[4:6], b[6:8], b[8:10], b[10:16])
}