package main

import (
	"fmt"
	"io"
	"log"
	"os"
	"regexp"
	"sync"
	"time"
)

// DownloadRequest is the payload describing a single video link to download.
type DownloadRequest struct {
	Link string `json:"link"`
}

// RssWatcher periodically polls a single RSS feed for new videos.
type RssWatcher struct {
	Feed *RssFeed
}

// videoRegex matches video entries in the flattened feed XML; the first
// capture group is the video ID.
var videoRegex = regexp.MustCompile(`yt:video:(?P<id>[^ ]+) (?:[^ ]+ ){2}(?P<title>.+?)https(?:[^ ]+ ){2}(?P<link>[^ ]+)`)

var feeds = []*RssFeed{
	{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCMwJJL5FJFuTRT55ksbQ4GQ", Id: "@AsmongoldClips"},
	// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC8nZUXCwCTffxthKLtOp6ng", Id: "@Splattercatgaming"},
	// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC2THf0jmDDeBujMzG1sD2-Q", Id: "@thesingleplayersquad"},
	// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCmtyQOKKmrMVaKuRXz02jbQ", Id: "@SebastianLague"},
	// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCywBfpGBYhsczNuyyh6Cf6w", Id: "@WorthABuyreviews"},
}

// Watch checks the feed once immediately, then once per minute, and returns
// the first error encountered during a periodic check.
func (w *RssWatcher) Watch(videoUrls chan string) error {
	ticker := time.NewTicker(1 * time.Minute)
	defer ticker.Stop()

	log.Printf("Watcher for feed %s started, checking every minute.", w.Feed.Id)

	if err := w.CheckFeed(videoUrls); err != nil {
		log.Printf("Error on initial check of feed %s: %v", w.Feed.Id, err)
	}

	for {
		select {
		case <-ticker.C:
			log.Printf("Checking feed: %s", w.Feed.Url)
			err := w.CheckFeed(videoUrls)
			if err != nil {
				log.Printf("Error checking feed %s: %v", w.Feed.Id, err)
				return fmt.Errorf("watcher %s failed to check feed: %w", w.Feed.Id, err)
			}
			log.Printf("Successfully checked feed: %s", w.Feed.Url)
		}
	}
}

// CheckFeed parses the cached feed XML and logs the video IDs it finds. The
// HTTP fetch is commented out while the local cache file is used for testing.
func (w *RssWatcher) CheckFeed(videoUrls chan string) error {
	// log.Printf("Checking feed URL: %s", w.Feed.Url)
	// resp, err := http.Get(w.Feed.Url)
	// if err != nil {
	// 	log.Printf("Error creating request for feed %s: %v", w.Feed.Id, err)
	// 	return fmt.Errorf("failed to create request: %w", err)
	// }
	// defer resp.Body.Close()
	// log.Printf("Received response with status code: %d", resp.StatusCode)
	// body, err := io.ReadAll(resp.Body)
	// if err != nil {
	// 	log.Printf("Error reading response body for feed %s: %v", w.Feed.Id, err)
	// 	return fmt.Errorf("failed to read response body: %w", err)
	// }
	// os.WriteFile("cache.xml", body, 0644)

	body, err := os.ReadFile("cache.xml")
	if err != nil {
		return fmt.Errorf("failed to read cache file: %w", err)
	}

	matches := videoRegex.FindAllStringSubmatch(string(body), -1)
	for _, match := range matches {
		log.Println(match[1])
	}
	return nil
}

var Error *log.Logger
var Warning *log.Logger

func init() {
	log.SetFlags(log.Lmicroseconds | log.Lshortfile)

	logFile, err := os.Create("ywatcher.log")
	if err != nil {
		log.Printf("Error creating log file: %v", err)
		os.Exit(1)
	}

	// Mirror the default logger to both stdout and the log file.
	log.SetOutput(io.MultiWriter(os.Stdout, logFile))

	Error = log.New(io.MultiWriter(logFile, os.Stderr, os.Stdout),
		fmt.Sprintf("%sERROR:%s ", "\033[0;101m", "\033[0m"),
		log.Lmicroseconds|log.Lshortfile)
	Warning = log.New(io.MultiWriter(logFile, os.Stdout),
		fmt.Sprintf("%sWarning:%s ", "\033[0;93m", "\033[0m"),
		log.Lmicroseconds|log.Lshortfile)
}

func main() {
	videoUrls := make(chan string, 12)
	wg := &sync.WaitGroup{}

	for _, feed := range feeds {
		wg.Add(1)
		go func(feed *RssFeed) {
			defer wg.Done()

			if err := feed.UpdateLastSeen(); err != nil {
				Error.Printf("failed to update lastseen for feed %s: %v", feed.Id, err)
				panic(err)
			}

			watcher := RssWatcher{
				Feed: feed,
			}
			if err := watcher.Watch(videoUrls); err != nil {
				Error.Printf("watcher %s failed to watch feed: %v", feed.Id, err)
				panic(err)
			}
		}(feed)
	}

	// Drain the channel so watchers never block when sending URLs.
	go func() {
		for videoUrl := range videoUrls {
			log.Println(videoUrl)
		}
	}()

	wg.Wait()
}
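
// The RssFeed type and its UpdateLastSeen method are used above but not
// defined in this listing. The sketch below is a minimal assumption so the
// file compiles on its own: the Url and Id fields come from the usages above,
// while the LastSeen field and the stub body are placeholders, not the
// original implementation.
type RssFeed struct {
	Url      string
	Id       string
	LastSeen time.Time // assumed: publish time of the newest video already seen
}

// UpdateLastSeen is assumed to record the newest entry already published on
// the feed so that older videos are not re-reported; this stub only stamps
// the current time.
func (f *RssFeed) UpdateLastSeen() error {
	f.LastSeen = time.Now()
	return nil
}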