Implement writing URLs to NSQ for downloading

This commit is contained in:
2025-04-13 17:28:58 +02:00
parent 014e99751e
commit d420f78ab8
3 changed files with 76 additions and 20 deletions

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"io"
"log"
"net/http"
"os"
"regexp"
"sync"
@@ -21,10 +22,10 @@ type RssWatcher struct {
var videoRegex = regexp.MustCompile(`yt:video:(?<videoid>[^ ]+) (?:[^ ]+ ){2}(?<videotitle>.+?)https(?:[^ ]+ ){2}(?<date>[^ ]+)`)
var feeds []*RssFeed = []*RssFeed{
{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCMwJJL5FJFuTRT55ksbQ4GQ", Id: "@AsmongoldClips"},
// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC8nZUXCwCTffxthKLtOp6ng", Id: "@Splattercatgaming"},
// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC2THf0jmDDeBujMzG1sD2-Q", Id: "@thesingleplayersquad"},
// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCmtyQOKKmrMVaKuRXz02jbQ", Id: "@SebastianLague"},
// {Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCywBfpGBYhsczNuyyh6Cf6w", Id: "@WorthABuyreviews"},
{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC8nZUXCwCTffxthKLtOp6ng", Id: "@Splattercatgaming"},
{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UC2THf0jmDDeBujMzG1sD2-Q", Id: "@thesingleplayersquad"},
{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCmtyQOKKmrMVaKuRXz02jbQ", Id: "@SebastianLague"},
{Url: "https://www.youtube.com/feeds/videos.xml?channel_id=UCywBfpGBYhsczNuyyh6Cf6w", Id: "@WorthABuyreviews"},
}
func (w *RssWatcher) Watch(videoUrls chan string) error {
@@ -48,24 +49,24 @@ func (w *RssWatcher) Watch(videoUrls chan string) error {
}
func (w *RssWatcher) CheckFeed(videoUrls chan string) error {
// log.Printf("Checking feed URL: %s", w.Feed.Url)
// resp, err := http.Get(w.Feed.Url)
// if err != nil {
// return fmt.Errorf("[%s]: failed to create request: %w", w.Feed.Id, err)
// }
// defer resp.Body.Close()
log.Printf("Checking feed URL: %s", w.Feed.Url)
resp, err := http.Get(w.Feed.Url)
if err != nil {
return fmt.Errorf("[%s]: failed to create request: %w", w.Feed.Id, err)
}
defer resp.Body.Close()
// log.Printf("Received response with status code: %d", resp.StatusCode)
// body, err := io.ReadAll(resp.Body)
// if err != nil {
// return fmt.Errorf("[%s]: failed to read response body: %w", w.Feed.Id, err)
// }
log.Printf("Received response with status code: %d", resp.StatusCode)
body, err := io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("[%s]: failed to read response body: %w", w.Feed.Id, err)
}
// os.WriteFile("cache.xml", body, 0644)
body, err := os.ReadFile("cache.xml")
if err != nil {
return fmt.Errorf("[%s]: failed to read cache file: %w", w.Feed.Id, err)
}
// body, err := os.ReadFile("cache.xml")
// if err != nil {
// return fmt.Errorf("[%s]: failed to read cache file: %w", w.Feed.Id, err)
// }
var feed Feed
err = xml.Unmarshal(body, &feed)
@@ -143,6 +144,11 @@ func main() {
go func() {
for videoUrl := range videoUrls {
log.Printf("Got new video with url %q", videoUrl)
err := Download(videoUrl)
if err != nil {
Error.Printf("failed to download video: %v", err)
panic(err)
}
}
}()