Implement auto-downloading of mismatched files
Content/hashes.txt: 1196 changed lines (file diff suppressed because it is too large)
updater/main.go: 119 changed lines
@@ -2,11 +2,13 @@ package main

import (
    "bufio"
    "bytes"
    "crypto/sha256"
    "flag"
    "fmt"
    "io"
    "log"
    "net/http"
    "os"
    "path/filepath"
    "strings"
@@ -28,10 +30,10 @@ func init() {
        log.Lmicroseconds|log.Lshortfile)
}

const remoteUrl = "https://git.site.quack-lab.dev/dave/barotrauma-gamefiles/src/branch/master/Content"
const remoteUrl = "https://git.site.quack-lab.dev/dave/barotrauma-gamefiles/raw/branch/master/Content"

func main() {
    makehash := flag.Bool("hash", false, "make hash")
    savehash := flag.Bool("savehash", false, "save hash")
    hashfile := flag.String("hashfile", "hashes.txt", "hashfile")
    flag.Parse()
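The change above swaps the base URL from the repository's /src/branch/ web view to Gitea's /raw/branch/ endpoint, so a plain GET returns the file contents instead of an HTML page. As a minimal illustration (the hashesURL name is not in the commit), the hash list fetched further down resolves to:

    hashesURL := remoteUrl + "/hashes.txt"
    // https://git.site.quack-lab.dev/dave/barotrauma-gamefiles/raw/branch/master/Content/hashes.txt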
@@ -51,6 +53,13 @@ func main() {
    }
    log.Printf("loaded hashes")

    remoteHashes, err := LoadRemoteHashes(remoteUrl + "/hashes.txt")
    if err != nil {
        Error.Printf("error loading remote hashes: %v", err)
        return
    }
    log.Printf("loaded remote hashes")

    files, err := doublestar.Glob(os.DirFS(root), "**/*.xml")
    if err != nil {
        Error.Printf("error globbing files: %v", err)
@@ -63,21 +72,52 @@ func main() {
        go func(file string) {
            defer wg.Done()
            path := filepath.Join(root, file)
            log.Printf("file: %s", path)
            if *makehash {
                hash, err := GetLocalHash(path)
                if err != nil {
                    Error.Printf("error getting hash: %v", err)
                    return
                }
                log.Printf("hash: %s", hash)
                hashes.Store(file, hash)
            }
        }(file)
    }
    wg.Wait()

    if *makehash {
        mismatched := 0
        checked := 0
        toDownload := []string{}
        remoteHashes.Range(func(key, value interface{}) bool {
            localhash, ok := hashes.Load(key)
            if !ok {
                Error.Printf("local hash not found: %s", key)
                return true
            }
            if localhash != value {
                Warning.Printf("hash mismatch: %s", key)
                mismatched++
                toDownload = append(toDownload, key.(string))
            }
            checked++
            return true
        })
        log.Printf("Hashes checked: %d, mismatched: %d", checked, mismatched)

        if mismatched > 0 {
            log.Printf("Downloading %d files", len(toDownload))
            wg := sync.WaitGroup{}
            for _, file := range toDownload {
                wg.Add(1)
                go func(file string) {
                    defer wg.Done()
                    log.Printf("Downloading %s", file)
                    err := UpdateLocalFile(file, remoteUrl)
                    if err != nil {
                        Error.Printf("error updating local file: %v", err)
                    }
                }(file)
            }
            wg.Wait()
        }
        if *savehash {
            err := SaveLocalHashes(*hashfile, hashes)
            if err != nil {
                Error.Printf("error saving hashes: %v", err)
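GetLocalHash is called above but not touched by this diff. Given the crypto/sha256 import and the hex-style digests stored in hashes.txt, it presumably looks something like the sketch below (an assumption, not the committed code; it additionally needs encoding/hex):

    func GetLocalHash(path string) (string, error) {
        data, err := os.ReadFile(path)
        if err != nil {
            return "", err
        }
        sum := sha256.Sum256(data)             // hash the file contents
        return hex.EncodeToString(sum[:]), nil // hex digest, comparable to hashes.txt entries
    }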
@@ -107,6 +147,7 @@ func LoadLocalHashes(path string) (*sync.Map, error) {
    }
    defer file.Close()

    count := 0
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        // Hopefully none of the files have spaces in the name.................
@@ -116,7 +157,9 @@ func LoadLocalHashes(path string) (*sync.Map, error) {
            return nil, fmt.Errorf("invalid line: %s", line)
        }
        hashes.Store(parts[0], parts[1])
        count++
    }
    log.Printf("loaded %d local hashes", count)

    return hashes, nil
}
@@ -136,17 +179,51 @@ func SaveLocalHashes(path string, hashes *sync.Map) error {
    return nil
}
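Only the tail of SaveLocalHashes falls inside this hunk. Both loaders expect one space-separated "<path> <hash>" record per line (hence the warning above about spaces in file names), so the writer presumably produces records of that shape, roughly as sketched here (w stands for the opened hash file and is not a name from the commit):

    hashes.Range(func(key, value interface{}) bool {
        fmt.Fprintf(w, "%s %s\n", key, value) // matches the strings.Split(line, " ") parsing
        return true
    })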
// func GetRemoteHash(path string) (string, error) {
//     resp, err := http.Get(remoteUrl + path)
//     if err != nil {
//         return "", err
//     }
//     defer resp.Body.Close()
//
//     body, err := io.ReadAll(resp.Body)
//     if err != nil {
//         return "", err
//     }
//
//     hash := sha256.Sum256(body)
// }
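The commented-out GetRemoteHash computes sha256.Sum256(body) but never returns a value, which is presumably why it stays disabled in favor of the pre-built hashes.txt. Completing it would take roughly the following (a sketch; hex encoding assumed to match the stored hashes):

    hash := sha256.Sum256(body)
    return hex.EncodeToString(hash[:]), nil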
func LoadRemoteHashes(url string) (*sync.Map, error) {
    resp, err := http.Get(url)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }

    count := 0
    hashes := &sync.Map{}
    scanner := bufio.NewScanner(bytes.NewReader(body))
    for scanner.Scan() {
        line := scanner.Text()
        parts := strings.Split(line, " ")
        if len(parts) != 2 {
            return nil, fmt.Errorf("invalid line: %s", line)
        }
        count++
        hashes.Store(parts[0], parts[1])
    }
    log.Printf("loaded %d remote hashes", count)

    return hashes, nil
}
func UpdateLocalFile(path string, remoteUrl string) error {
    resp, err := http.Get(remoteUrl)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return err
    }

    err = os.WriteFile(path, body, 0644)
    if err != nil {
        return err
    }

    return nil
}
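In main, UpdateLocalFile is invoked with the repo-relative file name and the base remoteUrl. Composing the per-file raw URL and the on-disk destination from those two values would look roughly like this (a sketch under the same root and URL layout as above, not the committed call):

    fileURL := remoteUrl + "/" + filepath.ToSlash(file) // .../raw/branch/master/Content/<file>
    localPath := filepath.Join(root, file)              // the same join used when hashing
    if err := UpdateLocalFile(localPath, fileURL); err != nil {
        Error.Printf("error updating local file: %v", err)
    }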