Compare commits
19 Commits
7c2debf051 ... master

| SHA1 |
|---|
| e841dd5ecb |
| 11beac50eb |
| 5e6a5e830e |
| 9035a8284c |
| 4fbc49c52f |
| cdefc1735f |
| 54cd559abe |
| 69459d8822 |
| 259801c699 |
| 72d8c4052c |
| b44e24e732 |
| 70c417e7f4 |
| 4301294b66 |
| 5a1bff50d3 |
| c1cc508dcf |
| d3efd378f2 |
| 3f659d351d |
| f55a6de8c0 |
| 028aa4e80b |
Heimdall.lua (normal file, 2760 lines)
File diff suppressed because it is too large
README.md (new file, 12 lines)
@@ -0,0 +1,12 @@
Currently the only services used are the main one (see succ.sh)<br>
and the data one.

The NSQ service was meant to be used for pulling achievements from other players,<br>
but currently there are no other players, so it simply is not necessary.

The cacher service is also obsolete because we switched our approach to `service/data/cache.sql`:<br>
a single file, which is much better.<br>
Currently it runs for about 20 seconds for half a million rows in achievements.<br>
Could be better, could be worse.

The querying is then done via `service/data/groupPlayers.sql`, with the query inserted into line 48.
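The README above leaves the actual invocation implicit. A minimal sketch of one way to run those scripts, assuming the achievements sit in the SQLite file referenced elsewhere in this diff (service/data/db.db) and that the sqlite3 CLI is available; the helper below is illustrative and is not part of this commit.

```go
package main

import (
	"os"
	"os/exec"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

// runSQLScript pipes one of the .sql files into the sqlite3 CLI against the
// achievements database. Hypothetical helper: the commit only documents the
// scripts, it does not ship a runner.
func runSQLScript(dbPath, scriptPath string) error {
	script, err := os.Open(scriptPath)
	if err != nil {
		return err
	}
	defer script.Close()

	cmd := exec.Command("sqlite3", dbPath)
	cmd.Stdin = script
	out, err := cmd.CombinedOutput()
	logger.Info("sqlite3 output:\n%s", out)
	return err
}

func main() {
	// e.g. the cache build described above, against the DB path used in the old dbwriter.go
	if err := runSQLScript("service/data/db.db", "service/data/cache.sql"); err != nil {
		logger.Error("cache build failed: %v", err)
	}
}
```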
dbwriter.go (101 changed lines)
@@ -1,9 +1,27 @@
package main

import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"sync"
+	"time"
+
	logger "git.site.quack-lab.dev/dave/cylogger"
)

+const (
+	batchSize = 1000
+	timeout   = 5 * time.Second
+)
+
+var (
+	batchBuffer = make([]NSQMessage, 0, batchSize)
+	batchMutex  sync.Mutex
+)
+
var whitelistedAchievements = map[string]bool{
	"15": true,
	"958": true,
@@ -65,19 +83,90 @@ var whitelistedAchievements = map[string]bool{
	"12448": true,
}

-func Save(message *NSQMessage, db *DB) error {
+func Save(message NSQMessage) error {
	_, ok := whitelistedAchievements[message.ID]
	if !ok {
		logger.Debug("Received message for non-whitelisted achievement %s", message.ID)
		return nil
	}

-	_, err := db.writeConn.Exec("INSERT OR IGNORE INTO achievements (name, id, date, completed) VALUES (?, ?, ?, ?)",
+	batchMutex.Lock()
-		message.Name, message.ID, message.Date, message.Completed)
+	batchBuffer = append(batchBuffer, message)
-	if err != nil {
+	currentBatchSize := len(batchBuffer)
-		logger.Error("Error inserting into database: %v", err)
+	batchMutex.Unlock()
-		return err
+
+	logger.Debug("Added achievement to batch. Current batch size: %d/%d", currentBatchSize, batchSize)
+
+	if currentBatchSize >= batchSize {
+		batchMutex.Lock()
+		batch := make([]NSQMessage, len(batchBuffer))
+		copy(batch, batchBuffer)
+		batchBuffer = batchBuffer[:0]
+		batchMutex.Unlock()
+
+		logger.Info("Batch size reached %d, sending batch to %s", len(batch), backendEndpoint)
+		return sendBatch(batch)
	}

	return nil
}
+
+// Flush sends any remaining achievements in the buffer
+func Flush() error {
+	batchMutex.Lock()
+	if len(batchBuffer) == 0 {
+		batchMutex.Unlock()
+		return nil
+	}
+
+	batch := make([]NSQMessage, len(batchBuffer))
+	copy(batch, batchBuffer)
+	batchBuffer = batchBuffer[:0]
+	batchMutex.Unlock()
+
+	logger.Info("Flushing final batch of %d achievements to %s", len(batch), backendEndpoint)
+	return sendBatch(batch)
+}
+
+func sendBatch(batch []NSQMessage) error {
+	logger.Debug("Preparing to send batch of %d achievements", len(batch))
+
+	data, err := json.Marshal(batch)
+	if err != nil {
+		logger.Error("Failed to marshal batch: %v", err)
+		return fmt.Errorf("error marshaling batch: %v", err)
+	}
+	logger.Debug("Successfully marshaled batch to JSON, size: %d bytes", len(data))
+
+	client := &http.Client{
+		Timeout: timeout,
+	}
+
+	req, err := http.NewRequest("POST", backendEndpoint, bytes.NewBuffer(data))
+	if err != nil {
+		logger.Error("Failed to create HTTP request: %v", err)
+		return fmt.Errorf("error creating request: %v", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	logger.Debug("Created HTTP request to %s", backendEndpoint)
+
+	resp, err := client.Do(req)
+	if err != nil {
+		logger.Error("Failed to send HTTP request: %v", err)
+		return fmt.Errorf("error sending batch: %v", err)
+	}
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		logger.Error("Failed to read response body: %v", err)
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		logger.Error("Received non-OK status code: %d, body: %s", resp.StatusCode, string(body))
+		return fmt.Errorf("unexpected status code: %d, body: %s", resp.StatusCode, string(body))
+	}
+
+	logger.Info("Successfully sent batch of %d achievements", len(batch))
+	return nil
+}
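sendBatch POSTs the whole batch as a JSON array to backendEndpoint. The receiving service is not part of this diff, so the following is only a sketch of a handler that would accept that payload; the NSQMessage shape is inferred from the fields used above (Name, ID, Date, Completed), and the field types and JSON tags are assumptions.

```go
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

// Assumed shape, inferred from the fields dbwriter.go reads; the real struct
// (including its JSON tags and field types) lives elsewhere in the repo.
type NSQMessage struct {
	Name      string `json:"name"`
	ID        string `json:"id"`
	Date      string `json:"date"`
	Completed string `json:"completed"`
}

func main() {
	http.HandleFunc("/achievements", func(w http.ResponseWriter, r *http.Request) {
		var batch []NSQMessage
		if err := json.NewDecoder(r.Body).Decode(&batch); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		log.Printf("received %d achievements", len(batch))
		w.WriteHeader(http.StatusOK)
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```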
main.go (336 changed lines)
@@ -20,14 +20,11 @@ import (
)

const nsqEndpoint = "https://nsq.site.quack-lab.dev/pub?topic=wowspy"
+const backendEndpoint = "https://sniffer-be.site.quack-lab.dev/achievements"

var debug *bool
var nsqWorkers = 32

-var allPlayersAchievementsGlobal = make(map[string][]NSQMessage) // PlayerName -> list of all their achievements
-var allPlayerNamesGlobal = make(map[string]bool)                 // Set of all player names
-var globalDataMutex = &sync.Mutex{}
-
func main() {
	root := flag.String("root", ".", "Root workdir")
	debug = flag.Bool("d", false, "Debug")
@@ -37,19 +34,9 @@ func main() {
		logger.SetLevel(logger.LevelDebug) // Assuming LevelDebug is the correct constant for cylogger
	}

-	db := DB{
-		path: "service/data/db.db",
-	}
-	err := db.Open()
-	if err != nil {
-		logger.Error("error opening database: %v", err)
-		return
-	}
-	defer db.Close()
-
	logger.Info("Root: %q", *root)
	cleanedRoot := strings.Replace(*root, "~", os.Getenv("HOME"), 1)
-	cleanedRoot, err = filepath.Abs(cleanedRoot)
+	cleanedRoot, err := filepath.Abs(cleanedRoot)
	if err != nil {
		logger.Error("error getting absolute path: %v", err)
		return
@@ -68,136 +55,216 @@ func main() {
		logger.Info("No Heimdall.lua files found. Exiting.")
		return
	}
+	for i, match := range matches {
-	// matches = matches[:1]
+		matches[i] = filepath.Join(cleanedRoot, match)

-	// --- Pass 1: Extract all data ---
-	logger.Info("Starting Pass 1: Extracting data from all Heimdall.lua files...")
-	var wgPass1 sync.WaitGroup
-	for _, match := range matches {
-		wgPass1.Add(1)
-		go loadAchievements(filepath.Join(cleanedRoot, match), &wgPass1)
-	}
-	wgPass1.Wait()
-	logger.Info("Finished Pass 1: Loaded %d unique players from %d files.", len(allPlayerNamesGlobal), len(matches))
-	if *debug {
-		globalDataMutex.Lock()
-		logger.Debug("Total achievements loaded globally: %d", countTotalAchievements(allPlayersAchievementsGlobal))
-		globalDataMutex.Unlock()
	}

-	wgSave := sync.WaitGroup{}
+	luaStates := loadLuaStates(matches)
-	wgSave.Add(1)
+	achievements := loadAchievements(luaStates)
+	wg := sync.WaitGroup{}
+	wg.Add(1)
+	// We can save the achievements to the database while doing something else unrelated
	go func() {
-		logger.Info("Saving achievements to database...")
+		defer wg.Done()
-		for playerName, achList := range allPlayersAchievementsGlobal {
+		saveAchievements(achievements)
-			logger.Debug("Saving %d achievements for player %s", len(achList), playerName)
-			for _, ach := range achList {
-				Save(&ach, &db)
-			}
-		}
-		wgSave.Done()
	}()
+	saveAchievementsToSourceFiles(luaStates, achievements)
+	wg.Wait()
+}

-	// --- Process and Send to NSQ ---
+func saveAchievementsToSourceFiles(luaStates *sync.Map, achievements *sync.Map) {
-	// logger.Info("Starting NSQ message publishing...")
+	wg := sync.WaitGroup{}
-	// nsqMessagesChan := make(chan NSQMessage, 10000) // Increased buffer size
+	luaStates.Range(func(k, v any) bool {
-	// var wgNsqWorkers sync.WaitGroup
+		path := k.(string)
-	// for i := 0; i < nsqWorkers; i++ {
+		state := v.(*lua.LState)
-	// 	wgNsqWorkers.Add(1)
+		log := logger.Default.WithPrefix(path)
-	// 	go NsqWorker(&wgNsqWorkers, nsqMessagesChan)
-	// }

-	// go func() {
+		log.Info("Clearing existing achievements")
-	// 	globalDataMutex.Lock()
+		achievementTable := state.GetGlobal("Heimdall_Achievements")
-	// 	defer globalDataMutex.Unlock()
+		if achievementTable.Type() != lua.LTTable {
-	// 	for playerName, achList := range allPlayersAchievementsGlobal {
+			achievementTable = &lua.LTable{}
-	// 		for _, ach := range achList {
+			state.SetGlobal("Heimdall_Achievements", achievementTable)
-	// 			// ach.Name is already correctly set during extraction
-	// 			nsqMessagesChan <- ach
-	// 			logger.Debug("Queued NSQ message for Player: %s, AchID: %s", playerName, ach.ID)
-	// 		}
-	// 	}
-	// 	close(nsqMessagesChan) // Close channel when all messages are sent
-	// 	logger.Info("All NSQ messages queued.")
-	// }()

-	// --- Pass 2: Update Lua file states (in memory) ---
-	logger.Info("Starting Pass 2: Updating Lua states (setting alreadySeen and clearing players)...")
-	var wgPass2 sync.WaitGroup
-	if len(allPlayerNamesGlobal) > 0 { // Only run pass 2 if there are players to report
-		for _, match := range matches {
-			wgPass2.Add(1)
-			go updateLuaFileState(filepath.Join(cleanedRoot, match), &wgPass2, allPlayerNamesGlobal)
		}
-		wgPass2.Wait()
+		log.Info("Clearing existing players table")
-		logger.Info("Finished Pass 2: Lua states updated where applicable.")
+		emptyTable := &lua.LTable{}
-	} else {
+		state.SetField(achievementTable, "players", emptyTable)
-		logger.Info("Skipping Pass 2 as no players were found globally.")
-	}

-	// wgNsqWorkers.Wait() // Wait for all NSQ messages to be processed
+		log.Info("Updating seen table")
-	wgSave.Wait()
+		seenTable := state.GetField(achievementTable, "alreadySeen")
-	logger.Info("All NSQ workers finished. Program complete.")
+		if seenTable.Type() != lua.LTTable {
+			seenTable = &lua.LTable{}
+			state.SetField(achievementTable, "alreadySeen", seenTable)
+		}
+
+		fixSource(path, achievements)
+		return true
+	})
+	wg.Wait()
}

-// Helper function to count total achievements for debugging
+func fixSource(path string, achievements *sync.Map) {
-func countTotalAchievements(achMap map[string][]NSQMessage) int {
-	count := 0
-	for _, achList := range achMap {
-		count += len(achList)
-	}
-	return count
-}
-
-func loadAchievements(path string, wg *sync.WaitGroup) {
	log := logger.Default.WithPrefix(path)
-	log.Info("Extracting achievements")
+	log.Info("Reading source file")
-	defer wg.Done()
+	fileContent, err := os.ReadFile(path)
-	L := lua.NewState()
-	defer L.Close()
-
-	filestat, err := os.Stat(path)
	if err != nil {
-		log.Error("error getting file stats: %v", err)
+		logger.Error("Failed to read file: %v", err)
		return
	}
-	log.Info("File size: %.2f MB", float64(filestat.Size())/1024/1024)
+	strContent := string(fileContent)
+	log.Info("Read %d bytes", len(strContent))

-	log.Info("Running Lua file")
+	strContent = removeAchievements(strContent)
-	if err := L.DoFile(path); err != nil {
+	log.Info("Removed achievements, now %d bytes", len(strContent))
-		log.Error("error executing Lua file %q: %v", path, err)
+	strContent = addAlreadySeen(strContent, achievements)
+	log.Info("Added alreadySeen, now %d bytes", len(strContent))
+
+	log.Info("Writing file")
+	err = os.WriteFile(path, []byte(strContent), 0644)
+	if err != nil {
+		logger.Error("Failed to write file: %v", err)
		return
	}
+	log.Info("Done")
+}
+
+func removeAchievements(sourceContent string) string {
+	lines := strings.Split(sourceContent, "\n")
+	writeIndex := 0
+	isInPlayers := false
+	for _, line := range lines {
+		if strings.HasPrefix(line, "\t[\"players\"] = {") {
+			isInPlayers = true
+			lines[writeIndex] = line
+			writeIndex++
+			continue
+		}
+		if isInPlayers && strings.HasPrefix(line, "\t}") {
+			isInPlayers = false
+			lines[writeIndex] = line
+			writeIndex++
+			continue
+		}
+		if !isInPlayers {
+			lines[writeIndex] = line
+			writeIndex++
+		}
+	}
+	return strings.Join(lines[:writeIndex], "\n")
+}
+
+func addAlreadySeen(strContent string, achievements *sync.Map) string {
+	lines := strings.Split(strContent, "\n")
+	modifiedLines := make([]string, 0, len(lines))
+	for _, line := range lines {
+		if strings.HasPrefix(line, "\t[\"alreadySeen\"] = {") {
+			modifiedLines = append(modifiedLines, line)
+			achievements.Range(func(k, v any) bool {
+				logger.Trace("Adding alreadySeen for %s", k)
+				playerName := k.(string)
+				modifiedLines = append(modifiedLines, fmt.Sprintf("\t\t[\"%s\"] = true,", playerName))
+				return true
+			})
+			continue
+		}
+		modifiedLines = append(modifiedLines, line)
+	}
+	return strings.Join(modifiedLines, "\n")
+}
+
+func saveAchievements(achievements *sync.Map) {
+	count := 0
+	achievements.Range(func(k, v any) bool {
+		playerName := k.(string)
+		playerAchievements := v.(*[]NSQMessage)
+		logger.Debug("Saving %d achievements for player %s", len(*playerAchievements), playerName)
+		for _, ach := range *playerAchievements {
+			Save(ach)
+		}
+		count++
+		if count%1000 == 0 {
+			logger.Info("Saved %d achievements", count)
+		}
+		return true
+	})
+	Flush()
+	logger.Info("Saved %d achievements", count)
+}
+
+func loadLuaStates(matches []string) *sync.Map {
+	wg := sync.WaitGroup{}
+	fileLuaStates := &sync.Map{}
+	for _, match := range matches {
+		wg.Add(1)
+		go func(path string) {
+			defer wg.Done()
+			log := logger.Default.WithPrefix(path)
+			L := lua.NewState()
+
+			filestat, err := os.Stat(match)
+			if err != nil {
+				log.Error("error getting file stats: %v", err)
+				return
+			}
+			log.Info("File size: %.2f MB", float64(filestat.Size())/1024/1024)
+
+			log.Info("Running Lua file")
+			if err := L.DoFile(path); err != nil {
+				log.Error("error executing Lua file %q: %v", path, err)
+				return
+			}
+			log.Info("Lua file loaded")
+			fileLuaStates.Store(match, L)
+		}(match)
+	}
+	wg.Wait()
+	return fileLuaStates
+}
+
+func loadAchievements(luaStates *sync.Map) *sync.Map {
+	achievements := &sync.Map{}
+	wg := sync.WaitGroup{}
+	luaStates.Range(func(path, state any) bool {
+		wg.Add(1)
+		go func(path string, state *lua.LState) {
+			log := logger.Default.WithPrefix(path)
+			defer wg.Done()
+			// We directly mutate achievements to avoid reducing and mapping later on
+			// Removing 1 off of the x of the O(xn)
+			loadStateAchievements(state, log, achievements)
+		}(path.(string), state.(*lua.LState))
+		return true
+	})
+	wg.Wait()
+	return achievements
+}
+func loadStateAchievements(L *lua.LState, log *logger.Logger, achievements *sync.Map) {
	log.Info("Getting Heimdall_Achievements")
	heimdallAchievements := L.GetGlobal("Heimdall_Achievements")
	if heimdallAchievements.Type() == lua.LTNil {
-		log.Warning("Heimdall_Achievements not found in %q. Skipping file.", path)
+		log.Warning("Heimdall_Achievements not found. Skipping file.")
		return
	}

	log.Info("Getting players table")
	playersTableLua := L.GetField(heimdallAchievements, "players")
	if playersTableLua.Type() == lua.LTNil {
-		log.Info("'players' table is nil in Heimdall_Achievements in %q. No player data to extract.", path)
+		log.Info("'players' table is nil in Heimdall_Achievements. No player data to extract.")
		return
	}
	log.Info("Casting players table")
	playersTable, ok := playersTableLua.(*lua.LTable)
	if !ok {
-		log.Warning("'players' field in Heimdall_Achievements is not a table in %q (type: %s). Skipping.", path, playersTableLua.Type().String())
+		log.Warning("'players' field in Heimdall_Achievements is not a table. Skipping.")
		return
	}

-	var filePlayerAchievements []NSQMessage
-	var filePlayerNames = make(map[string]bool)
-
	log.Info("Iterating over players")
	counter := 0
	playersTable.ForEach(func(playerNameLua lua.LValue, playerAchievementsLua lua.LValue) {
		currentPlayerName := playerNameLua.String()
-		filePlayerNames[currentPlayerName] = true // Track name
+		playerAchievements, _ := achievements.LoadOrStore(currentPlayerName, &[]NSQMessage{})
+		playerAchievementsSlice := playerAchievements.(*[]NSQMessage)
+
		achievementsTableLua, ok := playerAchievementsLua.(*lua.LTable)
		if !ok {
@@ -238,7 +305,8 @@ func loadAchievements(path string, wg *sync.WaitGroup) {
			}

			if currentAchievement.ID != "" { // Ensure we have at least an ID before adding
-				filePlayerAchievements = append(filePlayerAchievements, currentAchievement)
+				// Will this change be reflected in the map...?
+				*playerAchievementsSlice = append(*playerAchievementsSlice, currentAchievement)
			}

			counter++
@@ -248,59 +316,13 @@ func loadAchievements(path string, wg *sync.WaitGroup) {
		})
	})
	log.Info("Processed %d achievements", counter)
+	achievements.Range(func(key, value any) bool {
-	if len(filePlayerAchievements) > 0 || len(filePlayerNames) > 0 {
+		log.Trace("Player: %s, Achievements: %d", key, len(*value.(*[]NSQMessage)))
-		globalDataMutex.Lock()
+		return true
-		for _, ach := range filePlayerAchievements {
+	})
-			allPlayersAchievementsGlobal[ach.Name] = append(allPlayersAchievementsGlobal[ach.Name], ach)
-		}
-		for name := range filePlayerNames {
-			allPlayerNamesGlobal[name] = true
-		}
-		globalDataMutex.Unlock()
-		log.Info("Players in file: %d. Achievements in file: %d.", len(filePlayerNames), len(filePlayerAchievements))
-	} else {
-		log.Info("No player data or names extracted")
-	}
}

// updateLuaFileState is for Pass 2
-func updateLuaFileState(path string, wg *sync.WaitGroup, allKnownPlayerNames map[string]bool) {
-	log := logger.Default.WithPrefix(filepath.Base(path))
-	log.Info("Updating Lua state")
-	defer wg.Done()
-	L := lua.NewState()
-	defer L.Close()
-
-	if err := L.DoFile(path); err != nil {
-		log.Error("error executing Lua file %q: %v. Cannot update its state.", path, err)
-		return
-	}
-
-	heimdallAchievementsVal := L.GetGlobal("Heimdall_Achievements")
-	if heimdallAchievementsVal.Type() == lua.LTNil {
-		log.Warning("Heimdall_Achievements not found in %q after script execution. Cannot set 'alreadySeen' or clear 'players'.", path)
-		return
-	}
-
-	heimdallAchievementsTable, ok := heimdallAchievementsVal.(*lua.LTable)
-	if !ok {
-		log.Warning("Heimdall_Achievements in %q is not a table (type: %s). Cannot update.", path, heimdallAchievementsVal.Type().String())
-		return
-	}
-
-	luaAlreadySeen := L.NewTable()
-	for name := range allKnownPlayerNames {
-		luaAlreadySeen.RawSetString(name, lua.LTrue)
-	}
-
-	L.SetField(heimdallAchievementsTable, "alreadySeen", luaAlreadySeen)
-	log.Debug("Set Heimdall_Achievements.alreadySeen for %q with %d total player names.", path, len(allKnownPlayerNames))
-
-	L.SetField(heimdallAchievementsTable, "players", L.NewTable())
-	log.Debug("Cleared Heimdall_Achievements.players for %q.", path)
-}
-
func NsqWorker(wg *sync.WaitGroup, messages <-chan NSQMessage) { // Changed to read-only channel
	defer wg.Done()
	for msg := range messages {
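The new extraction path stores a *[]NSQMessage per player in a sync.Map and appends through that pointer, and the in-code comment above asks whether the append "will be reflected in the map". It is: LoadOrStore hands back the stored pointer, so writes through it are what later Load calls observe. A small self-contained sketch of that behaviour (note that two goroutines appending to the same player's slice would still need their own locking):

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	var m sync.Map

	// First writer creates the slice for the key; later callers get the same pointer back.
	v, _ := m.LoadOrStore("Extazyk", &[]string{})
	slice := v.(*[]string)
	*slice = append(*slice, "achievement 15")

	// A later Load sees the append, because the map holds the pointer,
	// not a copy of the slice header.
	got, _ := m.Load("Extazyk")
	fmt.Println(len(*got.(*[]string))) // prints 1
}
```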
service/chatsniffer/go.mod (new file, 9 lines)
@@ -0,0 +1,9 @@
module chatsniffer

go 1.24.3

require (
	git.site.quack-lab.dev/dave/cylogger v1.2.2
	github.com/bmatcuk/doublestar/v4 v4.8.1
	github.com/yuin/gopher-lua v1.1.1
)
service/chatsniffer/go.sum (new file, 6 lines)
@@ -0,0 +1,6 @@
git.site.quack-lab.dev/dave/cylogger v1.2.2 h1:4xUXASEBlG9NiGxh7f57xHh9imW4unHzakIEpQoKC5E=
git.site.quack-lab.dev/dave/cylogger v1.2.2/go.mod h1:VS9MI4Y/cwjCBZgel7dSfCQlwtAgHmfvixOoBgBhtKg=
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
service/chatsniffer/init_meili.go (new file, 247 lines)
@@ -0,0 +1,247 @@
package main

import (
	"bytes"
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

var meiliToken string

type IndexConfig struct {
	Uid        string `json:"uid"`
	PrimaryKey string `json:"primaryKey"`
}

type ChatMessage struct {
	MessageHash string `json:"message_hash"`
	Timestamp   string `json:"timestamp"`  // ISO timestamp
	EpochTime   int64  `json:"epoch_time"` // Unix epoch timestamp for filtering
	Event       string `json:"event"`
	Sender      string `json:"sender"`
	Msg         string `json:"msg"`
	Language    string `json:"language"`
	Channel     string `json:"channel"`
}

func Init() error {
	// Load Meilisearch token
	meiliToken = os.Getenv("MEILI_TOKEN")
	if meiliToken == "" {
		return fmt.Errorf("MEILI_TOKEN environment variable not set")
	}
	logger.Info("Meilisearch token loaded")

	config := IndexConfig{
		Uid:        meiliIndex,
		PrimaryKey: "message_hash", // Meilisearch will use this for deduplication
	}

	// Create index
	err := createIndex(config)
	if err != nil {
		return fmt.Errorf("error creating index: %v", err)
	}

	// Set up index settings
	err = setIndexSettings()
	if err != nil {
		return fmt.Errorf("error setting index settings: %v", err)
	}

	return nil
}

// GenerateMessageHash creates a unique hash for a message that will be identical
// for identical messages, ensuring perfect deduplication
func GenerateMessageHash(timestamp, event, sender, msg, language, channel string) string {
	// Combine all fields that make a message unique
	content := fmt.Sprintf("%s|%s|%s|%s|%s|%s",
		timestamp,
		event,
		sender,
		msg,
		language,
		channel,
	)

	// Create SHA-256 hash of the combined content
	hash := sha256.Sum256([]byte(content))
	return hex.EncodeToString(hash[:])
}

// AddMessages adds multiple messages to the index in a single batch request
// Meilisearch will handle deduplication based on the message_hash
func AddMessages(messages []ChatMessage) error {
	jsonData, err := json.Marshal(messages)
	if err != nil {
		return fmt.Errorf("error marshaling messages: %v", err)
	}

	req, err := http.NewRequest(
		http.MethodPost,
		meiliEndpoint+"indexes/"+meiliIndex+"/documents",
		bytes.NewBuffer(jsonData),
	)
	if err != nil {
		return fmt.Errorf("error creating request: %v", err)
	}

	req.Header.Set("Authorization", "Bearer "+meiliToken)
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("error adding messages: %v", err)
	}
	defer resp.Body.Close()

	// Read response body for better error messages
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("error reading response body: %v", err)
	}

	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusAccepted {
		return fmt.Errorf("failed to add messages. Status: %d, Response: %s", resp.StatusCode, string(body))
	}

	return nil
}

func createIndex(config IndexConfig) error {
	jsonData, err := json.Marshal(config)
	if err != nil {
		return fmt.Errorf("error marshaling config: %v", err)
	}

	req, err := http.NewRequest(
		http.MethodPost,
		meiliEndpoint+"indexes",
		bytes.NewBuffer(jsonData),
	)
	if err != nil {
		return fmt.Errorf("error creating request: %v", err)
	}

	req.Header.Set("Authorization", "Bearer "+meiliToken)
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("error creating index: %v", err)
	}
	defer resp.Body.Close()

	// Read response body for better error messages
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("error reading response body: %v", err)
	}

	if resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusAccepted {
		return fmt.Errorf("failed to create index. Status: %d, Response: %s", resp.StatusCode, string(body))
	}

	logger.Info("Index created successfully")
	return nil
}

func setIndexSettings() error {
	// First set searchable attributes
	searchableAttributes := []string{
		"timestamp",
		"event",
		"sender",
		"msg",
		"language",
		"channel",
	}

	jsonData, err := json.Marshal(searchableAttributes)
	if err != nil {
		return fmt.Errorf("error marshaling searchable settings: %v", err)
	}

	req, err := http.NewRequest(
		http.MethodPut,
		meiliEndpoint+"indexes/"+meiliIndex+"/settings/searchable-attributes",
		bytes.NewBuffer(jsonData),
	)
	if err != nil {
		return fmt.Errorf("error creating searchable settings request: %v", err)
	}

	req.Header.Set("Authorization", "Bearer "+meiliToken)
	req.Header.Set("Content-Type", "application/json")

	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return fmt.Errorf("error setting searchable attributes: %v", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("error reading searchable settings response: %v", err)
	}

	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusAccepted {
		return fmt.Errorf("failed to set searchable attributes. Status: %d, Response: %s", resp.StatusCode, string(body))
	}

	// Then set filterable attributes
	filterableAttributes := []string{
		"timestamp",
		"epoch_time", // Add epoch_time for numeric filtering
		"event",
		"sender",
		"language",
		"channel",
	}

	jsonData, err = json.Marshal(filterableAttributes)
	if err != nil {
		return fmt.Errorf("error marshaling filterable settings: %v", err)
	}

	req, err = http.NewRequest(
		http.MethodPut,
		meiliEndpoint+"indexes/"+meiliIndex+"/settings/filterable-attributes",
		bytes.NewBuffer(jsonData),
	)
	if err != nil {
		return fmt.Errorf("error creating filterable settings request: %v", err)
	}

	req.Header.Set("Authorization", "Bearer "+meiliToken)
	req.Header.Set("Content-Type", "application/json")

	resp, err = client.Do(req)
	if err != nil {
		return fmt.Errorf("error setting filterable attributes: %v", err)
	}
	defer resp.Body.Close()

	body, err = io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("error reading filterable settings response: %v", err)
	}

	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusAccepted {
		return fmt.Errorf("failed to set filterable attributes. Status: %d, Response: %s", resp.StatusCode, string(body))
	}

	logger.Info("Index settings set successfully")
	return nil
}
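init_meili.go only creates the index and pushes documents. For completeness, a hedged sketch of reading data back through Meilisearch's standard search endpoint (POST /indexes/&lt;uid&gt;/search), filtering on the epoch_time attribute that setIndexSettings marks as filterable. It assumes it lives in the same package as the code above, so meiliEndpoint, meiliIndex, meiliToken and the imports are in scope; it is not part of this commit.

```go
// Sketch only, not part of this commit: assumes it sits in the same package as
// init_meili.go so meiliEndpoint, meiliIndex and meiliToken are in scope.
func searchMessages(query string, since int64) ([]byte, error) {
	payload, err := json.Marshal(map[string]any{
		"q":      query,
		"filter": fmt.Sprintf("epoch_time >= %d", since), // epoch_time is filterable, see setIndexSettings
		"limit":  50,
	})
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest(
		http.MethodPost,
		meiliEndpoint+"indexes/"+meiliIndex+"/search",
		bytes.NewBuffer(payload),
	)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+meiliToken)
	req.Header.Set("Content-Type", "application/json")

	resp, err := (&http.Client{}).Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// The raw JSON response; callers would decode the "hits" array.
	return io.ReadAll(resp.Body)
}
```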
service/chatsniffer/main.go (new file, 267 lines)
@@ -0,0 +1,267 @@
package main

import (
	"flag"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	logger "git.site.quack-lab.dev/dave/cylogger"
	"github.com/bmatcuk/doublestar/v4"
	lua "github.com/yuin/gopher-lua"
)

const meiliEndpoint = "https://meili.site.quack-lab.dev/"
const meiliIndex = "chatlog"

var debug *bool

func main() {
	root := flag.String("root", ".", "Root workdir")
	debug = flag.Bool("d", false, "Debug")
	flag.Parse()
	logger.InitFlag()
	if *debug {
		logger.SetLevel(logger.LevelDebug)
	}

	err := Init()
	if err != nil {
		logger.Error("Failed to initialize Meilisearch: %v", err)
		return
	}

	logger.Info("Root: %q", *root)
	cleanedRoot := strings.Replace(*root, "~", os.Getenv("HOME"), 1)
	cleanedRoot, err = filepath.Abs(cleanedRoot)
	if err != nil {
		logger.Error("error getting absolute path: %v", err)
		return
	}
	cleanedRoot = filepath.Clean(cleanedRoot)
	cleanedRoot = strings.TrimSuffix(cleanedRoot, "/")

	logger.Info("Looking for Heimdall.lua in %q", cleanedRoot)
	matches, err := doublestar.Glob(os.DirFS(cleanedRoot), "**/Heimdall.lua")
	if err != nil {
		logger.Error("error matching Heimdall.lua: %v", err)
		return
	}
	logger.Info("Found %d Heimdall.lua files.", len(matches))
	if len(matches) == 0 {
		logger.Info("No Heimdall.lua files found. Exiting.")
		return
	}
	for i, match := range matches {
		matches[i] = filepath.Join(cleanedRoot, match)
	}

	luaStates := loadLuaStates(matches)
	chatMessages := loadChatMessages(luaStates)

	// Save messages to Meilisearch
	if err := AddMessages(chatMessages); err != nil {
		logger.Error("Failed to save messages: %v", err)
		return
	}
	logger.Info("Successfully saved %d messages", len(chatMessages))

	// Clear chat tables in source files
	clearChatTables(matches)
}

func loadLuaStates(matches []string) *sync.Map {
	wg := sync.WaitGroup{}
	fileLuaStates := &sync.Map{}
	for _, match := range matches {
		wg.Add(1)
		go func(path string) {
			defer wg.Done()
			log := logger.Default.WithPrefix(path)
			L := lua.NewState()

			filestat, err := os.Stat(path)
			if err != nil {
				log.Error("error getting file stats: %v", err)
				return
			}
			log.Info("File size: %.2f MB", float64(filestat.Size())/1024/1024)

			log.Info("Running Lua file")
			if err := L.DoFile(path); err != nil {
				log.Error("error executing Lua file %q: %v", path, err)
				return
			}
			log.Info("Lua file loaded")
			fileLuaStates.Store(path, L)
		}(match)
	}
	wg.Wait()
	return fileLuaStates
}

func loadChatMessages(luaStates *sync.Map) []ChatMessage {
	var messages []ChatMessage
	wg := sync.WaitGroup{}
	messageChan := make(chan []ChatMessage, 100) // Buffer for concurrent processing

	luaStates.Range(func(path, state any) bool {
		wg.Add(1)
		go func(path string, state *lua.LState) {
			defer wg.Done()
			log := logger.Default.WithPrefix(path)
			fileMessages := loadStateChatMessages(state, log)
			messageChan <- fileMessages
		}(path.(string), state.(*lua.LState))
		return true
	})

	// Close channel when all goroutines are done
	go func() {
		wg.Wait()
		close(messageChan)
	}()

	// Collect all messages
	for fileMessages := range messageChan {
		messages = append(messages, fileMessages...)
	}

	return messages
}

func loadStateChatMessages(L *lua.LState, log *logger.Logger) []ChatMessage {
	log.Info("Getting Heimdall_Chat")
	heimdallChat := L.GetGlobal("Heimdall_Chat")
	if heimdallChat.Type() == lua.LTNil {
		log.Warning("Heimdall_Chat not found. Skipping file.")
		return nil
	}

	chatTable, ok := heimdallChat.(*lua.LTable)
	if !ok {
		log.Warning("Heimdall_Chat is not a table. Skipping file.")
		return nil
	}

	var messages []ChatMessage
	chatTable.ForEach(func(_, value lua.LValue) {
		chatStr := value.String()
		// Remove quotes and trailing comma if present
		chatStr = strings.Trim(chatStr, "\", ")

		// Parse the chat message
		message, err := parseChatMessage(chatStr)
		if err != nil {
			log.Warning("Invalid chat format: %s", chatStr)
			return
		}
		messages = append(messages, message)
	})

	log.Info("Loaded %d chat messages", len(messages))
	return messages
}

func parseChatMessage(chatStr string) (ChatMessage, error) {
	// Debug: Print the raw string
	logger.Debug("Raw chat string: %q", chatStr)

	// Split by pipe - we expect 6 parts (5 pipes)
	parts := strings.Split(chatStr, "|")
	logger.Debug("Split into %d parts: %v", len(parts), parts)

	if len(parts) != 6 {
		return ChatMessage{}, fmt.Errorf("invalid message format: expected 6 parts, got %d: %s", len(parts), chatStr)
	}

	timestamp := parts[0]
	event := parts[1]
	sender := parts[2]
	msg := parts[3]
	language := parts[4]
	channel := parts[5]

	// Parse ISO timestamp to epoch
	epochTime, err := parseISOTimestamp(timestamp)
	if err != nil {
		return ChatMessage{}, fmt.Errorf("invalid timestamp format: %v", err)
	}

	return ChatMessage{
		MessageHash: GenerateMessageHash(timestamp, event, sender, msg, language, channel),
		Timestamp:   timestamp,
		EpochTime:   epochTime,
		Event:       event,
		Sender:      sender,
		Msg:         msg,
		Language:    language,
		Channel:     channel,
	}, nil
}

func parseISOTimestamp(isoTime string) (int64, error) {
	// Debug: Print the timestamp we're trying to parse
	logger.Debug("Parsing timestamp: %q", isoTime)

	// Parse the timestamp format used in chat messages (e.g., "2025-05-25 02:51:05")
	t, err := time.Parse("2006-01-02 15:04:05", isoTime)
	if err != nil {
		logger.Debug("Failed to parse timestamp: %v", err)
		return 0, err
	}
	return t.Unix(), nil
}

func clearChatTables(matches []string) {
	for _, path := range matches {
		log := logger.Default.WithPrefix(path)
		log.Info("Reading source file")
		fileContent, err := os.ReadFile(path)
		if err != nil {
			logger.Error("Failed to read file: %v", err)
			continue
		}
		strContent := string(fileContent)
		log.Info("Read %d bytes", len(strContent))

		strContent = clearChatTable(strContent)
		log.Info("Cleared chat table, now %d bytes", len(strContent))

		log.Info("Writing file")
		err = os.WriteFile(path, []byte(strContent), 0644)
		if err != nil {
			logger.Error("Failed to write file: %v", err)
			continue
		}
		log.Info("Done")
	}
}

func clearChatTable(sourceContent string) string {
	lines := strings.Split(sourceContent, "\n")
	writeIndex := 0
	isInChat := false
	for _, line := range lines {
		if strings.HasPrefix(line, "Heimdall_Chat = {") {
			isInChat = true
			lines[writeIndex] = "Heimdall_Chat = {"
			writeIndex++
			continue
		}
		if isInChat && strings.HasPrefix(line, "}") {
			isInChat = false
			lines[writeIndex] = "}"
			writeIndex++
			continue
		}
		if !isInChat {
			lines[writeIndex] = line
			writeIndex++
		}
	}
	return strings.Join(lines[:writeIndex], "\n")
}
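parseChatMessage expects every Heimdall_Chat entry to be one pipe-delimited string with exactly six fields and a "2006-01-02 15:04:05" timestamp. A tiny illustration in the same package; the event, sender and channel values are invented, only the shape matches what the parser requires.

```go
// Illustration only: exercises parseChatMessage from this file with an invented
// chat line; only the six-field pipe format and the timestamp layout are real.
func exampleParseChatMessage() {
	raw := "2025-05-25 02:51:05|CHAT_MSG_CHANNEL|Somedude|lf2m dungeon|Common|General"
	msg, err := parseChatMessage(raw)
	if err != nil {
		logger.Error("parse failed: %v", err)
		return
	}
	logger.Info("sender=%s channel=%s epoch=%d", msg.Sender, msg.Channel, msg.EpochTime)
}
```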
service/chatsniffer/succ.sh (new file, 1 line)
@@ -0,0 +1 @@
./chatsniffer.exe -root "C:/Users/Administrator/Seafile/Games-WoW/Ruski/WTF/"
@@ -4,14 +4,14 @@ with char1_achievements as (
    date,
    completed
from achievements
-where name = 'Extazyk'
+where name = $1
),
char2_achievements as (
    select id,
    date,
    completed
from achievements
-where name = 'Smokemantra'
+where name = $2
),
all_achievements as (
    select id
@@ -5,15 +5,15 @@ with RECURSIVE connected_players as (
    total_achievements1 as achievements,
    similarity_percentage
from similar_pairs
-where similarity_percentage >= 70
+where similarity_percentage >= $1
union
select name2,
    name1,
    matching_count,
    total_achievements2,
    similarity_percentage
from similar_pairs
-where similarity_percentage >= 70
+where similarity_percentage >= $1
union
select case
    when sp.name1 = cp.player_name then sp.name2
@@ -31,7 +31,7 @@ with RECURSIVE connected_players as (
    sp.name1 = cp.player_name
    or sp.name2 = cp.player_name
)
-and sp.similarity_percentage >= 70
+and sp.similarity_percentage >= $1
where case
    when sp.name1 = cp.player_name then sp.name2
    else sp.name2
@@ -45,7 +45,7 @@ select group_root,
    MIN(matching_count) as min_matching,
    AVG(matching_count) as avg_matching
from connected_players
-where group_root in ('Paskoo')
+where group_root in ($2)
group by group_root
having count(*) > 1
order by count(*) desc,
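Both SQL files now take bind parameters instead of hardcoded literals: the per-character comparison takes the two names as $1 and $2, and the grouping query takes the similarity threshold as $1 and the group root as $2 (previously 70 and 'Paskoo'). A minimal sketch of executing the grouping query from Go with database/sql; the Postgres-style driver, the connection string, and the assumption that this file is the service/data/groupPlayers.sql mentioned in the README are all guesses, since the service that runs these queries is not part of this diff.

```go
package main

import (
	"database/sql"
	"log"
	"os"

	_ "github.com/lib/pq" // assumed driver, matching the $1/$2 placeholder style
)

func main() {
	db, err := sql.Open("postgres", os.Getenv("DATABASE_URL"))
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Hypothetical path from the README; the real caller is not shown in this diff.
	query, err := os.ReadFile("service/data/groupPlayers.sql")
	if err != nil {
		log.Fatal(err)
	}

	// $1 = similarity threshold, $2 = group root player name
	// (the values 70 and "Paskoo" were hardcoded before this change).
	rows, err := db.Query(string(query), 70, "Paskoo")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	groups := 0
	for rows.Next() {
		groups++
	}
	log.Printf("query returned %d group rows", groups)
}
```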