Initial commit

This commit is contained in:
2025-05-18 23:48:22 +02:00
commit 0806e0c845
28 changed files with 2624675 additions and 0 deletions

View File

@@ -0,0 +1,84 @@
package main
import (
"database/sql"
"fmt"
"log"
"os"
"time"
_ "github.com/mattn/go-sqlite3"
)
// DB wraps a pair of SQLite connection pools over a single database file:
// a single-connection write pool (SQLite allows one writer at a time) and a
// read-only pool of up to four connections.
type DB struct {
	Ready     bool    // true once both pools are open
	path      string  // filesystem path to the SQLite database file
	readConn  *sql.DB // read-only pool
	writeConn *sql.DB // single-connection write pool
}

// Open ensures the database file exists (creating it if necessary) and opens
// the read and write pools in WAL mode with NORMAL synchronous setting.
func (db *DB) Open() error {
	if db.path == "" {
		return fmt.Errorf("database path not set")
	}
	// BUG FIX: the original os.Open/file.Close dance called Close on a nil
	// *os.File when the file had just been created; a Stat check avoids
	// opening the file at all.
	if _, err := os.Stat(db.path); err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("failed to stat database file: %v", err)
		}
		log.Printf("Database file does not exist at %s, creating", db.path)
		f, err := os.Create(db.path)
		if err != nil {
			return fmt.Errorf("failed to create database file: %v", err)
		}
		if err := f.Close(); err != nil {
			return fmt.Errorf("failed to close new database file: %v", err)
		}
		log.Printf("Database created at %s", db.path)
	}
	writeConn, err := sql.Open("sqlite3", db.path+"?_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		return fmt.Errorf("failed to open write connection: %v", err)
	}
	writeConn.SetMaxOpenConns(1)
	writeConn.SetConnMaxIdleTime(30 * time.Second)
	writeConn.SetConnMaxLifetime(30 * time.Second)
	db.writeConn = writeConn
	// BUG FIX: the original DSN carried both mode=ro and a redundant
	// _mode=ro; one read-only flag is enough.
	readConn, err := sql.Open("sqlite3", db.path+"?mode=ro&_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		// Don't leak the write pool if the read pool fails to open.
		writeConn.Close()
		db.writeConn = nil
		return fmt.Errorf("failed to open read connection: %v", err)
	}
	readConn.SetMaxOpenConns(4)
	readConn.SetConnMaxIdleTime(30 * time.Second)
	readConn.SetConnMaxLifetime(30 * time.Second)
	db.readConn = readConn
	db.Ready = true
	return nil
}

// Init checks that Open has succeeded. The ddl argument is currently
// unused -- presumably a placeholder for schema creation; confirm intent.
func (db *DB) Init(ddl string) error {
	if !db.Ready {
		return fmt.Errorf("database not ready")
	}
	return nil
}

// Close shuts down both pools. BUG FIX: the original returned on the first
// error and leaked the read pool; this version always closes both and
// reports the first error encountered. Nil pools are skipped so Close is
// safe on a zero-value DB.
func (db *DB) Close() error {
	var firstErr error
	if db.writeConn != nil {
		if err := db.writeConn.Close(); err != nil {
			firstErr = err
		}
	}
	if db.readConn != nil {
		if err := db.readConn.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	return firstErr
}

View File

@@ -0,0 +1,5 @@
module main
go 1.23.2
require github.com/mattn/go-sqlite3 v1.14.24

View File

@@ -0,0 +1,2 @@
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=

View File

@@ -0,0 +1,128 @@
package main
import (
"fmt"
"io"
"log"
"os"
"strings"
_ "embed"
)
// Error logs with a red ERROR prefix to main.log, stderr and stdout.
var Error *log.Logger

// Warning logs with a yellow Warning prefix to main.log and stdout.
var Warning *log.Logger

// init wires all logging to main.log (mirrored to the console) and builds
// the package-level Error and Warning loggers. Failure to create the log
// file aborts the process.
func init() {
	log.SetFlags(log.Lmicroseconds | log.Lshortfile)
	logFile, err := os.Create("main.log")
	if err != nil {
		log.Printf("Error creating log file: %v", err)
		os.Exit(1)
	}
	log.SetOutput(io.MultiWriter(os.Stdout, logFile))
	const flags = log.Lmicroseconds | log.Lshortfile
	errPrefix := fmt.Sprintf("%sERROR:%s ", "\033[0;101m", "\033[0m")
	warnPrefix := fmt.Sprintf("%sWarning:%s ", "\033[0;93m", "\033[0m")
	Error = log.New(io.MultiWriter(logFile, os.Stderr, os.Stdout), errPrefix, flags)
	Warning = log.New(io.MultiWriter(logFile, os.Stdout), warnPrefix, flags)
}
// db is the shared handle to the achievements database.
var db DB

// main loads the distinct character names found in the achievements table
// and rewrites each configured WeakAuras.lua saved-variables file so its
// AchievementSniffer2 table lists exactly those characters.
func main() {
	db = DB{
		path: "../data/db.db",
	}
	err := db.Open()
	if err != nil {
		Error.Printf("Error opening database: %v", err)
		return
	}
	defer db.Close()
	rows, err := db.readConn.Query("SELECT name FROM achievements GROUP BY name")
	if err != nil {
		Error.Printf("Error reading from database: %v", err)
		return
	}
	defer rows.Close()
	characters := map[string]struct{}{}
	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			Error.Printf("Error scanning row: %v", err)
			return
		}
		characters[name] = struct{}{}
	}
	// BUG FIX: the original never checked rows.Err(), so an iteration that
	// aborted early was silently treated as a complete character list.
	if err := rows.Err(); err != nil {
		Error.Printf("Error iterating rows: %v", err)
		return
	}
	log.Printf("Loaded %d characters", len(characters))
	weakauras := []string{
		"C:/Users/Administrator/Seafile/WoW/Ruski/WTF/Account/Iridian/SavedVariables/WeakAuras.lua",
		"C:/Users/Administrator/Seafile/WoW/Ruski/WTF/Account/phatphuckdave/SavedVariables/WeakAuras.lua",
		"C:/Users/Administrator/Seafile/WoW/Ruski/WTF/Account/phaterphuckdave/SavedVariables/WeakAuras.lua",
	}
	for _, path := range weakauras {
		if err := CookWeakAuras(path, characters); err != nil {
			Error.Printf("Error cooking WeakAuras.lua: %v", err)
			return
		}
	}
}
func CookWeakAuras(path string, characters map[string]struct{}) error {
filedata, err := os.ReadFile(path)
if err != nil {
return err
}
modifiedLines := []string{}
lines := strings.Split(string(filedata), "\n")
log.Printf("Original lines: %d", len(lines))
inTable := false
for _, line := range lines {
if strings.Contains(line, ` ["AchievementSniffer2"] = {`) {
inTable = true
modifiedLines = append(modifiedLines, line)
for character := range characters {
modifiedLines = append(modifiedLines, "\t\t\t"+fmt.Sprintf(`["%s"] = true,`, character))
}
continue
}
if inTable {
if strings.Contains(line, "},") {
inTable = false
modifiedLines = append(modifiedLines, line)
}
continue
}
modifiedLines = append(modifiedLines, line)
}
log.Printf("Modified lines: %d", len(modifiedLines))
err = os.WriteFile(path, []byte(strings.Join(modifiedLines, "\n")), 0644)
if err != nil {
return err
}
err = os.WriteFile(path+".bak", []byte(strings.Join(lines, "\n")), 0644)
if err != nil {
return err
}
return nil
}
// /run WeakAurasSaved.Cyka.MData = nil

2623417
service/WAdeduplicator/out.test Normal file

File diff suppressed because one or more lines are too long

21
service/cacher/cache.sql Normal file
View File

@@ -0,0 +1,21 @@
-- Computes the similarity between two characters ($1, $2) and caches it:
-- matching achievements (same id and date on both) as a percentage of the
-- distinct achievement ids either character has.
with common_rows as (
select count(*) as match_count
from achievements as t1
join achievements as t2 on t1.id = t2.id
and t1.date = t2.date
where t1.name = $1
and t2.name = $2
),
total_rows as (
select count(distinct id) as total_count
from achievements
where name in ($1, $2)
),
similarity as (
-- 100.0 forces real (non-truncating) division.
select (match_count * 100.0 / total_count) as similarity_percentage
from common_rows,
total_rows
)
insert into cached (character1, character2, similarity)
select $1, $2, similarity_percentage
from similarity;

View File

@@ -0,0 +1,16 @@
-- Returns the similarity between two characters ($1, $2): achievements
-- matching on id and date, as a percentage of the distinct achievement ids
-- either character has.
with common_rows as (
    select count(*) as match_count
    from achievements as t1
        join achievements as t2 on t1.id = t2.id
        and t1.date = t2.date
    where t1.name = $1
        and t2.name = $2
),
total_rows as (
    select count(distinct id) as total_count
    from achievements
    where name in ($1, $2)
)
-- BUG FIX: use 100.0 so SQLite performs real division; the original
-- `match_count * 100 / total_count` truncated to an integer, unlike the
-- equivalent calculation in cache.sql.
select (match_count * 100.0 / total_count) as similarity
from common_rows,
    total_rows;

84
service/cacher/db.go Normal file
View File

@@ -0,0 +1,84 @@
package main
import (
"database/sql"
"fmt"
"log"
"os"
"time"
_ "github.com/mattn/go-sqlite3"
)
// DB wraps a pair of SQLite connection pools over a single database file:
// a single-connection write pool (SQLite allows one writer at a time) and a
// read-only pool of up to four connections.
type DB struct {
	Ready     bool    // true once both pools are open
	path      string  // filesystem path to the SQLite database file
	readConn  *sql.DB // read-only pool
	writeConn *sql.DB // single-connection write pool
}

// Open ensures the database file exists (creating it if necessary) and opens
// the read and write pools in WAL mode with NORMAL synchronous setting.
func (db *DB) Open() error {
	if db.path == "" {
		return fmt.Errorf("database path not set")
	}
	// BUG FIX: the original os.Open/file.Close dance called Close on a nil
	// *os.File when the file had just been created; a Stat check avoids
	// opening the file at all.
	if _, err := os.Stat(db.path); err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("failed to stat database file: %v", err)
		}
		log.Printf("Database file does not exist at %s, creating", db.path)
		f, err := os.Create(db.path)
		if err != nil {
			return fmt.Errorf("failed to create database file: %v", err)
		}
		if err := f.Close(); err != nil {
			return fmt.Errorf("failed to close new database file: %v", err)
		}
		log.Printf("Database created at %s", db.path)
	}
	writeConn, err := sql.Open("sqlite3", db.path+"?_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		return fmt.Errorf("failed to open write connection: %v", err)
	}
	writeConn.SetMaxOpenConns(1)
	writeConn.SetConnMaxIdleTime(30 * time.Second)
	writeConn.SetConnMaxLifetime(30 * time.Second)
	db.writeConn = writeConn
	// BUG FIX: the original DSN carried both mode=ro and a redundant
	// _mode=ro; one read-only flag is enough.
	readConn, err := sql.Open("sqlite3", db.path+"?mode=ro&_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		// Don't leak the write pool if the read pool fails to open.
		writeConn.Close()
		db.writeConn = nil
		return fmt.Errorf("failed to open read connection: %v", err)
	}
	readConn.SetMaxOpenConns(4)
	readConn.SetConnMaxIdleTime(30 * time.Second)
	readConn.SetConnMaxLifetime(30 * time.Second)
	db.readConn = readConn
	db.Ready = true
	return nil
}

// Init checks that Open has succeeded. The ddl argument is currently
// unused -- presumably a placeholder for schema creation; confirm intent.
func (db *DB) Init(ddl string) error {
	if !db.Ready {
		return fmt.Errorf("database not ready")
	}
	return nil
}

// Close shuts down both pools. BUG FIX: the original returned on the first
// error and leaked the read pool; this version always closes both and
// reports the first error encountered. Nil pools are skipped so Close is
// safe on a zero-value DB.
func (db *DB) Close() error {
	var firstErr error
	if db.writeConn != nil {
		if err := db.writeConn.Close(); err != nil {
			firstErr = err
		}
	}
	if db.readConn != nil {
		if err := db.readConn.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	return firstErr
}

5
service/cacher/go.mod Normal file
View File

@@ -0,0 +1,5 @@
module main
go 1.22.2
require github.com/mattn/go-sqlite3 v1.14.24

2
service/cacher/go.sum Normal file
View File

@@ -0,0 +1,2 @@
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=

148
service/cacher/main.go Normal file
View File

@@ -0,0 +1,148 @@
package main
import (
"fmt"
"io"
"log"
"os"
"strings"
_ "embed"
)
// Error logs with a red ERROR prefix to main.log, stderr and stdout.
var Error *log.Logger

// Warning logs with a yellow Warning prefix to main.log and stdout.
var Warning *log.Logger

// init wires all logging to main.log (mirrored to the console) and builds
// the package-level Error and Warning loggers. Failure to create the log
// file aborts the process.
func init() {
	log.SetFlags(log.Lmicroseconds | log.Lshortfile)
	logFile, err := os.Create("main.log")
	if err != nil {
		log.Printf("Error creating log file: %v", err)
		os.Exit(1)
	}
	log.SetOutput(io.MultiWriter(os.Stdout, logFile))
	const flags = log.Lmicroseconds | log.Lshortfile
	errPrefix := fmt.Sprintf("%sERROR:%s ", "\033[0;101m", "\033[0m")
	warnPrefix := fmt.Sprintf("%sWarning:%s ", "\033[0;93m", "\033[0m")
	Error = log.New(io.MultiWriter(logFile, os.Stderr, os.Stdout), errPrefix, flags)
	Warning = log.New(io.MultiWriter(logFile, os.Stdout), warnPrefix, flags)
}
// db is the shared handle to the achievements database.
var db DB

// cacheSql computes and inserts one pair's similarity into cached.
// NOTE(review): not referenced in this file's visible code -- confirm
// whether it is still needed.
//go:embed cache.sql
var cacheSql string

// compareSql returns the similarity percentage for one character pair.
//go:embed compare.sql
var compareSql string

// uniqueNonCachedPairsSql lists character pairs with no cached row yet.
//go:embed unique-noncached-pairs.sql
var uniqueNonCachedPairsSql string
// main opens the shared database and keeps processing batches of 50000
// uncached pairs until Process returns an error (which includes the
// "no pairs found" condition that signals completion).
func main() {
	db = DB{path: "../data/db.db"}
	if err := db.Open(); err != nil {
		Error.Printf("Error opening database: %v", err)
		return
	}
	defer db.Close()
	for {
		if err := Process(50000); err != nil {
			Error.Printf("Error processing: %v", err)
			return
		}
	}
}
// ComputeSimilarity runs the comparison query for the given pair and
// returns the resulting similarity percentage.
func ComputeSimilarity(pair CharPair) (float64, error) {
	var similarity float64
	row := db.readConn.QueryRow(compareSql, pair.A, pair.B)
	if err := row.Scan(&similarity); err != nil {
		return 0, fmt.Errorf("error scanning result: %v", err)
	}
	return similarity, nil
}
// CharPair is a pair of character names whose achievement histories are
// compared for similarity.
type CharPair struct {
	A string
	B string
}
// GetUncachedPairs returns up to n character pairs that do not yet have a
// row in the cached table, in the order produced by the query.
func GetUncachedPairs(n int) ([]CharPair, error) {
	rows, err := db.readConn.Query(uniqueNonCachedPairsSql, n)
	if err != nil {
		return nil, fmt.Errorf("error running query: %v", err)
	}
	defer rows.Close()
	res := []CharPair{}
	for rows.Next() {
		var a, b string
		if err := rows.Scan(&a, &b); err != nil {
			return nil, fmt.Errorf("error scanning row: %v", err)
		}
		res = append(res, CharPair{A: a, B: b})
	}
	// BUG FIX: the original ignored rows.Err() and could return a silently
	// truncated list when iteration aborted early.
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("error iterating rows: %v", err)
	}
	return res, nil
}
// CacheSimilarity stores one computed pair similarity in the cached table.
func CacheSimilarity(pair CharPair, similarity float64) error {
	log.Printf("Caching similarity for %s and %s (%.1f)", pair.A, pair.B, similarity)
	const insert = "INSERT INTO cached (character1, character2, similarity) VALUES (?, ?, ?);"
	if _, err := db.writeConn.Exec(insert, pair.A, pair.B, similarity); err != nil {
		return fmt.Errorf("error running query: %v", err)
	}
	return nil
}
// Process computes the similarity for up to n uncached character pairs and
// inserts all results into the cached table with one batched statement.
// It returns an error when no uncached pairs remain, which callers use as
// the termination signal.
func Process(n int) error {
	log.Printf("Processing %d pairs", n)
	pairs, err := GetUncachedPairs(n)
	if err != nil {
		return fmt.Errorf("error getting pairs: %v", err)
	}
	if len(pairs) == 0 {
		return fmt.Errorf("no pairs found, done?")
	}
	// SECURITY/BUG FIX: the original interpolated character names directly
	// into the SQL text ('%s'), so a name containing a single quote would
	// corrupt the statement. Build placeholders and bind the values instead.
	var query strings.Builder
	query.WriteString("INSERT INTO cached (character1, character2, similarity) VALUES ")
	args := make([]any, 0, len(pairs)*3)
	for i, pair := range pairs {
		similarity, err := ComputeSimilarity(pair)
		if err != nil {
			return fmt.Errorf("error computing similarity: %v", err)
		}
		if i > 0 {
			query.WriteString(",")
		}
		query.WriteString("(?, ?, ?)")
		args = append(args, pair.A, pair.B, similarity)
	}
	query.WriteString(";")
	// BUG FIX: the original logged n here even when fewer pairs remained.
	log.Printf("Saving %d pairs", len(pairs))
	if _, err := db.writeConn.Exec(query.String(), args...); err != nil {
		return fmt.Errorf("error running query: %v", err)
	}
	return nil
}

View File

@@ -0,0 +1,21 @@
-- Returns up to ? character pairs (a1.name < a2.name avoids duplicates and
-- self-pairs) that share at least one achievement id and have no row in
-- cached yet, in either column order.
with unique_pairs as (
select distinct a1.name as character1,
a2.name as character2
from achievements a1
join achievements a2 on a1.id = a2.id
and a1.name < a2.name
)
select up.character1,
up.character2
from unique_pairs up
left join cached c on (
up.character1 = c.character1
and up.character2 = c.character2
)
or (
up.character1 = c.character2
and up.character2 = c.character1
)
-- Anti-join: keep only pairs with no cached match.
where c.character1 is null
and c.character2 is null
limit ?

84
service/data/cache.sql Normal file
View File

@@ -0,0 +1,84 @@
-- Recomputes pairwise character similarity and upserts into similar_pairs.
-- Matching is on (id, date, completed); only pairs with at least 10 matches
-- and >= 70% similarity (relative to the smaller total) are stored.
with
-- For each unordered pair (name1 < name2), the latest date at which they
-- matched on an achievement -- used as the comparison cutoff.
pair_cutoff_dates as (
select
case
when a1.name < a2.name then a1.name
else a2.name
end as name1,
case
when a1.name < a2.name then a2.name
else a1.name
end as name2,
MAX(a1.date) as cutoff_date
from
achievements a1
join achievements a2 on a1.id = a2.id
and a1.date = a2.date
and a1.completed = a2.completed
and a1.name != a2.name
group by
case
when a1.name < a2.name then a1.name
else a2.name
end,
case
when a1.name < a2.name then a2.name
else a1.name
end
),
-- Count of matching achievements per pair, restricted to the cutoff date.
matching_achievements as (
select
pcd.name1,
pcd.name2,
pcd.cutoff_date,
count(*) as matching_count
from
achievements a1
join achievements a2 on a1.id = a2.id
and a1.date = a2.date
and a1.completed = a2.completed
join pair_cutoff_dates pcd on (
a1.name = pcd.name1
and a2.name = pcd.name2
)
where
a1.date <= pcd.cutoff_date
group by
pcd.name1,
pcd.name2,
pcd.cutoff_date
),
-- Per-character total achievement counts (denominator for the percentage).
achievement_counts as (
select
name,
count(*) as total_achievements
from
achievements
group by
name
)
insert or replace into
similar_pairs (
name1,
name2,
matching_count,
total_achievements1,
total_achievements2,
similarity_percentage,
cutoff_date
)
select
m.name1,
m.name2,
m.matching_count,
ac1.total_achievements,
ac2.total_achievements,
CAST(m.matching_count as REAL) * 100.0 / MIN(ac1.total_achievements, ac2.total_achievements) as similarity_percentage,
m.cutoff_date
from
matching_achievements m
join achievement_counts ac1 on m.name1 = ac1.name
join achievement_counts ac2 on m.name2 = ac2.name
where
m.matching_count >= 10
and CAST(m.matching_count as REAL) * 100.0 / MIN(ac1.total_achievements, ac2.total_achievements) >= 70.0;

22
service/data/ddl.sql Normal file
View File

@@ -0,0 +1,22 @@
-- Schema for the achievement-sniffer database.
-- One row per (character, achievement) observation.
-- BUG FIX: added `if not exists` so this script is idempotent, matching the
-- index and similar_pairs statements below.
create table if not exists achievements (
    name text not null,
    id integer,
    date text,
    completed integer,
    unique (name, id)
);
create index if not exists idx_achievements_name on achievements(name);
create index if not exists idx_achievements_composite on achievements(id, date, completed);
-- Precomputed pairwise similarity results (name1 < name2 by convention,
-- see cache.sql).
create table if not exists similar_pairs (
    name1 TEXT not null,
    name2 TEXT not null,
    matching_count INTEGER not null,
    total_achievements1 INTEGER not null,
    total_achievements2 INTEGER not null,
    similarity_percentage REAL not null,
    cutoff_date TEXT NOT NULL,
    last_updated TEXT not null default (datetime('now')),
    primary key (name1, name2)
);

42
service/data/diff.sql Normal file
View File

@@ -0,0 +1,42 @@
-- Ad-hoc diff of two characters' achievement histories: lists every
-- achievement that is missing on one side, or differs in completion status
-- or date. Edit the two hard-coded names below to compare other characters.
-- Candidates: Extazyk,Smokefire,Smokemantra,Муркот,Растафаркрай,Хихихантер
with char1_achievements as (
select id,
date,
completed
from achievements
where name = 'Extazyk'
),
char2_achievements as (
select id,
date,
completed
from achievements
where name = 'Smokemantra'
),
-- Union of both characters' achievement ids.
all_achievements as (
select id
from char1_achievements
union
select id
from char2_achievements
)
select all_achievements.id,
COALESCE(char1_achievements.completed, 0) as char1_completed,
char1_achievements.date as char1_date,
COALESCE(char2_achievements.completed, 0) as char2_completed,
char2_achievements.date as char2_date,
case
when char1_achievements.id is null then 'Only in Character2'
when char2_achievements.id is null then 'Only in Character1'
when char1_achievements.completed != char2_achievements.completed then 'Completion status differs'
when char1_achievements.date != char2_achievements.date then 'Dates differ'
else 'Same'
end as difference_type
from all_achievements
left join char1_achievements on all_achievements.id = char1_achievements.id
left join char2_achievements on all_achievements.id = char2_achievements.id
-- Keep only rows with an actual difference.
where char1_achievements.id is null
or char2_achievements.id is null
or char1_achievements.completed != char2_achievements.completed
or char1_achievements.date != char2_achievements.date
order by all_achievements.id;

View File

@@ -0,0 +1,52 @@
-- Recursively groups characters connected through similar_pairs rows with
-- similarity_percentage >= 70, then reports each group's size and summary
-- statistics.
with recursive connected_players as (
    -- Seed: name1 side of every strong pair, rooted at itself.
    select name1 as player_name,
        name1 as group_root,
        matching_count,
        total_achievements1 as achievements,
        similarity_percentage
    from similar_pairs
    where similarity_percentage >= 70
    union
    -- Seed: name2 side, rooted at its partner's name1.
    select name2,
        name1,
        matching_count,
        total_achievements2,
        similarity_percentage
    -- BUG FIX: this branch's FROM/WHERE was garbled in the source
    -- ("ity_percentage >= 70" with the preceding text truncated).
    from similar_pairs
    where similarity_percentage >= 70
    union
    -- Recursive step: follow strong pairs outward from known members.
    select case
            when sp.name1 = cp.player_name then sp.name2
            else sp.name1
        end,
        cp.group_root,
        sp.matching_count,
        case
            when sp.name1 = cp.player_name then sp.total_achievements2
            else sp.total_achievements1
        end,
        sp.similarity_percentage
    from connected_players cp
        join similar_pairs sp on (
            sp.name1 = cp.player_name
            or sp.name2 = cp.player_name
        )
        and sp.similarity_percentage >= 70
    where case
            when sp.name1 = cp.player_name then sp.name2
            -- BUG FIX: the original returned sp.name2 in both branches, so a
            -- partner reached via name2 was never compared to group_root.
            else sp.name1
        end != cp.group_root
)
select group_root,
    count(*) as group_size,
    group_concat(distinct player_name) as connected_players,
    min(similarity_percentage) as min_similarity,
    avg(similarity_percentage) as avg_similarity,
    min(matching_count) as min_matching,
    avg(matching_count) as avg_matching
from connected_players
-- NOTE(review): in ('') matches no group_root; presumably a placeholder
-- for the roots you want to inspect -- fill in before running.
where group_root in ('')
group by group_root
having count(*) > 1
order by count(*) desc,
    avg_similarity desc

84
service/nsq/db.go Normal file
View File

@@ -0,0 +1,84 @@
package main
import (
"database/sql"
"fmt"
"log"
"os"
"time"
_ "github.com/mattn/go-sqlite3"
)
// DB wraps a pair of SQLite connection pools over a single database file:
// a single-connection write pool (SQLite allows one writer at a time) and a
// read-only pool of up to four connections.
type DB struct {
	Ready     bool    // true once both pools are open
	path      string  // filesystem path to the SQLite database file
	readConn  *sql.DB // read-only pool
	writeConn *sql.DB // single-connection write pool
}

// Open ensures the database file exists (creating it if necessary) and opens
// the read and write pools in WAL mode with NORMAL synchronous setting.
func (db *DB) Open() error {
	if db.path == "" {
		return fmt.Errorf("database path not set")
	}
	// BUG FIX: the original os.Open/file.Close dance called Close on a nil
	// *os.File when the file had just been created; a Stat check avoids
	// opening the file at all.
	if _, err := os.Stat(db.path); err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("failed to stat database file: %v", err)
		}
		log.Printf("Database file does not exist at %s, creating", db.path)
		f, err := os.Create(db.path)
		if err != nil {
			return fmt.Errorf("failed to create database file: %v", err)
		}
		if err := f.Close(); err != nil {
			return fmt.Errorf("failed to close new database file: %v", err)
		}
		log.Printf("Database created at %s", db.path)
	}
	writeConn, err := sql.Open("sqlite3", db.path+"?_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		return fmt.Errorf("failed to open write connection: %v", err)
	}
	writeConn.SetMaxOpenConns(1)
	writeConn.SetConnMaxIdleTime(30 * time.Second)
	writeConn.SetConnMaxLifetime(30 * time.Second)
	db.writeConn = writeConn
	// BUG FIX: the original DSN carried both mode=ro and a redundant
	// _mode=ro; one read-only flag is enough.
	readConn, err := sql.Open("sqlite3", db.path+"?mode=ro&_journal=WAL&_synchronous=NORMAL")
	if err != nil {
		// Don't leak the write pool if the read pool fails to open.
		writeConn.Close()
		db.writeConn = nil
		return fmt.Errorf("failed to open read connection: %v", err)
	}
	readConn.SetMaxOpenConns(4)
	readConn.SetConnMaxIdleTime(30 * time.Second)
	readConn.SetConnMaxLifetime(30 * time.Second)
	db.readConn = readConn
	db.Ready = true
	return nil
}

// Init checks that Open has succeeded. The ddl argument is currently
// unused -- presumably a placeholder for schema creation; confirm intent.
func (db *DB) Init(ddl string) error {
	if !db.Ready {
		return fmt.Errorf("database not ready")
	}
	return nil
}

// Close shuts down both pools. BUG FIX: the original returned on the first
// error and leaked the read pool; this version always closes both and
// reports the first error encountered. Nil pools are skipped so Close is
// safe on a zero-value DB.
func (db *DB) Close() error {
	var firstErr error
	if db.writeConn != nil {
		if err := db.writeConn.Close(); err != nil {
			firstErr = err
		}
	}
	if db.readConn != nil {
		if err := db.readConn.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	return firstErr
}

10
service/nsq/go.mod Normal file
View File

@@ -0,0 +1,10 @@
module achievementsnifferws
go 1.23.2
require (
github.com/mattn/go-sqlite3 v1.14.24
github.com/nsqio/go-nsq v1.1.0
)
require github.com/golang/snappy v0.0.1 // indirect

6
service/nsq/go.sum Normal file
View File

@@ -0,0 +1,6 @@
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/nsqio/go-nsq v1.1.0 h1:PQg+xxiUjA7V+TLdXw7nVrJ5Jbl3sN86EhGCQj4+FYE=
github.com/nsqio/go-nsq v1.1.0/go.mod h1:vKq36oyeVXgsS5Q8YEO7WghqidAVXQlcFxzQbQTuDEY=

189
service/nsq/main.go Normal file
View File

@@ -0,0 +1,189 @@
package main
import (
"context"
"encoding/json"
"fmt"
"io"
"log"
"os"
"os/signal"
"syscall"
"time"
"github.com/nsqio/go-nsq"
)
// Error logs with a red ERROR prefix to main.log, stderr and stdout.
var Error *log.Logger

// Warning logs with a yellow Warning prefix to main.log and stdout.
var Warning *log.Logger

// init wires all logging to main.log (mirrored to the console) and builds
// the package-level Error and Warning loggers. Failure to create the log
// file aborts the process.
func init() {
	log.SetFlags(log.Lmicroseconds | log.Lshortfile)
	logFile, err := os.Create("main.log")
	if err != nil {
		log.Printf("Error creating log file: %v", err)
		os.Exit(1)
	}
	log.SetOutput(io.MultiWriter(os.Stdout, logFile))
	const flags = log.Lmicroseconds | log.Lshortfile
	errPrefix := fmt.Sprintf("%sERROR:%s ", "\033[0;101m", "\033[0m")
	warnPrefix := fmt.Sprintf("%sWarning:%s ", "\033[0;93m", "\033[0m")
	Error = log.New(io.MultiWriter(logFile, os.Stderr, os.Stdout), errPrefix, flags)
	Warning = log.New(io.MultiWriter(logFile, os.Stdout), warnPrefix, flags)
}
// DOWNLOAD_WORKERS is the number of concurrent NSQ message handlers and the
// consumer's max-in-flight setting.
const DOWNLOAD_WORKERS = 50

// whitelistedAchievements is the set of achievement IDs (as strings, to
// match NSQMessage.ID) that get persisted; messages for any other ID are
// dropped in HandleMessage.
// NOTE(review): "94103" has one digit more than its neighbors -- possibly a
// typo (e.g. for "9410" or "10413"); confirm against the achievement list.
var whitelistedAchievements = map[string]bool{
"15": true,
"958": true,
"1276": true,
"2088": true,
"2151": true,
"5466": true,
"5759": true,
"6470": true,
"6763": true,
"7392": true,
"7393": true,
"7394": true,
"7958": true,
"8939": true,
"8992": true,
"9048": true,
"94103": true,
"10059": true,
"10079": true,
"10278": true,
"10657": true,
"10672": true,
"10684": true,
"10688": true,
"10689": true,
"10692": true,
"10693": true,
"10698": true,
"10790": true,
"10875": true,
"11124": true,
"11126": true,
"11127": true,
"11128": true,
"11157": true,
"11164": true,
"11188": true,
"11189": true,
"11190": true,
"11446": true,
"11473": true,
"11610": true,
"11674": true,
"11992": true,
"11993": true,
"11994": true,
"11995": true,
"11996": true,
"11997": true,
"11998": true,
"11999": true,
"12000": true,
"12001": true,
"12026": true,
"12074": true,
"12445": true,
"12447": true,
"12448": true,
}
// MsgHandler is a stateless go-nsq handler that persists achievement
// messages into the local database.
type MsgHandler struct{}

// HandleMessage processes one achievement message: it decodes the JSON
// body, drops messages whose achievement ID is not whitelisted, and inserts
// the rest into the achievements table (INSERT OR IGNORE deduplicates
// repeats). A non-nil return lets go-nsq requeue the message up to
// MaxAttempts times.
func (*MsgHandler) HandleMessage(message *nsq.Message) error {
log.Printf("Received message '%s' with %d attempts", message.Body, message.Attempts)
data := NSQMessage{}
err := json.Unmarshal(message.Body, &data)
if err != nil {
Error.Printf("Error unmarshalling message: %v", err)
return err
}
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
// Keep-alive goroutine: Touch the message every 5s while the handler is
// running so the 10s MsgTimeout configured in main does not expire
// mid-processing. cancel() (via defer) stops it when the handler returns.
go func() {
ticker := time.NewTicker(5 * time.Second)
defer ticker.Stop()
for {
select {
case <-ticker.C:
message.Touch()
case <-ctx.Done():
return
}
}
}()
_, ok := whitelistedAchievements[data.ID]
if !ok {
// Returning nil finishes (drops) the non-whitelisted message.
Warning.Printf("Received message for non-whitelisted achievement %s", data.ID)
return nil
}
_, err = db.writeConn.Exec("INSERT OR IGNORE INTO achievements (name, id, date, completed) VALUES (?, ?, ?, ?)",
data.Name, data.ID, data.Date, data.Completed)
if err != nil {
Error.Printf("Error inserting into database: %v", err)
return err
}
// Explicit Finish marks success before returning nil; presumably go-nsq's
// auto-response then becomes a no-op -- confirm against go-nsq docs.
message.Finish()
return nil
}
// db is the shared handle to the achievements database.
var db DB

// main consumes achievement messages from the "wowspy" topic on a fixed
// nsqd instance and persists whitelisted ones into the local SQLite
// database until SIGINT/SIGTERM triggers a graceful shutdown.
func main() {
	config := nsq.NewConfig()
	config.MaxAttempts = 5
	config.MaxInFlight = DOWNLOAD_WORKERS
	config.MsgTimeout = 10 * time.Second
	db = DB{
		path: "../data/db.db",
	}
	err := db.Open()
	if err != nil {
		Error.Printf("Error opening database: %v", err)
		return
	}
	defer db.Close()
	consumer, err := nsq.NewConsumer("wowspy", "achievement", config)
	if err != nil {
		Error.Printf("Error creating consumer: %v", err)
		return
	}
	// Run DOWNLOAD_WORKERS handler goroutines; the documented idiom for
	// the original loop of repeated AddHandler calls.
	consumer.AddConcurrentHandlers(&MsgHandler{}, DOWNLOAD_WORKERS)
	err = consumer.ConnectToNSQD("nsq.site.quack-lab.dev:41505")
	if err != nil {
		// BUG FIX: the original message said "nsqlookupd", but this call
		// connects directly to an nsqd instance.
		Error.Printf("Error connecting to nsqd: %v", err)
		return
	}
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
	<-sigChan
	log.Println("Received signal to terminate. Initiating graceful shutdown...")
	consumer.Stop()
	// StopChan closes once all in-flight messages are handled.
	<-consumer.StopChan
	log.Println("Graceful shutdown completed.")
}

10
service/nsq/types.go Normal file
View File

@@ -0,0 +1,10 @@
package main

type (
	// NSQMessage is the JSON payload consumed from the "wowspy" topic:
	// one achievement observation for a named character.
	NSQMessage struct {
		Name string `json:"name"` // character name
		ID string `json:"id"` // achievement id as a string
		Date string `json:"date"` // date text -- format not shown here; confirm with producer
		Completed bool `json:"completed"`
	}
)

View File

@@ -0,0 +1,9 @@
-- Lists every unordered pair of characters that share at least one
-- achievement id (a1.name < a2.name avoids duplicates and self-pairs).
with unique_pairs as (
select distinct a1.name as character1,
a2.name as character2
from achievements a1
join achievements a2 on a1.id = a2.id
and a1.name < a2.name
)
select *
from unique_pairs