11 Commits

10 changed files with 727 additions and 114 deletions

9
go.mod

@@ -13,7 +13,6 @@ require (
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/hexops/valast v1.5.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
@@ -21,10 +20,8 @@ require (
github.com/mattn/go-sqlite3 v1.14.22 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
golang.org/x/mod v0.21.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/text v0.22.0 // indirect
@@ -33,4 +30,8 @@ require (
mvdan.cc/gofumpt v0.4.0 // indirect
)
require gorm.io/driver/sqlite v1.6.0
require (
github.com/google/go-cmp v0.6.0
github.com/tidwall/gjson v1.18.0
gorm.io/driver/sqlite v1.6.0
)
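
go-cmp and gjson move from indirect to direct requires here, which matches the changes further down: the processor tests now call cmp.Diff and the JSON editor queries documents with gjson. As a reference, a minimal standalone go-cmp sketch (not repository code) showing the conventional (want, got) argument order:

package main

import (
    "fmt"

    "github.com/google/go-cmp/cmp"
)

func main() {
    want := "value = 100\n"
    got := "value = 10\n"
    // cmp.Diff(want, got) renders differences as -want +got lines.
    if diff := cmp.Diff(want, got); diff != "" {
        fmt.Printf("mismatch (-want +got):\n%s", diff)
    }
}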

3
go.sum

@@ -36,15 +36,12 @@ github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0t
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=

335
isolate_test.go Normal file

@@ -0,0 +1,335 @@
package main
import (
"os"
"path/filepath"
"testing"
"cook/utils"
logger "git.site.quack-lab.dev/dave/cylogger"
"github.com/stretchr/testify/assert"
)
func TestIsolateCommandsSequentialExecution(t *testing.T) {
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "isolate-sequential-test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Create test file content
testContent := `BEGIN
block1 content with value 42
END
Some other content
BEGIN
block2 content with value 100
END
More content
BEGIN
block3 content with value 200
END`
testFile := filepath.Join(tmpDir, "test.txt")
err = os.WriteFile(testFile, []byte(testContent), 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
// Change to temp directory
origDir, _ := os.Getwd()
defer os.Chdir(origDir)
os.Chdir(tmpDir)
// Create isolate commands that work sequentially on the same block
// First command: 42 -> 84
// Second command: 84 -> 168 (works on result of first command)
// Third command: 168 -> 336 (works on result of second command)
commands := []utils.ModifyCommand{
{
Name: "MultiplyFirst",
Regex: `BEGIN\n(?P<block>.*?value 42.*?)\nEND`,
Lua: `replacement = "BEGIN\n" .. string.gsub(block, "42", "84") .. "\nEND"; return true`,
Files: []string{"test.txt"},
Isolate: true,
},
{
Name: "MultiplySecond",
Regex: `BEGIN\nblock1 content with value (?P<value>!num)\nEND`,
Lua: `value = "168"; return true`,
Files: []string{"test.txt"},
Isolate: true,
},
{
Name: "MultiplyThird",
Regex: `BEGIN\nblock1 content with value (?P<value>!num)\nEND`,
Lua: `value = "336"; return true`,
Files: []string{"test.txt"},
Isolate: true,
},
}
// Associate files with commands
files := []string{"test.txt"}
associations, err := utils.AssociateFilesWithCommands(files, commands)
if err != nil {
t.Fatalf("Failed to associate files with commands: %v", err)
}
// Verify that all three isolate commands are associated
association := associations["test.txt"]
assert.Len(t, association.IsolateCommands, 3, "Expected 3 isolate commands to be associated")
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
// Run the isolate commands
result, err := RunIsolateCommands(association, "test.txt", testContent)
if err != nil && err != NothingToDo {
t.Fatalf("Failed to run isolate commands: %v", err)
}
// Verify that all isolate commands were applied sequentially
// First command: 42 -> 84
// Second command: 84 -> 168 (works on result of first)
// Third command: 168 -> 336 (works on result of second)
assert.Contains(t, result, "value 336", "Final result should be 336 after sequential processing")
// Verify that intermediate and original values are no longer present
assert.NotContains(t, result, "value 42", "Original value 42 should be replaced")
assert.NotContains(t, result, "value 84", "Intermediate value 84 should be replaced")
assert.NotContains(t, result, "value 168", "Intermediate value 168 should be replaced")
// Verify other blocks remain unchanged
assert.Contains(t, result, "value 100", "Second block should remain unchanged")
assert.Contains(t, result, "value 200", "Third block should remain unchanged")
t.Logf("Original content:\n%s\n", testContent)
t.Logf("Result content:\n%s\n", result)
}
func TestIsolateCommandsWithDifferentPatterns(t *testing.T) {
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "isolate-different-patterns-test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Create test file content with distinct patterns
testContent := `SECTION1
value = 10
END_SECTION1
SECTION2
value = 20
END_SECTION2`
testFile := filepath.Join(tmpDir, "test.txt")
err = os.WriteFile(testFile, []byte(testContent), 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
// Change to temp directory
origDir, _ := os.Getwd()
defer os.Chdir(origDir)
os.Chdir(tmpDir)
// Create isolate commands with different patterns on the same content
commands := []utils.ModifyCommand{
{
Name: "UpdateSection1",
Regex: `SECTION1.*?value = (?P<value>!num).*?END_SECTION1`,
Lua: `value = "100"; return true`,
Files: []string{"test.txt"},
Isolate: true,
},
{
Name: "UpdateSection2",
Regex: `SECTION2.*?value = (?P<value>!num).*?END_SECTION2`,
Lua: `value = "200"; return true`,
Files: []string{"test.txt"},
Isolate: true,
},
}
// Associate files with commands
files := []string{"test.txt"}
associations, err := utils.AssociateFilesWithCommands(files, commands)
if err != nil {
t.Fatalf("Failed to associate files with commands: %v", err)
}
// Run the isolate commands
result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent)
if err != nil && err != NothingToDo {
t.Fatalf("Failed to run isolate commands: %v", err)
}
// Verify that both isolate commands were applied
assert.Contains(t, result, "value = 100", "Section1 should be updated")
assert.Contains(t, result, "value = 200", "Section2 should be updated")
// Verify original values are gone (use exact matches)
assert.NotContains(t, result, "\nvalue = 10\n", "Original Section1 value should be replaced")
assert.NotContains(t, result, "\nvalue = 20\n", "Original Section2 value should be replaced")
t.Logf("Original content:\n%s\n", testContent)
t.Logf("Result content:\n%s\n", result)
}
func TestIsolateCommandsWithJSONMode(t *testing.T) {
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "isolate-json-test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Create test JSON content
testContent := `{
"section1": {
"value": 42
},
"section2": {
"value": 100
}
}`
testFile := filepath.Join(tmpDir, "test.json")
err = os.WriteFile(testFile, []byte(testContent), 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
// Change to temp directory
origDir, _ := os.Getwd()
defer os.Chdir(origDir)
os.Chdir(tmpDir)
// Create isolate commands with JSON mode
commands := []utils.ModifyCommand{
{
Name: "UpdateJSONFirst",
JSON: true,
Lua: `data.section1.value = data.section1.value * 2; return true`,
Files: []string{"test.json"},
Isolate: true,
},
{
Name: "UpdateJSONSecond",
JSON: true,
Lua: `data.section2.value = data.section2.value * 3; return true`,
Files: []string{"test.json"},
Isolate: true,
},
}
// Associate files with commands
files := []string{"test.json"}
associations, err := utils.AssociateFilesWithCommands(files, commands)
if err != nil {
t.Fatalf("Failed to associate files with commands: %v", err)
}
// Run the isolate commands
result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent)
if err != nil && err != NothingToDo {
t.Fatalf("Failed to run isolate commands: %v", err)
}
// Verify that both JSON isolate commands were applied
assert.Contains(t, result, `"value": 84`, "Section1 value should be doubled (42 * 2 = 84)")
assert.Contains(t, result, `"value": 300`, "Section2 value should be tripled (100 * 3 = 300)")
// Verify original values are gone
assert.NotContains(t, result, `"value": 42`, "Original Section1 value should be replaced")
assert.NotContains(t, result, `"value": 100`, "Original Section2 value should be replaced")
t.Logf("Original content:\n%s\n", testContent)
t.Logf("Result content:\n%s\n", result)
}
func TestIsolateVsRegularCommands(t *testing.T) {
// Create a temporary directory for testing
tmpDir, err := os.MkdirTemp("", "isolate-regular-test")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
// Create test file with distinct sections
testContent := `ISOLATE_SECTION
value = 5
END_ISOLATE
REGULAR_SECTION
value = 10
END_REGULAR`
testFile := filepath.Join(tmpDir, "test.txt")
err = os.WriteFile(testFile, []byte(testContent), 0644)
if err != nil {
t.Fatalf("Failed to write test file: %v", err)
}
// Change to temp directory
origDir, _ := os.Getwd()
defer os.Chdir(origDir)
os.Chdir(tmpDir)
// Create both isolate and regular commands
commands := []utils.ModifyCommand{
{
Name: "IsolateMultiply",
Regex: `ISOLATE_SECTION.*?value = (?P<value>!num).*?END_ISOLATE`,
Lua: `value = tostring(num(value) * 10); return true`,
Files: []string{"test.txt"},
Isolate: true,
},
{
Name: "RegularMultiply",
Regex: `value = (?P<value>!num)`,
Lua: `value = tostring(num(value) + 100); return true`,
Files: []string{"test.txt"},
},
}
// Associate files with commands
files := []string{"test.txt"}
associations, err := utils.AssociateFilesWithCommands(files, commands)
if err != nil {
t.Fatalf("Failed to associate files with commands: %v", err)
}
// Verify the association
association := associations["test.txt"]
assert.Len(t, association.IsolateCommands, 1, "Expected 1 isolate command")
assert.Len(t, association.Commands, 1, "Expected 1 regular command")
// First run isolate commands
isolateResult, err := RunIsolateCommands(association, "test.txt", testContent)
if err != nil && err != NothingToDo {
t.Fatalf("Failed to run isolate commands: %v", err)
}
// Verify isolate command result
assert.Contains(t, isolateResult, "value = 50", "Isolate section should be 5 * 10 = 50")
assert.Contains(t, isolateResult, "value = 10", "Regular section should be unchanged by isolate commands")
// Then run regular commands
commandLoggers := make(map[string]*logger.Logger)
finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers)
if err != nil && err != NothingToDo {
t.Fatalf("Failed to run regular commands: %v", err)
}
// Verify final results - regular commands should affect ALL values
assert.Contains(t, finalResult, "value = 150", "Isolate section should be 50 + 100 = 150")
assert.Contains(t, finalResult, "value = 110", "Regular section should be 10 + 100 = 110")
t.Logf("Original content:\n%s\n", testContent)
t.Logf("After isolate commands:\n%s\n", isolateResult)
t.Logf("Final result:\n%s\n", finalResult)
}
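
The tests above pin down one behaviour in particular: isolate commands are applied as a fold over the file contents, each command operating on the output of the previous one rather than on the original data. A standalone sketch of that semantics, with stand-in step functions rather than the real processor:

package main

import (
    "fmt"
    "strings"
)

type step func(string) string

// runSequentially threads the content through each step in order, so a later
// step sees whatever the earlier steps produced.
func runSequentially(content string, steps []step) string {
    current := content
    for _, s := range steps {
        current = s(current)
    }
    return current
}

func main() {
    steps := []step{
        func(s string) string { return strings.ReplaceAll(s, "42", "84") },
        func(s string) string { return strings.ReplaceAll(s, "84", "168") },
        func(s string) string { return strings.ReplaceAll(s, "168", "336") },
    }
    fmt.Println(runSequentially("block1 content with value 42", steps))
    // Output: block1 content with value 336
}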

54
main.go

@@ -82,7 +82,7 @@ func main() {
}
mainLogger.Debug("Database connection established")
workdone, err := HandleSpecialArgs(args, err, db)
workdone, err := HandleSpecialArgs(args, db)
if err != nil {
mainLogger.Error("Failed to handle special args: %v", err)
return
@@ -366,28 +366,34 @@ func main() {
}
}
func HandleSpecialArgs(args []string, err error, db utils.DB) (bool, error) {
func HandleSpecialArgs(args []string, db utils.DB) (bool, error) {
handleSpecialArgsLogger := logger.Default.WithPrefix("HandleSpecialArgs")
handleSpecialArgsLogger.Debug("Handling special arguments: %v", args)
if len(args) == 0 {
handleSpecialArgsLogger.Warning("No arguments provided to HandleSpecialArgs")
return false, nil
}
switch args[0] {
case "reset":
handleSpecialArgsLogger.Info("Resetting all files")
err = utils.ResetAllFiles(db)
handleSpecialArgsLogger.Info("Resetting all files to their original state from database")
err := utils.ResetAllFiles(db)
if err != nil {
handleSpecialArgsLogger.Error("Failed to reset all files: %v", err)
return true, err
}
handleSpecialArgsLogger.Info("All files reset")
handleSpecialArgsLogger.Info("Successfully reset all files to original state")
return true, nil
case "dump":
handleSpecialArgsLogger.Info("Dumping all files from database")
err = db.RemoveAllFiles()
handleSpecialArgsLogger.Info("Dumping all files from database (clearing snapshots)")
err := db.RemoveAllFiles()
if err != nil {
handleSpecialArgsLogger.Error("Failed to remove all files from database: %v", err)
return true, err
}
handleSpecialArgsLogger.Info("All files removed from database")
handleSpecialArgsLogger.Info("Successfully cleared all file snapshots from database")
return true, nil
default:
handleSpecialArgsLogger.Debug("Unknown special argument: %q", args[0])
}
handleSpecialArgsLogger.Debug("No special arguments handled, returning false")
return false, nil
@@ -636,11 +642,13 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
runIsolateCommandsLogger.Trace("File data before isolate modifications: %s", utils.LimitString(fileDataStr, 200))
anythingDone := false
currentFileData := fileDataStr
for _, isolateCommand := range association.IsolateCommands {
// Check if this isolate command should use JSON mode
if isolateCommand.JSON || *utils.JSON {
runIsolateCommandsLogger.Debug("Begin processing file with JSON isolate command %q", isolateCommand.Name)
modifications, err := processor.ProcessJSON(fileDataStr, isolateCommand, file)
modifications, err := processor.ProcessJSON(currentFileData, isolateCommand, file)
if err != nil {
runIsolateCommandsLogger.Error("Failed to process file with JSON isolate command %q: %v", isolateCommand.Name, err)
continue
@@ -655,15 +663,21 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
runIsolateCommandsLogger.Debug("Executing %d JSON isolate modifications for file", len(modifications))
runIsolateCommandsLogger.Trace("JSON isolate modifications: %v", modifications)
var count int
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
runIsolateCommandsLogger.Trace("File data after JSON isolate modifications: %s", utils.LimitString(fileDataStr, 200))
currentFileData, count = utils.ExecuteModifications(modifications, currentFileData)
runIsolateCommandsLogger.Trace("File data after JSON isolate modifications: %s", utils.LimitString(currentFileData, 200))
atomic.AddInt64(&stats.TotalModifications, int64(count))
cmdCount, ok := stats.ModificationsPerCommand.Load(isolateCommand.Name)
if !ok {
stats.ModificationsPerCommand.Store(isolateCommand.Name, 0)
cmdCount = 0
}
stats.ModificationsPerCommand.Store(isolateCommand.Name, cmdCount.(int)+len(modifications))
runIsolateCommandsLogger.Info("Executed %d JSON isolate modifications for file", count)
} else {
// Regular regex processing for isolate commands
runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q", isolateCommand.Regex)
patterns := isolateCommand.Regexes
if len(patterns) == 0 {
patterns = []string{isolateCommand.Regex}
@@ -671,7 +685,8 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
for idx, pattern := range patterns {
tmpCmd := isolateCommand
tmpCmd.Regex = pattern
modifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
modifications, err := processor.ProcessRegex(currentFileData, tmpCmd, file)
if err != nil {
runIsolateCommandsLogger.Error("Failed to process file with isolate command %q (pattern %d/%d): %v", isolateCommand.Name, idx+1, len(patterns), err)
continue
@@ -686,11 +701,18 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
runIsolateCommandsLogger.Debug("Executing %d isolate modifications for file", len(modifications))
runIsolateCommandsLogger.Trace("Isolate modifications: %v", modifications)
var count int
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(fileDataStr, 200))
currentFileData, count = utils.ExecuteModifications(modifications, currentFileData)
runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(currentFileData, 200))
atomic.AddInt64(&stats.TotalModifications, int64(count))
cmdCount, ok := stats.ModificationsPerCommand.Load(isolateCommand.Name)
if !ok {
stats.ModificationsPerCommand.Store(isolateCommand.Name, 0)
cmdCount = 0
}
stats.ModificationsPerCommand.Store(isolateCommand.Name, cmdCount.(int)+len(modifications))
runIsolateCommandsLogger.Info("Executed %d isolate modifications for file", count)
}
}
@@ -699,5 +721,5 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
return fileDataStr, NothingToDo
}
return fileDataStr, nil
return currentFileData, nil
}
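
Besides threading currentFileData through the loop, this hunk adds a per-command modification counter built on sync.Map's Load/Store. A self-contained sketch of that pattern; note that Load followed by Store is not an atomic increment, so the sketch assumes a given counter is only ever updated by one goroutine at a time:

package main

import (
    "fmt"
    "sync"
)

func main() {
    var modificationsPerCommand sync.Map // command name -> int

    add := func(name string, n int) {
        count, ok := modificationsPerCommand.Load(name)
        if !ok {
            count = 0
        }
        modificationsPerCommand.Store(name, count.(int)+n)
    }

    add("MultiplyFirst", 2)
    add("MultiplyFirst", 3)
    add("UpdateSection1", 1)

    modificationsPerCommand.Range(func(key, value any) bool {
        fmt.Printf("%v: %v\n", key, value)
        return true
    })
}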

View File

@@ -89,10 +89,11 @@ func ProcessJSON(content string, command utils.ModifyCommand, filename string) (
return commands, fmt.Errorf("failed to convert Lua table back to Go: %v", err)
}
commands, err = applyJSONChanges(content, jsonData, goData)
processJsonLogger.Debug("About to call applyChanges with original data and modified data")
commands, err = applyChanges(content, jsonData, goData)
if err != nil {
processJsonLogger.Error("Failed to apply JSON changes: %v", err)
return commands, fmt.Errorf("failed to apply JSON changes: %v", err)
processJsonLogger.Error("Failed to apply surgical JSON changes: %v", err)
return commands, fmt.Errorf("failed to apply surgical JSON changes: %v", err)
}
processJsonLogger.Debug("Total JSON processing time: %v", time.Since(startTime))
@@ -145,24 +146,23 @@ func applyChanges(content string, originalData, modifiedData interface{}) ([]uti
// Apply removals first (from end to beginning to avoid index shifting)
for _, removalPath := range removals {
actualPath := strings.TrimSuffix(removalPath, "@remove")
index := extractIndexFromRemovalPath(removalPath)
elementIndex := extractIndexFromRemovalPath(actualPath)
arrayPath := getArrayPathFromElementPath(actualPath)
// Get the array element to remove
result := gjson.Get(content, actualPath)
if !result.Exists() {
continue
}
jsonLogger.Debug("Processing removal: path=%s, index=%d, arrayPath=%s", actualPath, elementIndex, arrayPath)
// Find the exact byte range to remove
from, to := findArrayElementRemovalRange(content, arrayPath, elementIndex)
jsonLogger.Debug("Removing bytes %d-%d", from, to)
// Find the exact byte range to remove (including comma/formatting)
startPos, endPos := findArrayElementRemovalRange(content, arrayPath, index)
if startPos >= 0 && endPos > startPos {
commands = append(commands, utils.ReplaceCommand{
From: startPos,
To: endPos,
With: "", // Remove the element
From: from,
To: to,
With: "",
})
}
jsonLogger.Debug("Added removal command: From=%d, To=%d, With=\"\"", from, to)
}
// Apply additions (new fields)
@@ -200,10 +200,11 @@ func applyChanges(content string, originalData, modifiedData interface{}) ([]uti
// Convert the new value to JSON string
newValueStr := convertValueToJSONString(newValue)
// Insert the new field
// Insert the new field with pretty-printed formatting
// Format: ,"fieldName": { ... }
insertText := fmt.Sprintf(`,"%s": %s`, fieldName, newValueStr)
jsonLogger.Debug("Inserting text: %q", insertText)
commands = append(commands, utils.ReplaceCommand{
From: startPos,
@@ -311,10 +312,31 @@ func convertValueToJSONString(value interface{}) string {
return strconv.FormatBool(v)
case nil:
return "null"
case map[string]interface{}:
// Handle maps specially to avoid double-escaping of keys
var pairs []string
for key, val := range v {
// The key may already carry escaped quotes from Lua, so be careful not to
// escape it a second time when building the JSON key
keyStr := key
if strings.Contains(key, `\"`) {
// Key already contains escaped quotes: wrap it in quotes verbatim, without re-escaping
keyStr = `"` + key + `"`
} else {
// Normal key, escape quotes
keyStr = `"` + strings.ReplaceAll(key, `"`, `\"`) + `"`
}
valStr := convertValueToJSONString(val)
pairs = append(pairs, keyStr+":"+valStr)
}
return "{" + strings.Join(pairs, ",") + "}"
default:
// For complex types, we need to avoid json.Marshal
// This should not happen if we're doing true surgical edits
return ""
// For other complex types (arrays), we need to use json.Marshal
jsonBytes, err := json.Marshal(v)
if err != nil {
return "null" // Fallback to null if marshaling fails
}
return string(jsonBytes)
}
}
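
applyChanges builds utils.ReplaceCommand values from byte offsets into the original document rather than re-marshalling it. A minimal standalone sketch of how such offsets can be obtained with gjson; the path and the patch below are illustrative, and it assumes gjson populates Result.Index for the queried path (it does for plain Get lookups):

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

func main() {
    doc := `{
  "section1": { "value": 42 },
  "section2": { "value": 100 }
}`
    r := gjson.Get(doc, "section1.value")
    // r.Raw is the raw JSON text of the value, r.Index its byte offset in doc.
    from, to := r.Index, r.Index+len(r.Raw)
    patched := doc[:from] + "84" + doc[to:]
    fmt.Println(patched) // only the targeted bytes change; formatting is preserved
}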

View File

@@ -487,15 +487,6 @@ func EvalRegex(L *lua.LState) int {
evalRegexLogger := processorLogger.WithPrefix("evalRegex")
evalRegexLogger.Debug("Lua evalRegex function called")
defer func() {
if r := recover(); r != nil {
evalRegexLogger.Error("Panic in EvalRegex: %v", r)
// Push empty table on panic
emptyTable := L.NewTable()
L.Push(emptyTable)
}
}()
pattern := L.ToString(1)
input := L.ToString(2)
@@ -505,14 +496,19 @@ func EvalRegex(L *lua.LState) int {
matches := re.FindStringSubmatch(input)
evalRegexLogger.Debug("Go regex matches: %v (count: %d)", matches, len(matches))
evalRegexLogger.Debug("Matches is nil: %t", matches == nil)
if len(matches) > 0 {
matchesTable := L.NewTable()
for i, match := range matches {
matchesTable.RawSetInt(i, lua.LString(match))
matchesTable.RawSetString(fmt.Sprintf("%d", i), lua.LString(match))
evalRegexLogger.Debug("Set table[%d] = %q", i, match)
}
L.Push(matchesTable)
} else {
L.Push(lua.LNil)
}
evalRegexLogger.Debug("Pushed matches table to Lua stack")
return 1
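
Two behavioural changes land in this hunk: a non-match now pushes nil instead of an empty table, and matches are stored with RawSetString, so the table keys are the strings "0" through "n" rather than integer indices. A self-contained sketch of the resulting Lua-side contract; the registered global name evalRegex and the wiring around it are assumptions for illustration only:

package main

import (
    "fmt"
    "regexp"

    lua "github.com/yuin/gopher-lua"
)

// evalRegex mirrors the contract in the hunk above: push nil when the pattern
// does not match, otherwise push a table keyed by the strings "0".."n".
func evalRegex(L *lua.LState) int {
    re := regexp.MustCompile(L.ToString(1))
    matches := re.FindStringSubmatch(L.ToString(2))
    if len(matches) == 0 {
        L.Push(lua.LNil)
        return 1
    }
    t := L.NewTable()
    for i, m := range matches {
        t.RawSetString(fmt.Sprintf("%d", i), lua.LString(m))
    }
    L.Push(t)
    return 1
}

func main() {
    L := lua.NewState()
    defer L.Close()
    L.SetGlobal("evalRegex", L.NewFunction(evalRegex))
    err := L.DoString(`
        local m = evalRegex("value (\\d+)", "value 42")
        if m ~= nil then
            print("full:", m["0"], "group:", m["1"])
        else
            print("no match")
        end
    `)
    if err != nil {
        panic(err)
    }
}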

View File

@@ -21,7 +21,7 @@ func TestEvalRegex_CaptureGroupsReturned(t *testing.T) {
result := processor.EvalRegex(L)
assert.Equal(t, 0, result, "Expected return value to be 0")
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
out := L.Get(-1)
tbl, ok := out.(*lua.LTable)
@@ -35,26 +35,19 @@ func TestEvalRegex_CaptureGroupsReturned(t *testing.T) {
}
}
// Happy Path: Function returns an empty Lua table when regex pattern does not match input string.
func TestEvalRegex_NoMatchReturnsEmptyTable(t *testing.T) {
// Happy Path: Function returns nil when regex pattern does not match input string.
func TestEvalRegex_NoMatchReturnsNil(t *testing.T) {
L := lua.NewState()
defer L.Close()
L.Push(lua.LString(`(foo)(bar)`))
L.Push(lua.LString("no-match-here"))
result := processor.EvalRegex(L)
assert.Equal(t, 0, result)
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
out := L.Get(-1)
tbl, ok := out.(*lua.LTable)
if !ok {
t.Fatalf("Expected Lua table, got %T", out)
}
count := 0
tbl.ForEach(func(k, v lua.LValue) {
count++
})
assert.Zero(t, count, "Expected no items in the table for non-matching input")
// Should be nil when no matches found
assert.Equal(t, lua.LNil, out, "Expected nil when no matches found")
}
// Happy Path: Function handles patterns with no capture groups by returning the full match in the Lua table.
@@ -67,7 +60,7 @@ func TestEvalRegex_NoCaptureGroups(t *testing.T) {
L.Push(lua.LString(input))
result := processor.EvalRegex(L)
assert.Equal(t, 0, result)
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
out := L.Get(-1)
tbl, ok := out.(*lua.LTable)
@@ -84,7 +77,7 @@ func TestEvalRegex_NoCaptureGroups(t *testing.T) {
assert.Equal(t, 1, count)
}
// Edge Case: Function panics or errors when given an invalid regex pattern.
// Edge Case: Function handles invalid regex pattern by letting regexp.MustCompile panic (which is expected behavior)
func TestEvalRegex_InvalidPattern(t *testing.T) {
L := lua.NewState()
defer L.Close()
@@ -92,15 +85,13 @@ func TestEvalRegex_InvalidPattern(t *testing.T) {
L.Push(lua.LString(pattern))
L.Push(lua.LString("someinput"))
defer func() {
if r := recover(); r == nil {
t.Error("Expected panic for invalid regex pattern, but did not panic")
}
}()
// This should panic due to invalid regex pattern
assert.Panics(t, func() {
processor.EvalRegex(L)
}, "Expected panic for invalid regex pattern")
}
// Edge Case: Function returns an empty Lua table when input string is empty.
// Edge Case: Function returns nil when input string is empty and pattern doesn't match.
func TestEvalRegex_EmptyInputString(t *testing.T) {
L := lua.NewState()
defer L.Close()
@@ -108,19 +99,11 @@ func TestEvalRegex_EmptyInputString(t *testing.T) {
L.Push(lua.LString(""))
result := processor.EvalRegex(L)
assert.Equal(t, 0, result)
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
out := L.Get(-1)
tbl, ok := out.(*lua.LTable)
if !ok {
t.Fatalf("Expected Lua table, got %T", out)
}
// Should be empty
count := 0
tbl.ForEach(func(k, v lua.LValue) {
count++
})
assert.Zero(t, count, "Expected empty table when input is empty")
// Should be nil when no matches found
assert.Equal(t, lua.LNil, out, "Expected nil when input is empty and pattern doesn't match")
}
// Edge Case: Function handles nil or missing arguments gracefully without causing a runtime panic.
@@ -138,7 +121,7 @@ func TestEvalRegex_MissingArguments(t *testing.T) {
}
func TestEvalComplexRegex(t *testing.T) {
// 23:47:35.567068 processor.go:369 [g:22 ] [LUA] Pistol_Round ^((Bulk_)?(Pistol|Rifle).*?Round.*?)$
// Test complex regex pattern with multiple capture groups
L := lua.NewState()
defer L.Close()
pattern := `^((Bulk_)?(Pistol|Rifle).*?Round.*?)$`
@@ -153,10 +136,13 @@ func TestEvalComplexRegex(t *testing.T) {
if !ok {
t.Fatalf("Expected Lua table, got %T", out)
}
count := 0
// Pattern should match: ["Pistol_Round", "Pistol_Round", "", "Pistol"]
// This creates 4 elements in the matches array, not 1
expectedCount := 4
actualCount := 0
tbl.ForEach(func(k, v lua.LValue) {
fmt.Println(k, v)
count++
actualCount++
})
assert.Equal(t, 1, count)
assert.Equal(t, expectedCount, actualCount, "Expected %d matches for pattern %q with input %q", expectedCount, pattern, input)
}

View File

@@ -3,6 +3,8 @@ package processor
import (
"cook/utils"
"testing"
"github.com/google/go-cmp/cmp"
)
func TestSurgicalJSONEditing(t *testing.T) {
@@ -92,6 +94,11 @@ modified = true
result = result[:cmd.From] + cmd.With + result[cmd.To:]
}
diff := cmp.Diff(result, tt.expected)
if diff != "" {
t.Errorf("Differences:\n%s", diff)
}
// Check the actual result matches expected
if result != tt.expected {
t.Errorf("Expected:\n%s\n\nGot:\n%s", tt.expected, result)
@@ -178,6 +185,11 @@ modified = true
result = result[:cmd.From] + cmd.With + result[cmd.To:]
}
diff := cmp.Diff(result, expected)
if diff != "" {
t.Errorf("Differences:\n%s", diff)
}
// Check that the result matches expected (preserves formatting and changes weight)
if result != expected {
t.Errorf("Expected:\n%s\n\nGot:\n%s", expected, result)
@@ -500,6 +512,11 @@ func TestSurgicalJSONPreservesFormatting2(t *testing.T) {
result = result[:cmd.From] + cmd.With + result[cmd.To:]
}
diff := cmp.Diff(result, expected)
if diff != "" {
t.Errorf("Differences:\n%s", diff)
}
// Check that the result matches expected (preserves formatting and changes weight)
if result != expected {
t.Errorf("Expected:\n%s\n\nGot:\n%s", expected, result)
@@ -592,8 +609,239 @@ func TestRetardedJSONEditing(t *testing.T) {
result = result[:cmd.From] + cmd.With + result[cmd.To:]
}
diff := cmp.Diff(result, expected)
if diff != "" {
t.Errorf("Differences:\n%s", diff)
}
// Check that the weight was changed
if result != expected {
t.Errorf("Expected:\n%s\nGot:\n%s", expected, result)
}
}
func TestRetardedJSONEditing2(t *testing.T) {
original := `
{
"Rows": [
{
"Name": "Deep_Mining_Drill_Biofuel",
"Meshable": {
"RowName": "Mesh_Deep_Mining_Drill_Biofuel"
},
"Itemable": {
"RowName": "Item_Deep_Mining_Drill_Biofuel"
},
"Interactable": {
"RowName": "Deployable"
},
"Focusable": {
"RowName": "Focusable_1H"
},
"Highlightable": {
"RowName": "Generic"
},
"Actionable": {
"RowName": "Deployable"
},
"Usable": {
"RowName": "Place"
},
"Deployable": {
"RowName": "Deep_Mining_Drill_Biofuel"
},
"Durable": {
"RowName": "Deployable_750"
},
"Inventory": {
"RowName": "Deep_Mining_Drill_Biofuel"
},
"Decayable": {
"RowName": "Decay_MetaItem"
},
"Generator": {
"RowName": "Deep_Mining_Biofuel_Drill"
},
"Resource": {
"RowName": "Simple_Internal_Flow_Only"
},
"Manual_Tags": {
"GameplayTags": [
{
"TagName": "Item.Machine"
}
]
},
"Generated_Tags": {
"GameplayTags": [
{
"TagName": "Item.Machine"
},
{
"TagName": "Traits.Meshable"
},
{
"TagName": "Traits.Itemable"
},
{
"TagName": "Traits.Interactable"
},
{
"TagName": "Traits.Highlightable"
},
{
"TagName": "Traits.Actionable"
},
{
"TagName": "Traits.Usable"
},
{
"TagName": "Traits.Deployable"
},
{
"TagName": "Traits.Durable"
},
{
"TagName": "Traits.Inventory"
}
],
"ParentTags": []
}
}
]
}
`
expected := `
{
"Rows": [
{
"Name": "Deep_Mining_Drill_Biofuel",
"Meshable": {
"RowName": "Mesh_Deep_Mining_Drill_Biofuel"
},
"Itemable": {
"RowName": "Item_Deep_Mining_Drill_Biofuel"
},
"Interactable": {
"RowName": "Deployable"
},
"Focusable": {
"RowName": "Focusable_1H"
},
"Highlightable": {
"RowName": "Generic"
},
"Actionable": {
"RowName": "Deployable"
},
"Usable": {
"RowName": "Place"
},
"Deployable": {
"RowName": "Deep_Mining_Drill_Biofuel"
},
"Durable": {
"RowName": "Deployable_750"
},
"Inventory": {
"RowName": "Deep_Mining_Drill_Biofuel"
},
"Decayable": {
"RowName": "Decay_MetaItem"
},
"Generator": {
"RowName": "Deep_Mining_Biofuel_Drill"
},
"Resource": {
"RowName": "Simple_Internal_Flow_Only"
},
"Manual_Tags": {
"GameplayTags": [
{
"TagName": "Item.Machine"
}
]
},
"Generated_Tags": {
"GameplayTags": [
{
"TagName": "Item.Machine"
},
{
"TagName": "Traits.Meshable"
},
{
"TagName": "Traits.Itemable"
},
{
"TagName": "Traits.Interactable"
},
{
"TagName": "Traits.Highlightable"
},
{
"TagName": "Traits.Actionable"
},
{
"TagName": "Traits.Usable"
},
{
"TagName": "Traits.Deployable"
},
{
"TagName": "Traits.Durable"
},
{
"TagName": "Traits.Inventory"
}
],
"ParentTags": []
}
,"AdditionalStats": {"(Value=\"BaseDeepMiningDrillSpeed_+%\")":4000}}
]
}
`
command := utils.ModifyCommand{
Name: "test",
Lua: `
for i, row in ipairs(data.Rows) do
-- Special case: Deep_Mining_Drill_Biofuel
if string.find(row.Name, "Deep_Mining_Drill_Biofuel") then
print("[DEBUG] Special case: Deep_Mining_Drill_Biofuel")
if not row.AdditionalStats then
print("[DEBUG] Creating AdditionalStats table for Deep_Mining_Drill_Biofuel")
row.AdditionalStats = {}
end
print("[DEBUG] Setting BaseDeepMiningDrillSpeed_+% to 4000")
row.AdditionalStats["(Value=\\\"BaseDeepMiningDrillSpeed_+%\\\")"] = 4000
end
end
`,
}
commands, err := ProcessJSON(original, command, "test.json")
if err != nil {
t.Fatalf("ProcessJSON failed: %v", err)
}
if len(commands) == 0 {
t.Fatal("Expected at least one command")
}
// Apply the commands
result := original
for _, cmd := range commands {
result = result[:cmd.From] + cmd.With + result[cmd.To:]
}
diff := cmp.Diff(result, expected)
if diff != "" {
t.Errorf("Differences:\n%s", diff)
}
if result != expected {
t.Errorf("Expected:\n%s\nGot:\n%s", expected, result)
}
}
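
The expected output above depends on the escaped-quote key surviving exactly one level of escaping. A standalone illustration of why the map branch in convertValueToJSONString avoids json.Marshal for such keys; the key literal is taken from the test, everything else is illustrative:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Key as it arrives from the Lua script: it already carries one level of
    // backslash-escaped quotes.
    key := `(Value=\"BaseDeepMiningDrillSpeed_+%\")`

    // json.Marshal treats those backslashes as data and escapes them again:
    b, _ := json.Marshal(map[string]int{key: 4000})
    fmt.Println(string(b))
    // {"(Value=\\\"BaseDeepMiningDrillSpeed_+%\\\")":4000}

    // Emitting the key verbatim keeps the single level of escaping that the
    // expected JSON above relies on:
    fmt.Println(`{"` + key + `":4000}`)
    // {"(Value=\"BaseDeepMiningDrillSpeed_+%\")":4000}
}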

View File

@@ -1,6 +1,7 @@
package utils
import (
"errors"
"path/filepath"
"time"
@@ -41,24 +42,25 @@ func GetDB() (DB, error) {
dbFile := filepath.Join("data.sqlite")
getDBLogger.Debug("Opening database file: %q", dbFile)
getDBLogger.Trace("Database configuration: PrepareStmt=true, GORM logger=Silent")
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
// SkipDefaultTransaction: true,
PrepareStmt: true,
Logger: gormlogger.Default.LogMode(gormlogger.Silent),
})
if err != nil {
getDBLogger.Error("Failed to open database: %v", err)
getDBLogger.Error("Failed to open database file %q: %v", dbFile, err)
return nil, err
}
getDBLogger.Debug("Database opened successfully, running auto migration")
getDBLogger.Debug("Database opened successfully, running auto migration for FileSnapshot model")
if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
getDBLogger.Error("Auto migration failed: %v", err)
getDBLogger.Error("Auto migration failed for FileSnapshot model: %v", err)
return nil, err
}
getDBLogger.Debug("Auto migration completed")
getDBLogger.Info("Database initialized and migrated successfully")
globalDB = &DBWrapper{db: db}
getDBLogger.Debug("Database wrapper initialized")
getDBLogger.Debug("Database wrapper initialized and cached globally")
return globalDB, nil
}
@@ -88,7 +90,7 @@ func (db *DBWrapper) FileExists(filePath string) (bool, error) {
}
func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
saveFileLogger := dbLogger.WithPrefix("SaveFile").WithField("filePath", filePath)
saveFileLogger := dbLogger.WithPrefix("SaveFile").WithField("filePath", filePath).WithField("dataSize", len(fileData))
saveFileLogger.Debug("Attempting to save file to database")
saveFileLogger.Trace("File data length: %d", len(fileData))
@@ -98,7 +100,7 @@ func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
return err
}
if exists {
saveFileLogger.Debug("File already exists, skipping save")
saveFileLogger.Debug("File already exists in database, skipping save to avoid overwriting original snapshot")
return nil
}
saveFileLogger.Debug("Creating new file snapshot in database")
@@ -110,7 +112,7 @@ func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
if err != nil {
saveFileLogger.Error("Failed to create file snapshot: %v", err)
} else {
saveFileLogger.Debug("File saved successfully to database")
saveFileLogger.Info("File successfully saved to database")
}
return err
}
@@ -121,8 +123,11 @@ func (db *DBWrapper) GetFile(filePath string) ([]byte, error) {
var fileSnapshot FileSnapshot
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).First(&fileSnapshot).Error
if err != nil {
// Log not-found at debug level (expected on first run) and other failures as warnings, to avoid noisy errors
if errors.Is(err, gorm.ErrRecordNotFound) {
getFileLogger.Debug("File not found in database: %v", err)
} else {
getFileLogger.Warning("Failed to get file from database: %v", err)
}
return nil, err
}
getFileLogger.Debug("File found in database")
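
GetFile now distinguishes the expected first-run case (no snapshot yet) from real query failures via errors.Is. A minimal self-contained sketch of that pattern against an in-memory SQLite database; apart from the file_path column queried above, the FileSnapshot fields here are placeholders, not the repository's actual model:

package main

import (
    "errors"
    "log"

    "gorm.io/driver/sqlite"
    "gorm.io/gorm"
)

// FileSnapshot is a stand-in model; only FilePath mirrors the column queried above.
type FileSnapshot struct {
    ID       uint
    FilePath string
    FileData []byte
}

func main() {
    db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
    if err != nil {
        log.Fatal(err)
    }
    if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
        log.Fatal(err)
    }

    var snap FileSnapshot
    err = db.Where("file_path = ?", "missing.txt").First(&snap).Error
    switch {
    case errors.Is(err, gorm.ErrRecordNotFound):
        log.Println("no snapshot yet; expected on first run")
    case err != nil:
        log.Printf("query failed: %v", err)
    default:
        log.Printf("found snapshot of %d bytes", len(snap.FileData))
    }
}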

View File

@@ -16,6 +16,7 @@ var (
)
func init() {
flagsLogger.Debug("Initializing flags")
flagsLogger.Trace("ParallelFiles initial value: %d, Filter initial value: %q, JSON initial value: %t", *ParallelFiles, *Filter, *JSON)
flagsLogger.Debug("Initializing command-line flags")
flagsLogger.Trace("Initial flag values - ParallelFiles: %d, Filter: %q, JSON: %t", *ParallelFiles, *Filter, *JSON)
flagsLogger.Debug("Flag definitions: -P (parallel files), -f (filter), -json (JSON mode)")
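
The new debug line enumerates the flags this package exposes (-P, -f, -json). A sketch of how such package-level flag variables are typically declared; the defaults and usage strings below are placeholders, not taken from the repository:

package utils

import "flag"

// Placeholder declarations matching the flag names logged above; defaults and
// usage text are illustrative only.
var (
    ParallelFiles = flag.Int("P", 4, "number of files to process in parallel")
    Filter        = flag.String("f", "", "filter which files or commands are processed")
    JSON          = flag.Bool("json", false, "treat inputs as JSON and run commands in JSON mode")
)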
}