Compare commits
27 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a18573c9f8 | |||
| eacc92ce4b | |||
| 3bcc958dda | |||
| 11f0bbee53 | |||
| c145ad0900 | |||
| e02c1f018f | |||
| 07fea6238f | |||
| 5f1fdfa6c1 | |||
| 4fb25d0463 | |||
| bf23894188 | |||
| aec0f9f171 | |||
| 83fed68432 | |||
| 4311533445 | |||
| ce28b948d0 | |||
| efc602e0ba | |||
| 917063db0c | |||
| 3e552428a5 | |||
| 50455c491d | |||
| 12ec399b09 | |||
| 5a49998c2c | |||
| 590f19603e | |||
| ee8c4b9aa5 | |||
| e8d6613ac8 | |||
| 91ad9006fa | |||
| 60ba3ad417 | |||
| b74e4724d4 | |||
| 30246fd626 |
82
README.md
82
README.md
@@ -16,6 +16,7 @@ A Go-based tool for modifying XML, JSON, and text documents using XPath/JSONPath
|
|||||||
- String manipulations
|
- String manipulations
|
||||||
- Date conversions
|
- Date conversions
|
||||||
- Structural changes
|
- Structural changes
|
||||||
|
- CSV/TSV parsing with comments and headers
|
||||||
- Whole ass Lua environment
|
- Whole ass Lua environment
|
||||||
- **Error Handling**: Comprehensive error detection for:
|
- **Error Handling**: Comprehensive error detection for:
|
||||||
- Invalid XML/JSON
|
- Invalid XML/JSON
|
||||||
@@ -101,6 +102,87 @@ chef -xml "//item" "if tonumber(v.stock) > 0 then v.price = v.price * 0.8 end" i
|
|||||||
<item stock="5" price="8.00"/>
|
<item stock="5" price="8.00"/>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### 6. CSV/TSV Processing
|
||||||
|
The Lua environment includes CSV parsing functions that support comments, headers, and custom delimiters.
|
||||||
|
|
||||||
|
```lua
|
||||||
|
-- Basic CSV parsing
|
||||||
|
local rows = fromCSV(csvText)
|
||||||
|
|
||||||
|
-- With options
|
||||||
|
local rows = fromCSV(csvText, {
|
||||||
|
delimiter = "\t", -- Tab delimiter for TSV (default: ",")
|
||||||
|
hasHeaders = true, -- First row is headers (default: false)
|
||||||
|
hasComments = true -- Filter lines starting with # (default: false)
|
||||||
|
})
|
||||||
|
|
||||||
|
-- Access by index
|
||||||
|
local value = rows[1][2]
|
||||||
|
|
||||||
|
-- Access by header name (when hasHeaders = true)
|
||||||
|
local value = rows[1].Name
|
||||||
|
|
||||||
|
-- Convert back to CSV
|
||||||
|
local csv = toCSV(rows, "\t") -- Optional delimiter parameter
|
||||||
|
```
|
||||||
|
|
||||||
|
**Example with commented TSV file:**
|
||||||
|
```lua
|
||||||
|
-- Input file:
|
||||||
|
-- #mercenary_profiles
|
||||||
|
-- Id Name Value
|
||||||
|
-- 1 Test 100
|
||||||
|
-- 2 Test2 200
|
||||||
|
|
||||||
|
local csv = readFile("mercenaries.tsv")
|
||||||
|
local rows = fromCSV(csv, {
|
||||||
|
delimiter = "\t",
|
||||||
|
hasHeaders = true,
|
||||||
|
hasComments = true
|
||||||
|
})
|
||||||
|
|
||||||
|
-- Access data
|
||||||
|
rows[1].Name -- "Test"
|
||||||
|
rows[2].Value -- "200"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Lua Helper Functions
|
||||||
|
|
||||||
|
The Lua environment includes many helper functions:
|
||||||
|
|
||||||
|
### Math Functions
|
||||||
|
- `min(a, b)`, `max(a, b)` - Min/max of two numbers
|
||||||
|
- `round(x, n)` - Round to n decimal places
|
||||||
|
- `floor(x)`, `ceil(x)` - Floor/ceiling functions
|
||||||
|
|
||||||
|
### String Functions
|
||||||
|
- `upper(s)`, `lower(s)` - Case conversion
|
||||||
|
- `trim(s)` - Remove leading/trailing whitespace
|
||||||
|
- `format(s, ...)` - String formatting
|
||||||
|
- `strsplit(inputstr, sep)` - Split string by separator
|
||||||
|
|
||||||
|
### CSV Functions
|
||||||
|
- `fromCSV(csv, options)` - Parse CSV/TSV text into table of rows
|
||||||
|
- Options: `delimiter` (default: ","), `hasHeaders` (default: false), `hasComments` (default: false)
|
||||||
|
- `toCSV(rows, delimiter)` - Convert table of rows back to CSV text
|
||||||
|
|
||||||
|
### Conversion Functions
|
||||||
|
- `num(str)` - Convert string to number (returns 0 if invalid)
|
||||||
|
- `str(num)` - Convert number to string
|
||||||
|
- `is_number(str)` - Check if string is numeric
|
||||||
|
|
||||||
|
### Table Functions
|
||||||
|
- `isArray(t)` - Check if table is a sequential array
|
||||||
|
- `dump(table, depth)` - Print table structure recursively
|
||||||
|
|
||||||
|
### HTTP Functions
|
||||||
|
- `fetch(url, options)` - Make HTTP request, returns response table
|
||||||
|
- Options: `method`, `headers`, `body`
|
||||||
|
- Returns: `{status, statusText, ok, body, headers}`
|
||||||
|
|
||||||
|
### Regex Functions
|
||||||
|
- `re(pattern, input)` - Apply regex pattern, returns table with matches
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
@@ -82,7 +82,7 @@ func TestGlobExpansion(t *testing.T) {
|
|||||||
for _, pattern := range tc.patterns {
|
for _, pattern := range tc.patterns {
|
||||||
patternMap[pattern] = struct{}{}
|
patternMap[pattern] = struct{}{}
|
||||||
}
|
}
|
||||||
files, err := utils.ExpandGLobs(patternMap)
|
files, err := utils.ExpandGlobs(patternMap)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatalf("ExpandGLobs failed: %v", err)
|
t.Fatalf("ExpandGLobs failed: %v", err)
|
||||||
}
|
}
|
||||||
|
|||||||
4
go.mod
4
go.mod
@@ -12,7 +12,6 @@ require (
|
|||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/BurntSushi/toml v1.5.0 // indirect
|
|
||||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||||
github.com/hexops/valast v1.5.0 // indirect
|
github.com/hexops/valast v1.5.0 // indirect
|
||||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||||
@@ -22,7 +21,6 @@ require (
|
|||||||
github.com/mattn/go-sqlite3 v1.14.22 // indirect
|
github.com/mattn/go-sqlite3 v1.14.22 // indirect
|
||||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||||
github.com/spf13/cobra v1.10.1 // indirect
|
|
||||||
github.com/spf13/pflag v1.0.9 // indirect
|
github.com/spf13/pflag v1.0.9 // indirect
|
||||||
github.com/tidwall/match v1.1.1 // indirect
|
github.com/tidwall/match v1.1.1 // indirect
|
||||||
github.com/tidwall/pretty v1.2.0 // indirect
|
github.com/tidwall/pretty v1.2.0 // indirect
|
||||||
@@ -35,7 +33,9 @@ require (
|
|||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/BurntSushi/toml v1.5.0
|
||||||
github.com/google/go-cmp v0.6.0
|
github.com/google/go-cmp v0.6.0
|
||||||
|
github.com/spf13/cobra v1.10.1
|
||||||
github.com/tidwall/gjson v1.18.0
|
github.com/tidwall/gjson v1.18.0
|
||||||
gorm.io/driver/sqlite v1.6.0
|
gorm.io/driver/sqlite v1.6.0
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -84,8 +84,8 @@ END`
|
|||||||
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
||||||
|
|
||||||
// Run the isolate commands
|
// Run the isolate commands
|
||||||
result, err := RunIsolateCommands(association, "test.txt", testContent)
|
result, err := RunIsolateCommands(association, "test.txt", testContent, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run isolate commands: %v", err)
|
t.Fatalf("Failed to run isolate commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -162,8 +162,8 @@ END_SECTION2`
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Run the isolate commands
|
// Run the isolate commands
|
||||||
result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent)
|
result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run isolate commands: %v", err)
|
t.Fatalf("Failed to run isolate commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -234,8 +234,8 @@ func TestIsolateCommandsWithJSONMode(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Run the isolate commands
|
// Run the isolate commands
|
||||||
result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent)
|
result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run isolate commands: %v", err)
|
t.Fatalf("Failed to run isolate commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -309,8 +309,8 @@ END_REGULAR`
|
|||||||
assert.Len(t, association.Commands, 1, "Expected 1 regular command")
|
assert.Len(t, association.Commands, 1, "Expected 1 regular command")
|
||||||
|
|
||||||
// First run isolate commands
|
// First run isolate commands
|
||||||
isolateResult, err := RunIsolateCommands(association, "test.txt", testContent)
|
isolateResult, err := RunIsolateCommands(association, "test.txt", testContent, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run isolate commands: %v", err)
|
t.Fatalf("Failed to run isolate commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -320,8 +320,8 @@ END_REGULAR`
|
|||||||
|
|
||||||
// Then run regular commands
|
// Then run regular commands
|
||||||
commandLoggers := make(map[string]*logger.Logger)
|
commandLoggers := make(map[string]*logger.Logger)
|
||||||
finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers)
|
finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run regular commands: %v", err)
|
t.Fatalf("Failed to run regular commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -364,12 +364,12 @@ irons_spellbooks:chain_lightning
|
|||||||
// Second command: targets all SpellPowerMultiplier with multiplier *4
|
// Second command: targets all SpellPowerMultiplier with multiplier *4
|
||||||
commands := []utils.ModifyCommand{
|
commands := []utils.ModifyCommand{
|
||||||
{
|
{
|
||||||
Name: "healing",
|
Name: "healing",
|
||||||
Regexes: []string{
|
Regexes: []string{
|
||||||
`irons_spellbooks:chain_creeper[\s\S]*?SpellPowerMultiplier = !num`,
|
`irons_spellbooks:chain_creeper[\s\S]*?SpellPowerMultiplier = !num`,
|
||||||
`irons_spellbooks:chain_lightning[\s\S]*?SpellPowerMultiplier = !num`,
|
`irons_spellbooks:chain_lightning[\s\S]*?SpellPowerMultiplier = !num`,
|
||||||
},
|
},
|
||||||
Lua: `v1 * 4`, // This should multiply by 4
|
Lua: `v1 * 4`, // This should multiply by 4
|
||||||
Files: []string{"irons_spellbooks-server.toml"},
|
Files: []string{"irons_spellbooks-server.toml"},
|
||||||
Reset: true,
|
Reset: true,
|
||||||
Isolate: true,
|
Isolate: true,
|
||||||
@@ -377,7 +377,7 @@ irons_spellbooks:chain_lightning
|
|||||||
{
|
{
|
||||||
Name: "spellpower",
|
Name: "spellpower",
|
||||||
Regex: `SpellPowerMultiplier = !num`,
|
Regex: `SpellPowerMultiplier = !num`,
|
||||||
Lua: `v1 * 4`, // This should multiply by 4 again
|
Lua: `v1 * 4`, // This should multiply by 4 again
|
||||||
Files: []string{"irons_spellbooks-server.toml"},
|
Files: []string{"irons_spellbooks-server.toml"},
|
||||||
Reset: true,
|
Reset: true,
|
||||||
Isolate: true,
|
Isolate: true,
|
||||||
@@ -397,8 +397,8 @@ irons_spellbooks:chain_lightning
|
|||||||
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
||||||
|
|
||||||
// Run the isolate commands
|
// Run the isolate commands
|
||||||
result, err := RunIsolateCommands(association, "irons_spellbooks-server.toml", testContent)
|
result, err := RunIsolateCommands(association, "irons_spellbooks-server.toml", testContent, false)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
t.Fatalf("Failed to run isolate commands: %v", err)
|
t.Fatalf("Failed to run isolate commands: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -414,4 +414,4 @@ irons_spellbooks:chain_lightning
|
|||||||
|
|
||||||
t.Logf("Original content:\n%s\n", testContent)
|
t.Logf("Original content:\n%s\n", testContent)
|
||||||
t.Logf("Result content:\n%s\n", result)
|
t.Logf("Result content:\n%s\n", result)
|
||||||
}
|
}
|
||||||
|
|||||||
25
main.go
25
main.go
@@ -12,8 +12,8 @@ import (
|
|||||||
"cook/processor"
|
"cook/processor"
|
||||||
"cook/utils"
|
"cook/utils"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
//go:embed example_cook.toml
|
//go:embed example_cook.toml
|
||||||
@@ -54,12 +54,16 @@ Features:
|
|||||||
- Parallel file processing
|
- Parallel file processing
|
||||||
- Command filtering and organization`,
|
- Command filtering and organization`,
|
||||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||||
CreateExampleConfig()
|
|
||||||
logger.InitFlag()
|
logger.InitFlag()
|
||||||
mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String())
|
mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String())
|
||||||
mainLogger.Trace("Full argv: %v", os.Args)
|
mainLogger.Trace("Full argv: %v", os.Args)
|
||||||
},
|
},
|
||||||
Run: func(cmd *cobra.Command, args []string) {
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
exampleFlag, _ := cmd.Flags().GetBool("example")
|
||||||
|
if exampleFlag {
|
||||||
|
CreateExampleConfig()
|
||||||
|
return
|
||||||
|
}
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
cmd.Usage()
|
cmd.Usage()
|
||||||
return
|
return
|
||||||
@@ -76,6 +80,7 @@ Features:
|
|||||||
rootCmd.Flags().StringP("filter", "f", "", "Filter commands before running them")
|
rootCmd.Flags().StringP("filter", "f", "", "Filter commands before running them")
|
||||||
rootCmd.Flags().Bool("json", false, "Enable JSON mode for processing JSON files")
|
rootCmd.Flags().Bool("json", false, "Enable JSON mode for processing JSON files")
|
||||||
rootCmd.Flags().BoolP("conv", "c", false, "Convert YAML files to TOML format")
|
rootCmd.Flags().BoolP("conv", "c", false, "Convert YAML files to TOML format")
|
||||||
|
rootCmd.Flags().BoolP("example", "e", false, "Generate example_cook.toml and exit")
|
||||||
|
|
||||||
// Set up examples in the help text
|
// Set up examples in the help text
|
||||||
rootCmd.SetUsageTemplate(`Usage:{{if .Runnable}}
|
rootCmd.SetUsageTemplate(`Usage:{{if .Runnable}}
|
||||||
@@ -238,7 +243,7 @@ func runModifier(args []string, cmd *cobra.Command) {
|
|||||||
// Resolve all the files for all the globs
|
// Resolve all the files for all the globs
|
||||||
mainLogger.Info("Found %d unique file patterns", len(globs))
|
mainLogger.Info("Found %d unique file patterns", len(globs))
|
||||||
mainLogger.Debug("Expanding glob patterns to files")
|
mainLogger.Debug("Expanding glob patterns to files")
|
||||||
files, err := utils.ExpandGLobs(globs)
|
files, err := utils.ExpandGlobs(globs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
mainLogger.Error("Failed to expand file patterns: %v", err)
|
mainLogger.Error("Failed to expand file patterns: %v", err)
|
||||||
return
|
return
|
||||||
@@ -335,23 +340,23 @@ func runModifier(args []string, cmd *cobra.Command) {
|
|||||||
isChanged := false
|
isChanged := false
|
||||||
mainLogger.Debug("Running isolate commands for file %q", file)
|
mainLogger.Debug("Running isolate commands for file %q", file)
|
||||||
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr, jsonFlag)
|
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr, jsonFlag)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
mainLogger.Error("Failed to run isolate commands for file %q: %v", file, err)
|
mainLogger.Error("Failed to run isolate commands for file %q: %v", file, err)
|
||||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if err != NothingToDo {
|
if err != ErrNothingToDo {
|
||||||
isChanged = true
|
isChanged = true
|
||||||
}
|
}
|
||||||
|
|
||||||
mainLogger.Debug("Running other commands for file %q", file)
|
mainLogger.Debug("Running other commands for file %q", file)
|
||||||
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers, jsonFlag)
|
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers, jsonFlag)
|
||||||
if err != nil && err != NothingToDo {
|
if err != nil && err != ErrNothingToDo {
|
||||||
mainLogger.Error("Failed to run other commands for file %q: %v", file, err)
|
mainLogger.Error("Failed to run other commands for file %q: %v", file, err)
|
||||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if err != NothingToDo {
|
if err != ErrNothingToDo {
|
||||||
isChanged = true
|
isChanged = true
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -469,7 +474,7 @@ func CreateExampleConfig() {
|
|||||||
createExampleConfigLogger.Info("Wrote example_cook.toml")
|
createExampleConfigLogger.Info("Wrote example_cook.toml")
|
||||||
}
|
}
|
||||||
|
|
||||||
var NothingToDo = errors.New("nothing to do")
|
var ErrNothingToDo = errors.New("nothing to do")
|
||||||
|
|
||||||
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger, jsonFlag bool) (string, error) {
|
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger, jsonFlag bool) (string, error) {
|
||||||
runOtherCommandsLogger := mainLogger.WithPrefix("RunOtherCommands").WithField("file", file)
|
runOtherCommandsLogger := mainLogger.WithPrefix("RunOtherCommands").WithField("file", file)
|
||||||
@@ -560,7 +565,7 @@ func RunOtherCommands(file string, fileDataStr string, association utils.FileCom
|
|||||||
|
|
||||||
if len(modifications) == 0 {
|
if len(modifications) == 0 {
|
||||||
runOtherCommandsLogger.Warning("No modifications found for file")
|
runOtherCommandsLogger.Warning("No modifications found for file")
|
||||||
return fileDataStr, NothingToDo
|
return fileDataStr, ErrNothingToDo
|
||||||
}
|
}
|
||||||
runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))
|
runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))
|
||||||
|
|
||||||
@@ -658,7 +663,7 @@ func RunIsolateCommands(association utils.FileCommandAssociation, file string, f
|
|||||||
}
|
}
|
||||||
if !anythingDone {
|
if !anythingDone {
|
||||||
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
|
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
|
||||||
return fileDataStr, NothingToDo
|
return fileDataStr, ErrNothingToDo
|
||||||
}
|
}
|
||||||
return currentFileData, nil
|
return currentFileData, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
// Package processor provides JSON processing and Lua script execution capabilities
|
||||||
|
// for data transformation and manipulation.
|
||||||
package processor
|
package processor
|
||||||
|
|
||||||
import (
|
import (
|
||||||
@@ -19,9 +21,9 @@ var jsonLogger = logger.Default.WithPrefix("processor/json")
|
|||||||
|
|
||||||
// ProcessJSON applies Lua processing to JSON content
|
// ProcessJSON applies Lua processing to JSON content
|
||||||
func ProcessJSON(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
func ProcessJSON(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||||
processJsonLogger := jsonLogger.WithPrefix("ProcessJSON").WithField("commandName", command.Name).WithField("file", filename)
|
processJSONLogger := jsonLogger.WithPrefix("ProcessJSON").WithField("commandName", command.Name).WithField("file", filename)
|
||||||
processJsonLogger.Debug("Starting JSON processing for file")
|
processJSONLogger.Debug("Starting JSON processing for file")
|
||||||
processJsonLogger.Trace("Initial file content length: %d", len(content))
|
processJSONLogger.Trace("Initial file content length: %d", len(content))
|
||||||
|
|
||||||
var commands []utils.ReplaceCommand
|
var commands []utils.ReplaceCommand
|
||||||
startTime := time.Now()
|
startTime := time.Now()
|
||||||
@@ -30,15 +32,15 @@ func ProcessJSON(content string, command utils.ModifyCommand, filename string) (
|
|||||||
var jsonData interface{}
|
var jsonData interface{}
|
||||||
err := json.Unmarshal([]byte(content), &jsonData)
|
err := json.Unmarshal([]byte(content), &jsonData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
processJsonLogger.Error("Failed to parse JSON content: %v", err)
|
processJSONLogger.Error("Failed to parse JSON content: %v", err)
|
||||||
return commands, fmt.Errorf("failed to parse JSON: %v", err)
|
return commands, fmt.Errorf("failed to parse JSON: %v", err)
|
||||||
}
|
}
|
||||||
processJsonLogger.Debug("Successfully parsed JSON content")
|
processJSONLogger.Debug("Successfully parsed JSON content")
|
||||||
|
|
||||||
// Create Lua state
|
// Create Lua state
|
||||||
L, err := NewLuaState()
|
L, err := NewLuaState()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
processJsonLogger.Error("Error creating Lua state: %v", err)
|
processJSONLogger.Error("Error creating Lua state: %v", err)
|
||||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||||
}
|
}
|
||||||
defer L.Close()
|
defer L.Close()
|
||||||
@@ -49,70 +51,58 @@ func ProcessJSON(content string, command utils.ModifyCommand, filename string) (
|
|||||||
// Convert JSON data to Lua table
|
// Convert JSON data to Lua table
|
||||||
luaTable, err := ToLuaTable(L, jsonData)
|
luaTable, err := ToLuaTable(L, jsonData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
processJsonLogger.Error("Failed to convert JSON to Lua table: %v", err)
|
processJSONLogger.Error("Failed to convert JSON to Lua table: %v", err)
|
||||||
return commands, fmt.Errorf("failed to convert JSON to Lua table: %v", err)
|
return commands, fmt.Errorf("failed to convert JSON to Lua table: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set the JSON data as a global variable
|
// Set the JSON data as a global variable
|
||||||
L.SetGlobal("data", luaTable)
|
L.SetGlobal("data", luaTable)
|
||||||
processJsonLogger.Debug("Set JSON data as Lua global 'data'")
|
processJSONLogger.Debug("Set JSON data as Lua global 'data'")
|
||||||
|
|
||||||
// Build and execute Lua script for JSON mode
|
// Build and execute Lua script for JSON mode
|
||||||
luaExpr := BuildJSONLuaScript(command.Lua)
|
luaExpr := BuildJSONLuaScript(command.Lua)
|
||||||
processJsonLogger.Debug("Built Lua script from expression: %q", command.Lua)
|
processJSONLogger.Debug("Built Lua script from expression: %q", command.Lua)
|
||||||
processJsonLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
processJSONLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||||
|
|
||||||
if err := L.DoString(luaExpr); err != nil {
|
if err := L.DoString(luaExpr); err != nil {
|
||||||
processJsonLogger.Error("Lua script execution failed: %v\nScript: %s", err, utils.LimitString(luaExpr, 200))
|
processJSONLogger.Error("Lua script execution failed: %v\nScript: %s", err, utils.LimitString(luaExpr, 200))
|
||||||
return commands, fmt.Errorf("lua script execution failed: %v", err)
|
return commands, fmt.Errorf("lua script execution failed: %v", err)
|
||||||
}
|
}
|
||||||
processJsonLogger.Debug("Lua script executed successfully")
|
processJSONLogger.Debug("Lua script executed successfully")
|
||||||
|
|
||||||
// Check if modification flag is set
|
// Check if modification flag is set
|
||||||
modifiedVal := L.GetGlobal("modified")
|
modifiedVal := L.GetGlobal("modified")
|
||||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||||
processJsonLogger.Debug("Skipping - no modifications indicated by Lua script")
|
processJSONLogger.Debug("Skipping - no modifications indicated by Lua script")
|
||||||
return commands, nil
|
return commands, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the modified data from Lua
|
// Get the modified data from Lua
|
||||||
modifiedData := L.GetGlobal("data")
|
modifiedData := L.GetGlobal("data")
|
||||||
if modifiedData.Type() != lua.LTTable {
|
if modifiedData.Type() != lua.LTTable {
|
||||||
processJsonLogger.Error("Expected 'data' to be a table after Lua processing, got %s", modifiedData.Type().String())
|
processJSONLogger.Error("Expected 'data' to be a table after Lua processing, got %s", modifiedData.Type().String())
|
||||||
return commands, fmt.Errorf("expected 'data' to be a table after Lua processing")
|
return commands, fmt.Errorf("expected 'data' to be a table after Lua processing")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert back to Go interface
|
// Convert back to Go interface
|
||||||
goData, err := FromLua(L, modifiedData)
|
goData, err := FromLua(L, modifiedData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
processJsonLogger.Error("Failed to convert Lua table back to Go: %v", err)
|
processJSONLogger.Error("Failed to convert Lua table back to Go: %v", err)
|
||||||
return commands, fmt.Errorf("failed to convert Lua table back to Go: %v", err)
|
return commands, fmt.Errorf("failed to convert Lua table back to Go: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
processJsonLogger.Debug("About to call applyChanges with original data and modified data")
|
processJSONLogger.Debug("About to call applyChanges with original data and modified data")
|
||||||
commands, err = applyChanges(content, jsonData, goData)
|
commands, err = applyChanges(content, jsonData, goData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
processJsonLogger.Error("Failed to apply surgical JSON changes: %v", err)
|
processJSONLogger.Error("Failed to apply surgical JSON changes: %v", err)
|
||||||
return commands, fmt.Errorf("failed to apply surgical JSON changes: %v", err)
|
return commands, fmt.Errorf("failed to apply surgical JSON changes: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
processJsonLogger.Debug("Total JSON processing time: %v", time.Since(startTime))
|
processJSONLogger.Debug("Total JSON processing time: %v", time.Since(startTime))
|
||||||
processJsonLogger.Debug("Generated %d total modifications", len(commands))
|
processJSONLogger.Debug("Generated %d total modifications", len(commands))
|
||||||
return commands, nil
|
return commands, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// applyJSONChanges compares original and modified data and applies changes surgically
|
|
||||||
func applyJSONChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
|
||||||
var commands []utils.ReplaceCommand
|
|
||||||
|
|
||||||
appliedCommands, err := applyChanges(content, originalData, modifiedData)
|
|
||||||
if err == nil && len(appliedCommands) > 0 {
|
|
||||||
return appliedCommands, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return commands, fmt.Errorf("failed to make any changes to the json")
|
|
||||||
}
|
|
||||||
|
|
||||||
// applyChanges attempts to make surgical changes while preserving exact formatting
|
// applyChanges attempts to make surgical changes while preserving exact formatting
|
||||||
func applyChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
func applyChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
||||||
var commands []utils.ReplaceCommand
|
var commands []utils.ReplaceCommand
|
||||||
@@ -199,12 +189,10 @@ func applyChanges(content string, originalData, modifiedData interface{}) ([]uti
|
|||||||
|
|
||||||
// Convert the new value to JSON string
|
// Convert the new value to JSON string
|
||||||
newValueStr := convertValueToJSONString(newValue)
|
newValueStr := convertValueToJSONString(newValue)
|
||||||
|
|
||||||
|
|
||||||
// Insert the new field with pretty-printed formatting
|
// Insert the new field with pretty-printed formatting
|
||||||
// Format: ,"fieldName": { ... }
|
// Format: ,"fieldName": { ... }
|
||||||
insertText := fmt.Sprintf(`,"%s": %s`, fieldName, newValueStr)
|
insertText := fmt.Sprintf(`,"%s": %s`, fieldName, newValueStr)
|
||||||
|
|
||||||
|
|
||||||
commands = append(commands, utils.ReplaceCommand{
|
commands = append(commands, utils.ReplaceCommand{
|
||||||
From: startPos,
|
From: startPos,
|
||||||
@@ -437,8 +425,6 @@ func findDeepChanges(basePath string, original, modified interface{}) map[string
|
|||||||
}
|
}
|
||||||
changes[currentPath] = nil // Mark for removal
|
changes[currentPath] = nil // Mark for removal
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
// Elements added - more complex, skip for now
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Same length - check individual elements for value changes
|
// Same length - check individual elements for value changes
|
||||||
|
|||||||
43
processor/luahelper-test-regress.lua
Normal file
43
processor/luahelper-test-regress.lua
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
-- Load the helper script
|
||||||
|
dofile("luahelper.lua")
|
||||||
|
|
||||||
|
-- Test helper function
|
||||||
|
local function assert(condition, message)
|
||||||
|
if not condition then error("ASSERTION FAILED: " .. (message or "unknown error")) end
|
||||||
|
end
|
||||||
|
|
||||||
|
local function test(name, fn)
|
||||||
|
local ok, err = pcall(fn)
|
||||||
|
if ok then
|
||||||
|
print("PASS: " .. name)
|
||||||
|
else
|
||||||
|
print("FAIL: " .. name .. " - " .. tostring(err))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test("regression test 001", function()
|
||||||
|
local csv =
|
||||||
|
[[Id Enabled ModuleId DepartmentId IsDepartment PositionInGraph Parents Modifiers UpgradePrice
|
||||||
|
news_department TRUE navigation TRUE 2 0 NewsAnalyticsDepartment + 1 communication_relay communication_relay
|
||||||
|
nd_charge_bonus TRUE navigation news_department FALSE 1 0 news_department NDSkillChargeBonus + 1 expert_disk expert_disk
|
||||||
|
nd_cooldown_time_reduce TRUE navigation news_department FALSE 3 0 news_department NDCooldownTimeReduce - 2 communication_relay communication_relay]]
|
||||||
|
local rows, err = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = true })
|
||||||
|
if err then error("fromCSV error: " .. err) end
|
||||||
|
assert(#rows == 3, "Should have 3 rows")
|
||||||
|
assert(rows[1].Id == "news_department", "First row Id should be 'news_department'")
|
||||||
|
assert(rows[1].Enabled == "TRUE", "First row Enabled should be 'TRUE'")
|
||||||
|
assert(rows[1].ModuleId == "navigation", "First row ModuleId should be 'navigation'")
|
||||||
|
assert(rows[1].DepartmentId == "", "First row DepartmentId should be ''")
|
||||||
|
assert(rows[1].IsDepartment == "TRUE", "First row IsDepartment should be 'TRUE'")
|
||||||
|
assert(rows.Headers[1] == "Id", "First row Headers should be 'Id'")
|
||||||
|
assert(rows.Headers[2] == "Enabled", "First row Headers should be 'Enabled'")
|
||||||
|
assert(rows.Headers[3] == "ModuleId", "First row Headers should be 'ModuleId'")
|
||||||
|
assert(rows.Headers[4] == "DepartmentId", "First row Headers should be 'DepartmentId'")
|
||||||
|
assert(rows.Headers[5] == "IsDepartment", "First row Headers should be 'IsDepartment'")
|
||||||
|
assert(rows.Headers[6] == "PositionInGraph", "First row Headers should be 'PositionInGraph'")
|
||||||
|
assert(rows.Headers[7] == "Parents", "First row Headers should be 'Parents'")
|
||||||
|
assert(rows.Headers[8] == "Modifiers", "First row Headers should be 'Modifiers'")
|
||||||
|
assert(rows.Headers[9] == "UpgradePrice", "First row Headers should be 'UpgradePrice'")
|
||||||
|
end)
|
||||||
|
|
||||||
|
print("\nAll tests completed!")
|
||||||
534
processor/luahelper-test.lua
Normal file
534
processor/luahelper-test.lua
Normal file
@@ -0,0 +1,534 @@
|
|||||||
|
-- Load the library under test; it installs fromCSV, toCSV, min, max, trim,
-- strsplit, dump, etc. as globals in this environment.
dofile("luahelper.lua")
|
||||||
|
|
||||||
|
-- Test helper function
|
||||||
|
-- File-local replacement for the builtin assert: failures raise with an
-- "ASSERTION FAILED" prefix so they stand out in the test output.
local function assert(condition, message)
    if condition then return end
    local detail = message or "unknown error"
    error("ASSERTION FAILED: " .. detail)
end
|
||||||
|
|
||||||
|
-- Runs one named test case, trapping errors so the rest of the suite keeps
-- going. Prints "PASS: <name>" on success, "FAIL: <name> - <error>" otherwise.
local function test(name, fn)
    local succeeded, failure = pcall(fn)
    if not succeeded then
        print("FAIL: " .. name .. " - " .. tostring(failure))
        return
    end
    print("PASS: " .. name)
end
|
||||||
|
|
||||||
|
-- Option validation: unknown fromCSV options must raise.
test("fromCSV invalid option", function()
    local sample = "a,b,c\n1,2,3"
    local succeeded, raised = pcall(function() fromCSV(sample, { invalidOption = true }) end)
    assert(succeeded == false, "Should raise error")
    assert(string.find(raised, "unknown option"), "Error should mention unknown option")
end)

-- Non-string delimiters are stringified by toCSV rather than rejected.
test("toCSV invalid delimiter", function()
    local input = { { "a", "b", "c" } }
    local rendered = toCSV(input, { delimiter = 123 })
    -- toCSV converts delimiter to string, so 123 becomes "123"
    assert(rendered == "a123b123c", "Should convert delimiter to string")
end)
|
||||||
|
|
||||||
|
-- Plain comma-separated parsing with no options.
test("fromCSV basic", function()
    local text = "a,b,c\n1,2,3\n4,5,6"
    local parsed = fromCSV(text)
    assert(#parsed == 3, "Should have 3 rows")
    assert(parsed[1][1] == "a", "First row first field should be 'a'")
    assert(parsed[2][2] == "2", "Second row second field should be '2'")
end)

-- The header row becomes named keys on each data row.
test("fromCSV with headers", function()
    local text = "foo,bar,baz\n1,2,3\n4,5,6"
    local parsed = fromCSV(text, { hasheader = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1][1] == "1", "First row first field should be '1'")
    assert(parsed[1].foo == "1", "First row foo should be '1'")
    assert(parsed[1].bar == "2", "First row bar should be '2'")
    assert(parsed[1].baz == "3", "First row baz should be '3'")
end)

-- Tab-separated input via the delimiter option.
test("fromCSV with tab delimiter", function()
    local text = "a\tb\tc\n1\t2\t3"
    local parsed = fromCSV(text, { delimiter = "\t" })
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "a", "First row first field should be 'a'")
    assert(parsed[2][2] == "2", "Second row second field should be '2'")
end)

-- Quoted fields may contain the delimiter itself.
test("fromCSV with quoted fields", function()
    local text = '"hello,world","test"\n"foo","bar"'
    local parsed = fromCSV(text)
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "hello,world", "Quoted field with comma should be preserved")
    assert(parsed[1][2] == "test", "Second field should be 'test'")
end)
|
||||||
|
|
||||||
|
-- Serialisation back to comma-separated text.
test("toCSV basic", function()
    local input = { { "a", "b", "c" }, { "1", "2", "3" } }
    local rendered = toCSV(input)
    assert(rendered == "a,b,c\n1,2,3", "CSV output should match expected")
end)

-- Serialisation with a tab delimiter.
test("toCSV with tab delimiter", function()
    local input = { { "a", "b", "c" }, { "1", "2", "3" } }
    local rendered = toCSV(input, { delimiter = "\t" })
    assert(rendered == "a\tb\tc\n1\t2\t3", "TSV output should match expected")
end)

-- Fields containing the delimiter must come back quoted.
test("toCSV with quoted fields", function()
    local input = { { "hello,world", "test" }, { "foo", "bar" } }
    local rendered = toCSV(input)
    assert(rendered == '"hello,world",test\nfoo,bar', "Fields with commas should be quoted")
end)
|
||||||
|
|
||||||
|
-- Parsing then serialising should reproduce the input exactly.
test("fromCSV toCSV round trip", function()
    local source = "a,b,c\n1,2,3\n4,5,6"
    local parsed = fromCSV(source)
    local rendered = toCSV(parsed)
    assert(rendered == source, "Round trip should preserve original")
end)

-- With hasheader the header row is consumed, so only data survives the trip.
test("fromCSV toCSV round trip with headers", function()
    local source = "foo,bar,baz\n1,2,3\n4,5,6"
    local parsed = fromCSV(source, { hasheader = true })
    local rendered = toCSV(parsed)
    local wanted = "1,2,3\n4,5,6"
    assert(rendered == wanted, "Round trip with headers should preserve data rows")
end)
|
||||||
|
|
||||||
|
-- Lines starting with '#' disappear when hascomments is on.
test("fromCSV with comments", function()
    local text = "# This is a comment\nfoo,bar,baz\n1,2,3\n# Another comment\n4,5,6"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comments filtered, header + 2 data rows)")
    assert(parsed[1][1] == "foo", "First row should be header row")
    assert(parsed[2][1] == "1", "Second row first field should be '1'")
    assert(parsed[3][1] == "4", "Third row first field should be '4'")
end)

-- Comment filtering combines with header handling.
test("fromCSV with comments and headers", function()
    local text = "#mercenary_profiles\nId,Name,Value\n1,Test,100\n# End of data\n2,Test2,200"
    local parsed = fromCSV(text, { hasheader = true, hascomments = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1].Id == "1", "First row Id should be '1'")
    assert(parsed[1].Name == "Test", "First row Name should be 'Test'")
    assert(parsed[1].Value == "100", "First row Value should be '100'")
    assert(parsed[2].Id == "2", "Second row Id should be '2'")
end)

-- With hascomments off, '#' lines are ordinary data.
test("fromCSV without comments", function()
    local text = "# This should not be filtered\nfoo,bar\n1,2"
    local parsed = fromCSV(text, { hascomments = false })
    assert(#parsed == 3, "Should have 3 rows (including comment)")
    assert(parsed[1][1] == "# This should not be filtered", "Comment line should be preserved")
end)

-- A comment ahead of the header row is filtered out.
test("fromCSV comment at start", function()
    local text = "# Header comment\nId,Name\n1,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)
|
||||||
|
|
||||||
|
-- Whitespace before '#' does not stop comment detection.
test("fromCSV comment with whitespace", function()
    local text = " # Comment with spaces\nId,Name\n1,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment with spaces filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- A tab before '#' is treated the same way.
test("fromCSV comment with tabs", function()
    local text = "\t# Comment with tab\nId,Name\n1,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment with tab filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Several comment lines in a row are all dropped.
test("fromCSV multiple consecutive comments", function()
    local text = "# First comment\n# Second comment\n# Third comment\nId,Name\n1,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (all comments filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Comments interleaved with data rows are skipped in place.
test("fromCSV comment in middle", function()
    local text = "Id,Name\n1,Test\n# Middle comment\n2,Test2"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row should be first data")
    assert(parsed[3][1] == "2", "Third row should be second data")
end)
|
||||||
|
|
||||||
|
-- A trailing comment after the last data row is ignored.
test("fromCSV comment at end", function()
    local text = "Id,Name\n1,Test\n# End comment"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (end comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row should be data")
end)

-- A bare '#' line is still a comment.
test("fromCSV empty comment", function()
    local text = "#\nId,Name\n1,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (empty comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Comment filtering before the header row when hasheader is set.
test("fromCSV comment with headers enabled", function()
    local text = "#mercenary_profiles\nId,Name,Value\n1,Test,100\n2,Test2,200"
    local parsed = fromCSV(text, { hasheader = true, hascomments = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1].Id == "1", "First row Id should be '1'")
    assert(parsed[1].Name == "Test", "First row Name should be 'Test'")
    assert(parsed[2].Id == "2", "Second row Id should be '2'")
end)

-- Comment filtering works together with a tab delimiter.
test("fromCSV comment with tab delimiter", function()
    local text = "# Comment\nId\tName\n1\tTest"
    local parsed = fromCSV(text, { delimiter = "\t", hascomments = true })
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row first field should be '1'")
end)
|
||||||
|
|
||||||
|
-- All three options at once: tabs, headers, and comments.
test("fromCSV comment with headers and TSV", function()
    local text = "#mercenary_profiles\nId\tName\tValue\n1\tTest\t100"
    local parsed = fromCSV(text, { delimiter = "\t", hasheader = true, hascomments = true })
    assert(#parsed == 1, "Should have 1 data row")
    assert(parsed[1].Id == "1", "Row Id should be '1'")
    assert(parsed[1].Name == "Test", "Row Name should be 'Test'")
    assert(parsed[1].Value == "100", "Row Value should be '100'")
end)

-- '#' inside a data field (not at line start) is not a comment.
test("fromCSV data field starting with hash", function()
    local text = "Id,Name\n1,#NotAComment\n2,Test"
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (data with # not filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][2] == "#NotAComment", "Second row should have #NotAComment as data")
end)

-- A quoted field beginning with '#' is data, not a comment.
test("fromCSV quoted field with hash", function()
    local text = 'Id,Name\n1,"#NotAComment"\n2,Test'
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (quoted # not filtered)")
    assert(parsed[2][2] == "#NotAComment", "Quoted field with # should be preserved")
end)

-- Comments may follow rows that contain quoted fields.
test("fromCSV comment after quoted field", function()
    local text = 'Id,Name\n1,"Test"\n# This is a comment\n2,Test2'
    local parsed = fromCSV(text, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comment filtered)")
    assert(parsed[2][2] == "Test", "Quoted field should be preserved")
    assert(parsed[3][1] == "2", "Third row should be second data row")
end)
|
||||||
|
|
||||||
|
-- Sanity checks for the math helper wrappers exported by luahelper.
test("min function", function()
    assert(min(10, 10) == 10, "min(10, 10) should be 10")
    assert(min(5, 3) == 3, "min(5, 3) should be 3")
    assert(min(-1, 0) == -1, "min(-1, 0) should be -1")
end)

test("max function", function()
    assert(max(10, 10) == 10, "max(10, 10) should be 10")
    assert(max(5, 3) == 5, "max(5, 3) should be 5")
    assert(max(-1, 0) == 0, "max(-1, 0) should be 0")
end)

-- round takes an optional number of decimal places (defaults to 0).
test("round function", function()
    assert(round(3.14159) == 3, "round(3.14159) should be 3")
    assert(round(3.14159, 2) == 3.14, "round(3.14159, 2) should be 3.14")
    assert(round(3.5) == 4, "round(3.5) should be 4")
    assert(round(3.4) == 3, "round(3.4) should be 3")
    assert(round(123.456, 1) == 123.5, "round(123.456, 1) should be 123.5")
end)

-- floor truncates toward negative infinity.
test("floor function", function()
    assert(floor(3.7) == 3, "floor(3.7) should be 3")
    assert(floor(-3.7) == -4, "floor(-3.7) should be -4")
    assert(floor(5) == 5, "floor(5) should be 5")
end)

-- ceil rounds toward positive infinity.
test("ceil function", function()
    assert(ceil(3.2) == 4, "ceil(3.2) should be 4")
    assert(ceil(-3.2) == -3, "ceil(-3.2) should be -3")
    assert(ceil(5) == 5, "ceil(5) should be 5")
end)
|
||||||
|
|
||||||
|
-- Sanity checks for the string helper wrappers.
test("upper function", function()
    assert(upper("123abc") == "123ABC", "upper('123abc') should be '123ABC'")
    assert(upper("hello") == "HELLO", "upper('hello') should be 'HELLO'")
    assert(upper("Hello World") == "HELLO WORLD", "upper('Hello World') should be 'HELLO WORLD'")
end)

test("lower function", function()
    assert(lower("123ABC") == "123abc", "lower('123ABC') should be '123abc'")
    assert(lower("HELLO") == "hello", "lower('HELLO') should be 'hello'")
    assert(lower("Hello World") == "hello world", "lower('Hello World') should be 'hello world'")
end)

-- format is a passthrough to string.format.
test("format function", function()
    assert(format("Hello %s", "World") == "Hello World", "format should work")
    assert(format("Number: %d", 42) == "Number: 42", "format with number should work")
    assert(format("%.2f", 3.14159) == "3.14", "format with float should work")
end)

-- trim strips leading/trailing whitespace only.
test("trim function", function()
    assert(trim("  hello  ") == "hello", "trim should remove leading and trailing spaces")
    assert(trim("  hello  world  ") == "hello  world", "trim should preserve internal spaces")
    assert(trim("hello") == "hello", "trim should not affect strings without spaces")
    assert(trim("   ") == "", "trim should handle all spaces")
end)
|
||||||
|
|
||||||
|
-- strsplit splits on an explicit separator...
test("strsplit function", function()
    local parts = strsplit("a,b,c", ",")
    assert(#parts == 3, "strsplit should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)

-- ...and defaults to whitespace when none is given...
test("strsplit with default separator", function()
    local parts = strsplit("a b c")
    assert(#parts == 3, "strsplit with default should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)

-- ...and accepts arbitrary single-character separators.
test("strsplit with custom separator", function()
    local parts = strsplit("a|b|c", "|")
    assert(#parts == 3, "strsplit with pipe should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)
|
||||||
|
|
||||||
|
-- num coerces strings to numbers, falling back to 0.
test("num function", function()
    assert(num("123") == 123, "num('123') should be 123")
    assert(num("45.67") == 45.67, "num('45.67') should be 45.67")
    assert(num("invalid") == 0, "num('invalid') should be 0")
    assert(num("") == 0, "num('') should be 0")
end)

-- str coerces numbers to their string form.
test("str function", function()
    assert(str(123) == "123", "str(123) should be '123'")
    assert(str(45.67) == "45.67", "str(45.67) should be '45.67'")
    assert(str(0) == "0", "str(0) should be '0'")
end)

-- is_number reports whether a whole string parses as a number.
test("is_number function", function()
    assert(is_number("123") == true, "is_number('123') should be true")
    assert(is_number("45.67") == true, "is_number('45.67') should be true")
    assert(is_number("invalid") == false, "is_number('invalid') should be false")
    assert(is_number("") == false, "is_number('') should be false")
    assert(is_number("123abc") == false, "is_number('123abc') should be false")
end)
|
||||||
|
|
||||||
|
-- isArray must accept exactly the 1-based gap-free sequences and reject
-- maps, sparse tables, 0-indexed tables, and non-table values.
test("isArray function", function()
    assert(isArray({ 1, 2, 3 }) == true, "isArray should return true for sequential array")
    assert(isArray({ "a", "b", "c" }) == true, "isArray should return true for string array")
    assert(isArray({}) == true, "isArray should return true for empty array")
    assert(isArray({ [1] = 1, [2] = 2, [3] = 3 }) == true, "isArray should return true for 1-indexed array")
    assert(isArray({ a = 1, b = 2 }) == false, "isArray should return false for map")
    assert(isArray({ 1, 2, [4] = 4 }) == false, "isArray should return false for sparse array")
    assert(isArray({ [0] = 1, [1] = 2 }) == false, "isArray should return false for 0-indexed array")
    assert(isArray({ [1] = 1, [2] = 2, [4] = 4 }) == false, "isArray should return false for non-sequential array")
    assert(isArray("not a table") == false, "isArray should return false for non-table")
    assert(isArray(123) == false, "isArray should return false for number")
end)
|
||||||
|
|
||||||
|
-- Header-key access on a realistic tab-separated payload.
-- NOTE(review): the rows below are tab-delimited (delimiter = "\t"); some
-- renderers collapse the tabs to spaces — keep this data verbatim.
test("fromCSV assigns header keys correctly", function()
local teststr = [[
#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
]]
local rows = fromCSV(teststr, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows == 2, "Should have 2 data rows")

-- First row: selected columns resolve by header name
assert(rows[1].Id == "john_hawkwood_boss", "First row Id should be 'john_hawkwood_boss'")
assert(rows[1].ModifyStartCost == "20", "First row ModifyStartCost should be '20'")
assert(rows[1].ModifyStep == "0.1", "First row ModifyStep should be '0.1'")
assert(rows[1].Health == "140", "First row Health should be '140'")
assert(rows[1].ActorId == "human_male", "First row ActorId should be 'human_male'")
assert(rows[1].HairColorHex == "#633D08", "First row HairColorHex should be '#633D08'")

-- Second row: same, confirming per-row key binding
assert(rows[2].Id == "francis_reid_daly", "Second row Id should be 'francis_reid_daly'")
assert(rows[2].ModifyStartCost == "20", "Second row ModifyStartCost should be '20'")
assert(rows[2].ModifyStep == "0.1", "Second row ModifyStep should be '0.1'")
assert(rows[2].Health == "130", "Second row Health should be '130'")
assert(rows[2].ActorId == "human_male", "Second row ActorId should be 'human_male'")

-- Numeric indices keep working alongside header keys
assert(rows[1][1] == "john_hawkwood_boss", "First row first field by index should work")
assert(rows[1][2] == "20", "First row second field by index should work")
end)
|
||||||
|
|
||||||
|
-- Minimal header-assignment check.
-- NOTE(review): the CSV literal is tab-delimited (delimiter = "\t"); the
-- separators may render as spaces — keep verbatim.
test("fromCSV debug header assignment", function()
local csv = "Id Name Value\n1 Test 100\n2 Test2 200"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
assert(rows[1].Id == "1", "Id should be '1'")
assert(rows[1].Name == "Test", "Name should be 'Test'")
assert(rows[1].Value == "100", "Value should be '100'")
end)
|
||||||
|
|
||||||
|
-- Real-world game-data shape: comment banner, tab-separated header, data.
-- NOTE(review): payload is tab-delimited; tabs may render as spaces — keep
-- this data verbatim.
test("fromCSV real world mercenary file format", function()
local csv = [[#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
]]
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows == 2, "Should have 2 data rows")

assert(rows[1].Id == "john_hawkwood_boss", "First row Id should be 'john_hawkwood_boss'")
assert(rows[1].ModifyStartCost == "20", "First row ModifyStartCost should be '20'")
assert(rows[2].Id == "francis_reid_daly", "Second row Id should be 'francis_reid_daly'")
end)
|
||||||
|
|
||||||
|
-- End-to-end round trip on a large multi-section game-data file: parse with
-- comments+headers, serialise, re-parse, and compare rows and Headers.
-- NOTE(review): the payload is tab-delimited; tabs may render as spaces —
-- keep this data verbatim.
test("full CSV parser complex", function()
local original = [[
#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
victoria_boudicca 20 0.1 90 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.1 0.4 0.45 0.05 1 1.2 0.3 8 1800 8 1 talent_weapon_distance human_female 0 hair1 #633D08 player Human
persival_fawcett 20 0.1 150 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 6 12 crit 1.70 critchance 0.05 0.5 0.35 0.05 0.6 1 0.25 8 2100 16 1 talent_all_resists human_male 1 hair1 #633D08 player Human
Isabella_capet 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.15 0.55 0.3 0.03 0.8 1.4 0.35 7 1700 14 2 talent_ignore_infection human_female 1 hair3 #FF3100 player Human
maximilian_rohr 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.75 critchance 0.05 0.45 0.45 0.06 0.9 1 0.2 8 2000 14 1 talent_ignore_pain human_male 0 hair2 #FFC400 player Human
priya_marlon 20 0.1 110 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.15 0.45 0.35 0.05 1 1.1 0.3 7 2200 12 1 talent_all_consumables_stack human_female 0 hair2 #FFC400 player Human
jacques_kennet 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.05 0.45 0.35 0.04 0.9 1.2 0.3 8 2300 10 1 talent_reload_time human_male 0 hair1 #908E87 player Human
mirza_aishatu 20 0.1 110 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.55 0.45 0.03 1 1.1 0.25 9 2000 10 1 talent_starving_slower human_female 1 hair2 #633D08 player Human
kenzie_yukio 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.1 0.6 0.4 0.04 1 1 0.4 7 1600 12 1 talent_weight_dodge_affect human_male 0 hair2 #633D08 player Human
marika_wulfnod 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 6 12 crit 1.60 critchance 0.05 0.5 0.5 0.04 1 1 0.3 9 1900 12 1 talent_belt_slots human_female 0 hair1 #FFC400 player Human
auberon_lukas 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 4 8 crit 1.60 critchance 0.15 0.45 0.45 0.05 0.8 1 0.2 9 1900 8 2 talent_weapon_slot human_male 0 hair2 #633D08 player Human
niko_medich 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.05 0.4 0.45 0.04 1 1.3 0.25 8 2000 10 1 talent_pistol_acc human_male 0 hair1 #908E87 player Human
#end

#mercenary_classes
Id ModifyStartCost ModifyStep PerkIds
scouts_of_hades 30 0.1 cqc_specialist_basic military_training_basic gear_maintenance_basic blind_fury_basic fire_transfer_basic assault_reflex_basic
ecclipse_blades 30 0.1 berserkgang_basic athletics_basic reaction_training_basic cold_weapon_wielding_basic cannibalism_basic carnage_basic
tifton_elite 30 0.1 heavy_weaponary_basic grenadier_basic selfhealing_basic stationary_defense_basic spray_and_pray_basic shock_awe_basic
tunnel_rats 30 0.1 cautious_basic handmade_shotgun_ammo_basic marauder_basic dirty_shot_basic vicious_symbiosis_basic covermaster_basic
phoenix_brigade 30 0.1 shielding_basic battle_physicist_basic reinforced_battery_basic revealing_flame_basic cauterize_basic scholar_basic
]]

-- Parse with headers and comments
local rows = fromCSV(original, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows > 0, "Should have parsed rows")

-- Convert back to CSV with headers
local csv = toCSV(rows, { delimiter = "\t", hasheader = true })

-- Parse again
local rows2 = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = false })

-- Verify identical - same number of rows
assert(#rows2 == #rows, "Round trip should have same number of rows")

-- Verify first row data is identical
assert(rows2[1].Id == rows[1].Id, "Round trip first row Id should match")
assert(
rows2[1].ModifyStartCost == rows[1].ModifyStartCost,
"Round trip first row ModifyStartCost should match"
)
assert(rows2[1].Health == rows[1].Health, "Round trip first row Health should match")

-- Verify headers are preserved
assert(rows2.Headers ~= nil, "Round trip rows should have Headers field")
assert(#rows2.Headers == #rows.Headers, "Headers should have same number of elements")
assert(rows2.Headers[1] == rows.Headers[1], "First header should match")
end)
|
||||||
|
|
||||||
|
-- Metatable behaviour: every parsed row exposes the same cells through both
-- numeric indices and header names, and writes through either view.
-- NOTE(review): the CSV literals below are tab-delimited (delimiter = "\t");
-- tabs may render as spaces — keep them verbatim.
test("metatable row[1] equals row.header", function()
local csv = "Id Name Value\n1 Test 100"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
assert(rows[1][1] == rows[1].Id, "row[1] should equal row.Id")
assert(rows[1][2] == rows[1].Name, "row[2] should equal row.Name")
assert(rows[1][3] == rows[1].Value, "row[3] should equal row.Value")
assert(rows[1].Id == "1", "row.Id should be '1'")
assert(rows[1][1] == "1", "row[1] should be '1'")
end)

-- Writing via a header name is visible through the numeric index.
test("metatable set via header name", function()
local csv = "Id Name Value\n1 Test 100"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
rows[1].Id = "999"
assert(rows[1][1] == "999", "Setting row.Id should update row[1]")
assert(rows[1].Id == "999", "row.Id should be '999'")
end)

-- Assigning to a key that is not a known header must raise.
test("metatable error on unknown header", function()
local csv = "Id Name Value\n1 Test 100"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
local ok, errMsg = pcall(function() rows[1].UnknownHeader = "test" end)
assert(ok == false, "Should error on unknown header")
assert(string.find(errMsg, "unknown header"), "Error should mention unknown header")
end)

-- Writing via a numeric index is visible through the header name.
test("metatable numeric indices work", function()
local csv = "Id Name Value\n1 Test 100"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
rows[1][1] = "999"
assert(rows[1].Id == "999", "Setting row[1] should update row.Id")
assert(rows[1][1] == "999", "row[1] should be '999'")
end)

-- Numeric keys beyond the header count behave like plain table slots.
test("metatable numeric keys work", function()
local csv = "Id Name Value\n1 Test 100"
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true })
rows[1][100] = "hundred"
assert(rows[1][100] == "hundred", "Numeric keys should work")
end)
|
||||||
|
|
||||||
|
-- Final marker: distinguishes a completed run from one that died mid-suite.
print("\nAll tests completed!")
|
||||||
352
processor/luahelper.lua
Normal file
352
processor/luahelper.lua
Normal file
@@ -0,0 +1,352 @@
|
|||||||
|
-- Custom Lua helpers for math operations

-- Returns the smaller of two numbers.
function min(a, b) return math.min(a, b) end

-- Returns the larger of two numbers.
function max(a, b) return math.max(a, b) end

-- Rounds x to n decimal places (n defaults to 0, i.e. nearest integer).
function round(x, n)
    if n == nil then n = 0 end
    return math.floor(x * 10 ^ n + 0.5) / 10 ^ n
end

-- Floor/ceiling wrappers around the math library.
function floor(x) return math.floor(x) end

function ceil(x) return math.ceil(x) end

-- Case and formatting wrappers around the string library.
function upper(s) return string.upper(s) end

function lower(s) return string.lower(s) end

function format(s, ...) return string.format(s, ...) end

-- Strips leading and trailing whitespace.
-- The parentheses truncate gsub's (result, count) pair to the trimmed
-- string only, so callers never receive the substitution count as a
-- surprise second return value (e.g. when trim() is the last argument
-- of another call).
function trim(s) return (string.gsub(s, "^%s*(.-)%s*$", "%1")) end
|
||||||
|
|
||||||
|
-- Splits inputstr on the given separator character class (default: any
-- whitespace) and returns the pieces as an array table. Runs of
-- separators are collapsed: empty pieces are never produced because the
-- pattern requires at least one non-separator character.
-- NOTE(review): sep is interpolated into a Lua pattern set unescaped,
-- so pattern-magic characters in sep (e.g. "%", "]") act as pattern
-- syntax — confirm callers only pass literal-safe separators.
function strsplit(inputstr, sep)
    if sep == nil then sep = "%s" end
    local pieces = {}
    local matcher = "([^" .. sep .. "]+)"
    for piece in string.gmatch(inputstr, matcher) do
        pieces[#pieces + 1] = piece
    end
    return pieces
end
|
||||||
|
|
||||||
|
--- Recursively prints a table's keys and values, indented one space per
--- nesting level. Recursion is capped at depth 200 as a guard against
--- cyclic tables.
--- @param tbl table The table to print.
--- @param depth number? Current nesting level (defaults to 0).
function dump(tbl, depth)
    if depth == nil then depth = 0 end
    if depth > 200 then
        print("Error: Depth > 200 in dump()")
        return
    end
    -- Parameter renamed from `table` to `tbl` so the standard `table`
    -- library is not shadowed inside this function.
    for k, v in pairs(tbl) do
        if type(v) == "table" then
            print(string.rep(" ", depth) .. k .. ":")
            dump(v, depth + 1)
        else
            print(string.rep(" ", depth) .. k .. ": ", v)
        end
    end
end
|
||||||
|
|
||||||
|
--- @class ParserOptions
--- @field delimiter string? The field delimiter (default: ",").
--- @field hasheader boolean? If true, first non-comment row is treated as headers (default: false).
--- @field hascomments boolean? If true, lines starting with '#' are skipped (default: false).

--- Fallback values used by fromCSV/toCSV when a caller omits an option.
--- Its key set also defines the recognized option names checked by
--- areOptionsValid.
--- @type ParserOptions
parserDefaultOptions = { delimiter = ",", hasheader = false, hascomments = false }
|
||||||
|
|
||||||
|
--- Validates an options table against the recognized parser option keys
--- (the keys of parserDefaultOptions).
--- Raises an error naming the offending key and listing the valid keys
--- when an unknown option is present, or when options is not a table.
--- Passing nil is allowed and validates nothing.
--- @param options ParserOptions? The options table to validate
function areOptionsValid(options)
    if options == nil then return end

    if type(options) ~= "table" then error("options must be a table") end

    for k, _ in pairs(options) do
        if parserDefaultOptions[k] == nil then
            -- Build the valid-key list only on failure, sorted so the
            -- message is deterministic (pairs order is unspecified) and
            -- joined without a trailing separator.
            local validKeys = {}
            for key, _ in pairs(parserDefaultOptions) do
                validKeys[#validKeys + 1] = key
            end
            table.sort(validKeys)
            error(
                "unknown option: " .. tostring(k)
                    .. " (valid options: " .. table.concat(validKeys, ", ") .. ")"
            )
        end
    end
end
|
||||||
|
|
||||||
|
--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
---
--- Requirements/assumptions:
--- - Input is a single string containing the entire CSV content.
--- - Field separators are specified by delimiter option (default: comma).
--- - Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.
--- - Fields may be quoted with double quotes (").
--- - Inside quoted fields, doubled quotes ("") represent a literal quote character.
--- - No backslash escaping is supported (not part of RFC 4180).
--- - Newlines inside quoted fields are preserved as part of the field.
--- - Leading/trailing spaces are preserved; no trimming is performed.
--- - Empty fields and empty rows are preserved.
--- - The final row is emitted even if the text does not end with a newline.
--- - Lines starting with '#' (after optional leading whitespace) are treated as comments and skipped if hascomments is true.
---
--- @param csv string The CSV text to parse.
--- @param options ParserOptions? Options for the parser
--- @return table #A table (array) of rows; each row is a table with numeric indices and optionally header-named keys.
function fromCSV(csv, options)
    if options == nil then options = {} end

    -- Validate options
    areOptionsValid(options)

    -- Fill in defaults for any omitted option.
    local delimiter = options.delimiter or parserDefaultOptions.delimiter
    local hasheader = options.hasheader or parserDefaultOptions.hasheader
    local hascomments = options.hascomments or parserDefaultOptions.hascomments

    -- allRows: completed rows; fields: fields of the row in progress;
    -- field: characters of the field in progress (joined with concat).
    local allRows = {}
    local fields = {}
    local field = {}

    -- Three-state scanner: outside quotes, inside quotes, and "just saw
    -- a quote while inside quotes" (disambiguates "" from a closing ").
    local STATE_DEFAULT = 1
    local STATE_IN_QUOTES = 2
    local STATE_QUOTE_IN_QUOTES = 3
    local state = STATE_DEFAULT

    local i = 1
    local len = #csv

    while i <= len do
        local c = csv:sub(i, i)

        if state == STATE_DEFAULT then
            if c == '"' then
                -- Opening quote; note any text accumulated before it is
                -- kept, so a"b" concatenates to ab.
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                -- End of field.
                table.insert(fields, table.concat(field))
                field = {}
                i = i + 1
            elseif c == "\r" or c == "\n" then
                -- End of row: flush the last field, then decide whether
                -- the row is a comment line to drop.
                table.insert(fields, table.concat(field))
                field = {}
                local shouldAdd = true
                -- #fields > 0 is always true here (a field was just
                -- inserted), so the comment check always runs when
                -- hascomments is set.
                if hascomments and #fields > 0 then
                    local firstField = fields[1]
                    local trimmed = trim(firstField)
                    if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
                end
                if shouldAdd then table.insert(allRows, fields) end
                fields = {}
                -- Consume "\r\n" as a single line break.
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                -- Ordinary character: accumulate into the current field.
                table.insert(field, c)
                i = i + 1
            end
        elseif state == STATE_IN_QUOTES then
            if c == '"' then
                -- Either a closing quote or the first half of an escaped
                -- "" pair; resolved by the next character.
                state = STATE_QUOTE_IN_QUOTES
                i = i + 1
            else
                -- Everything (including delimiters and newlines) is
                -- literal inside quotes.
                table.insert(field, c)
                i = i + 1
            end
        else -- STATE_QUOTE_IN_QUOTES
            if c == '"' then
                -- "" inside quotes -> one literal quote, stay quoted.
                table.insert(field, '"')
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                -- The quote was a closing quote followed by a delimiter.
                table.insert(fields, table.concat(field))
                field = {}
                state = STATE_DEFAULT
                i = i + 1
            elseif c == "\r" or c == "\n" then
                -- Closing quote at end of row; same flush logic as the
                -- DEFAULT-state newline branch.
                table.insert(fields, table.concat(field))
                field = {}
                local shouldAdd = true
                if hascomments and #fields > 0 then
                    local firstField = fields[1]
                    -- NOTE(review): inline duplicate of trim(); also
                    -- means a row whose first field was quoted and
                    -- starts with '#' is dropped too — confirm intended.
                    local trimmed = string.gsub(firstField, "^%s*(.-)%s*$", "%1")
                    if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
                end
                if shouldAdd then table.insert(allRows, fields) end
                fields = {}
                state = STATE_DEFAULT
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                -- Stray character after a closing quote: fall back to
                -- DEFAULT handling for it.
                state = STATE_DEFAULT
                -- Don't increment i, reprocess character in DEFAULT state
            end
        end
    end

    -- Flush the final row when the input does not end with a newline.
    if #field > 0 or #fields > 0 then
        table.insert(fields, table.concat(field))
        local shouldAdd = true
        if hascomments and #fields > 0 then
            local firstField = fields[1]
            local trimmed = string.gsub(firstField, "^%s*(.-)%s*$", "%1")
            if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
        end
        if shouldAdd then table.insert(allRows, fields) end
    end

    -- Header mode: consume the first row as column names and give every
    -- data row a metatable mapping header name <-> numeric index.
    if hasheader and #allRows > 0 then
        local headers = allRows[1]
        local headerMap = {}
        for j = 1, #headers do
            if headers[j] ~= nil and headers[j] ~= "" then
                -- Header names are trimmed for lookup; rows.Headers below
                -- keeps the untrimmed originals.
                local headerName = trim(headers[j])
                headerMap[headerName] = j
            end
        end

        local header_mt = {
            headers = headerMap,
            -- row.Name reads through to the matching numeric slot;
            -- unknown string keys and numeric keys use raw access.
            __index = function(t, key)
                local mt = getmetatable(t)
                if type(key) == "string" and mt.headers and mt.headers[key] then
                    return rawget(t, mt.headers[key])
                end
                return rawget(t, key)
            end,
            -- row.Name = v writes to the matching numeric slot; unknown
            -- string keys raise, numeric keys write normally.
            __newindex = function(t, key, value)
                local mt = getmetatable(t)
                if type(key) == "string" and mt.headers then
                    if mt.headers[key] then
                        rawset(t, mt.headers[key], value)
                    else
                        error("unknown header: " .. tostring(key))
                    end
                else
                    rawset(t, key, value)
                end
            end,
        }

        -- Copy data rows (skipping the header row) and attach the shared
        -- metatable.
        local rows = {}
        for ii = 2, #allRows do
            local row = {}
            local dataRow = allRows[ii]
            for j = 1, #dataRow do
                row[j] = dataRow[j]
            end
            setmetatable(row, header_mt)
            table.insert(rows, row)
        end
        -- Untrimmed header row, used by toCSV when re-serializing.
        rows.Headers = headers
        return rows
    end

    return allRows
end
|
||||||
|
|
||||||
|
--- Converts a table of rows back to CSV text format (RFC 4180 compliant).
---
--- Requirements:
--- - Input is a table (array) of rows, where each row is a table (array) of field values.
--- - Field values are converted to strings using tostring().
--- - Fields are quoted if they contain the delimiter, newlines, or double quotes.
--- - Double quotes inside quoted fields are doubled ("").
--- - Fields are joined with the specified delimiter; rows are joined with newlines.
--- - If options.hasheader is true and rows have a Headers field, headers are included as the first row.
---
--- @param rows table Array of rows, where each row is an array of field values.
--- @param options ParserOptions? Options for the parser
--- @return string #CSV-formatted text
function toCSV(rows, options)
    if options == nil then options = {} end

    -- Validate options
    areOptionsValid(options)

    local delimiter = options.delimiter or parserDefaultOptions.delimiter
    local includeHeaders = options.hasheader or parserDefaultOptions.hasheader

    -- Stringifies, escapes, and (when needed) quotes a single cell.
    -- find() runs in plain mode (4th arg true) so delimiters that happen
    -- to be pattern-magic characters (".", "%", "-", ...) are matched
    -- literally instead of being interpreted as Lua patterns, which
    -- previously caused spurious quoting.
    local function encodeField(value)
        local s = tostring(value)
        if
            s:find(delimiter, 1, true)
            or s:find("\n", 1, true)
            or s:find("\r", 1, true)
            or s:find('"', 1, true)
        then
            s = '"' .. s:gsub('"', '""') .. '"'
        end
        return s
    end

    local rowStrings = {}

    -- Include headers row if requested and available
    if includeHeaders and #rows > 0 and rows.Headers ~= nil then
        local headerStrings = {}
        for _, header in ipairs(rows.Headers) do
            table.insert(headerStrings, encodeField(header))
        end
        table.insert(rowStrings, table.concat(headerStrings, delimiter))
    end

    for _, row in ipairs(rows) do
        local fieldStrings = {}
        for _, fieldValue in ipairs(row) do
            table.insert(fieldStrings, encodeField(fieldValue))
        end
        table.insert(rowStrings, table.concat(fieldStrings, delimiter))
    end

    return table.concat(rowStrings, "\n")
end
|
||||||
|
|
||||||
|
-- Converts a string to a number; yields 0 when conversion fails.
function num(str)
    local parsed = tonumber(str)
    if parsed == nil then
        return 0
    end
    return parsed
end

-- Converts a number (or any value) to its string representation.
function str(num)
    return tostring(num)
end

-- Returns true when the string parses as a number, false otherwise.
function is_number(str)
    return tonumber(str) ~= nil
end
|
||||||
|
|
||||||
|
-- Returns true when t is a dense sequence: a table whose keys are
-- exactly the integers 1..n with no gaps (the empty table counts).
-- Anything that is not a table returns false.
function isArray(t)
    if type(t) ~= "table" then return false end
    local largest = 0
    local total = 0
    for key in pairs(t) do
        local isPositiveInt = type(key) == "number"
            and key >= 1
            and math.floor(key) == key
        if not isPositiveInt then return false end
        if key > largest then largest = key end
        total = total + 1
    end
    return largest == total
end
|
||||||
|
|
||||||
|
-- Result flag initialized to false before user expressions run; scripts
-- set it to true (generated wrappers do `modified = res == nil or res`)
-- — presumably read back by the Go host after execution; verify there.
modified = false
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
package processor
|
package processor
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
_ "embed"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
@@ -13,6 +14,9 @@ import (
|
|||||||
lua "github.com/yuin/gopher-lua"
|
lua "github.com/yuin/gopher-lua"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
//go:embed luahelper.lua
|
||||||
|
var helperScript string
|
||||||
|
|
||||||
// processorLogger is a scoped logger for the processor package.
|
// processorLogger is a scoped logger for the processor package.
|
||||||
var processorLogger = logger.Default.WithPrefix("processor")
|
var processorLogger = logger.Default.WithPrefix("processor")
|
||||||
|
|
||||||
@@ -160,84 +164,6 @@ func InitLuaHelpers(L *lua.LState) error {
|
|||||||
initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
|
initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
|
||||||
initLuaHelpersLogger.Debug("Loading Lua helper functions")
|
initLuaHelpersLogger.Debug("Loading Lua helper functions")
|
||||||
|
|
||||||
helperScript := `
|
|
||||||
-- Custom Lua helpers for math operations
|
|
||||||
function min(a, b) return math.min(a, b) end
|
|
||||||
function max(a, b) return math.max(a, b) end
|
|
||||||
function round(x, n)
|
|
||||||
if n == nil then n = 0 end
|
|
||||||
return math.floor(x * 10^n + 0.5) / 10^n
|
|
||||||
end
|
|
||||||
function floor(x) return math.floor(x) end
|
|
||||||
function ceil(x) return math.ceil(x) end
|
|
||||||
function upper(s) return string.upper(s) end
|
|
||||||
function lower(s) return string.lower(s) end
|
|
||||||
function format(s, ...) return string.format(s, ...) end
|
|
||||||
function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end
|
|
||||||
|
|
||||||
-- String split helper
|
|
||||||
function strsplit(inputstr, sep)
|
|
||||||
if sep == nil then
|
|
||||||
sep = "%s"
|
|
||||||
end
|
|
||||||
local t = {}
|
|
||||||
for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
|
|
||||||
table.insert(t, str)
|
|
||||||
end
|
|
||||||
return t
|
|
||||||
end
|
|
||||||
|
|
||||||
---@param table table
|
|
||||||
---@param depth number?
|
|
||||||
function DumpTable(table, depth)
|
|
||||||
if depth == nil then
|
|
||||||
depth = 0
|
|
||||||
end
|
|
||||||
if (depth > 200) then
|
|
||||||
print("Error: Depth > 200 in dumpTable()")
|
|
||||||
return
|
|
||||||
end
|
|
||||||
for k, v in pairs(table) do
|
|
||||||
if (type(v) == "table") then
|
|
||||||
print(string.rep(" ", depth) .. k .. ":")
|
|
||||||
DumpTable(v, depth + 1)
|
|
||||||
else
|
|
||||||
print(string.rep(" ", depth) .. k .. ": ", v)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
-- String to number conversion helper
|
|
||||||
function num(str)
|
|
||||||
return tonumber(str) or 0
|
|
||||||
end
|
|
||||||
|
|
||||||
-- Number to string conversion
|
|
||||||
function str(num)
|
|
||||||
return tostring(num)
|
|
||||||
end
|
|
||||||
|
|
||||||
-- Check if string is numeric
|
|
||||||
function is_number(str)
|
|
||||||
return tonumber(str) ~= nil
|
|
||||||
end
|
|
||||||
|
|
||||||
function isArray(t)
|
|
||||||
if type(t) ~= "table" then return false end
|
|
||||||
local max = 0
|
|
||||||
local count = 0
|
|
||||||
for k, _ in pairs(t) do
|
|
||||||
if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then
|
|
||||||
return false
|
|
||||||
end
|
|
||||||
max = math.max(max, k)
|
|
||||||
count = count + 1
|
|
||||||
end
|
|
||||||
return max == count
|
|
||||||
end
|
|
||||||
|
|
||||||
modified = false
|
|
||||||
`
|
|
||||||
if err := L.DoString(helperScript); err != nil {
|
if err := L.DoString(helperScript); err != nil {
|
||||||
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
|
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
|
||||||
return fmt.Errorf("error loading helper functions: %v", err)
|
return fmt.Errorf("error loading helper functions: %v", err)
|
||||||
@@ -303,8 +229,8 @@ func BuildLuaScript(luaExpr string) string {
|
|||||||
|
|
||||||
// BuildJSONLuaScript prepares a Lua expression for JSON mode
|
// BuildJSONLuaScript prepares a Lua expression for JSON mode
|
||||||
func BuildJSONLuaScript(luaExpr string) string {
|
func BuildJSONLuaScript(luaExpr string) string {
|
||||||
buildJsonLuaScriptLogger := processorLogger.WithPrefix("BuildJSONLuaScript").WithField("inputLuaExpr", luaExpr)
|
buildJSONLuaScriptLogger := processorLogger.WithPrefix("BuildJSONLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||||
buildJsonLuaScriptLogger.Debug("Building full Lua script for JSON mode from expression")
|
buildJSONLuaScriptLogger.Debug("Building full Lua script for JSON mode from expression")
|
||||||
|
|
||||||
// Perform $var substitutions from globalVariables
|
// Perform $var substitutions from globalVariables
|
||||||
luaExpr = replaceVariables(luaExpr)
|
luaExpr = replaceVariables(luaExpr)
|
||||||
@@ -316,7 +242,7 @@ func BuildJSONLuaScript(luaExpr string) string {
|
|||||||
local res = run()
|
local res = run()
|
||||||
modified = res == nil or res
|
modified = res == nil or res
|
||||||
`, luaExpr)
|
`, luaExpr)
|
||||||
buildJsonLuaScriptLogger.Trace("Generated full JSON Lua script: %q", utils.LimitString(fullScript, 200))
|
buildJSONLuaScriptLogger.Trace("Generated full JSON Lua script: %q", utils.LimitString(fullScript, 200))
|
||||||
|
|
||||||
return fullScript
|
return fullScript
|
||||||
}
|
}
|
||||||
@@ -385,9 +311,9 @@ func fetch(L *lua.LState) int {
|
|||||||
fetchLogger.Debug("Fetching URL: %q", url)
|
fetchLogger.Debug("Fetching URL: %q", url)
|
||||||
|
|
||||||
// Get options from second argument if provided
|
// Get options from second argument if provided
|
||||||
var method string = "GET"
|
var method = "GET"
|
||||||
var headers map[string]string = make(map[string]string)
|
var headers = make(map[string]string)
|
||||||
var body string = ""
|
var body = ""
|
||||||
|
|
||||||
if L.GetTop() > 1 {
|
if L.GetTop() > 1 {
|
||||||
options := L.ToTable(2)
|
options := L.ToTable(2)
|
||||||
@@ -501,8 +427,8 @@ func EvalRegex(L *lua.LState) int {
|
|||||||
if len(matches) > 0 {
|
if len(matches) > 0 {
|
||||||
matchesTable := L.NewTable()
|
matchesTable := L.NewTable()
|
||||||
for i, match := range matches {
|
for i, match := range matches {
|
||||||
matchesTable.RawSetString(fmt.Sprintf("%d", i), lua.LString(match))
|
matchesTable.RawSetInt(i+1, lua.LString(match))
|
||||||
evalRegexLogger.Debug("Set table[%d] = %q", i, match)
|
evalRegexLogger.Debug("Set table[%d] = %q", i+1, match)
|
||||||
}
|
}
|
||||||
L.Push(matchesTable)
|
L.Push(matchesTable)
|
||||||
} else {
|
} else {
|
||||||
@@ -519,25 +445,27 @@ func GetLuaFunctionsHelp() string {
|
|||||||
return `Lua Functions Available in Global Environment:
|
return `Lua Functions Available in Global Environment:
|
||||||
|
|
||||||
MATH FUNCTIONS:
|
MATH FUNCTIONS:
|
||||||
min(a, b) - Returns the minimum of two numbers
|
min(a, b) - Returns the minimum of two numbers
|
||||||
max(a, b) - Returns the maximum of two numbers
|
max(a, b) - Returns the maximum of two numbers
|
||||||
round(x, n) - Rounds x to n decimal places (default 0)
|
round(x, n) - Rounds x to n decimal places (default 0)
|
||||||
floor(x) - Returns the floor of x
|
floor(x) - Returns the floor of x
|
||||||
ceil(x) - Returns the ceiling of x
|
ceil(x) - Returns the ceiling of x
|
||||||
|
|
||||||
STRING FUNCTIONS:
|
STRING FUNCTIONS:
|
||||||
upper(s) - Converts string to uppercase
|
upper(s) - Converts string to uppercase
|
||||||
lower(s) - Converts string to lowercase
|
lower(s) - Converts string to lowercase
|
||||||
format(s, ...) - Formats string using Lua string.format
|
format(s, ...) - Formats string using Lua string.format
|
||||||
trim(s) - Removes leading/trailing whitespace
|
trim(s) - Removes leading/trailing whitespace
|
||||||
strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
|
strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
|
||||||
num(str) - Converts string to number (returns 0 if invalid)
|
fromCSV(csv, delimiter, hasHeaders) - Parses CSV text into rows of fields (delimiter defaults to ",", hasHeaders defaults to false)
|
||||||
str(num) - Converts number to string
|
toCSV(rows, delimiter) - Converts table of rows to CSV text format (delimiter defaults to ",")
|
||||||
is_number(str) - Returns true if string is numeric
|
num(str) - Converts string to number (returns 0 if invalid)
|
||||||
|
str(num) - Converts number to string
|
||||||
|
is_number(str) - Returns true if string is numeric
|
||||||
|
|
||||||
TABLE FUNCTIONS:
|
TABLE FUNCTIONS:
|
||||||
DumpTable(table, depth) - Prints table structure recursively
|
dump(table, depth) - Prints table structure recursively
|
||||||
isArray(t) - Returns true if table is a sequential array
|
isArray(t) - Returns true if table is a sequential array
|
||||||
|
|
||||||
HTTP FUNCTIONS:
|
HTTP FUNCTIONS:
|
||||||
fetch(url, options) - Makes HTTP request, returns response table
|
fetch(url, options) - Makes HTTP request, returns response table
|
||||||
@@ -552,12 +480,12 @@ UTILITY FUNCTIONS:
|
|||||||
print(...) - Prints arguments to Go logger
|
print(...) - Prints arguments to Go logger
|
||||||
|
|
||||||
EXAMPLES:
|
EXAMPLES:
|
||||||
round(3.14159, 2) -> 3.14
|
round(3.14159, 2) -> 3.14
|
||||||
strsplit("a,b,c", ",") -> {"a", "b", "c"}
|
strsplit("a,b,c", ",") -> {"a", "b", "c"}
|
||||||
upper("hello") -> "HELLO"
|
upper("hello") -> "HELLO"
|
||||||
min(5, 3) -> 3
|
min(5, 3) -> 3
|
||||||
num("123") -> 123
|
num("123") -> 123
|
||||||
is_number("abc") -> false
|
is_number("abc") -> false
|
||||||
fetch("https://api.example.com/data")
|
fetch("https://api.example.com/data")
|
||||||
re("(\\w+)@(\\w+)", "user@domain.com") -> {"user@domain.com", "user", "domain.com"}`
|
re("(\\w+)@(\\w+)", "user@domain.com") -> {"user@domain.com", "user", "domain.com"}`
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
package processor_test
|
package processor_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -30,8 +29,8 @@ func TestEvalRegex_CaptureGroupsReturned(t *testing.T) {
|
|||||||
}
|
}
|
||||||
expected := []string{"test-42", "test", "42"}
|
expected := []string{"test-42", "test", "42"}
|
||||||
for i, v := range expected {
|
for i, v := range expected {
|
||||||
val := tbl.RawGetString(fmt.Sprintf("%d", i))
|
val := tbl.RawGetInt(i + 1)
|
||||||
assert.Equal(t, lua.LString(v), val, "Expected index %d to be %q", i, v)
|
assert.Equal(t, lua.LString(v), val, "Expected index %d to be %q", i+1, v)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -67,9 +66,9 @@ func TestEvalRegex_NoCaptureGroups(t *testing.T) {
|
|||||||
if !ok {
|
if !ok {
|
||||||
t.Fatalf("Expected Lua table, got %T", out)
|
t.Fatalf("Expected Lua table, got %T", out)
|
||||||
}
|
}
|
||||||
fullMatch := tbl.RawGetString("0")
|
fullMatch := tbl.RawGetInt(1)
|
||||||
assert.Equal(t, lua.LString("foo123"), fullMatch)
|
assert.Equal(t, lua.LString("foo123"), fullMatch)
|
||||||
// There should be only the full match (index 0)
|
// There should be only the full match (index 1)
|
||||||
count := 0
|
count := 0
|
||||||
tbl.ForEach(func(k, v lua.LValue) {
|
tbl.ForEach(func(k, v lua.LValue) {
|
||||||
count++
|
count++
|
||||||
|
|||||||
@@ -22,9 +22,9 @@ type CaptureGroup struct {
|
|||||||
Range [2]int
|
Range [2]int
|
||||||
}
|
}
|
||||||
|
|
||||||
// ProcessContent applies regex replacement with Lua processing
|
// ProcessRegex applies regex replacement with Lua processing.
|
||||||
// The filename here exists ONLY so we can pass it to the lua environment
|
// The filename here exists ONLY so we can pass it to the lua environment.
|
||||||
// It's not used for anything else
|
// It's not used for anything else.
|
||||||
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||||
processRegexLogger := regexLogger.WithPrefix("ProcessRegex").WithField("commandName", command.Name).WithField("file", filename)
|
processRegexLogger := regexLogger.WithPrefix("ProcessRegex").WithField("commandName", command.Name).WithField("file", filename)
|
||||||
processRegexLogger.Debug("Starting regex processing for file")
|
processRegexLogger.Debug("Starting regex processing for file")
|
||||||
@@ -53,7 +53,7 @@ func ProcessRegex(content string, command utils.ModifyCommand, filename string)
|
|||||||
processRegexLogger.Error("Error compiling pattern %q: %v", pattern, err)
|
processRegexLogger.Error("Error compiling pattern %q: %v", pattern, err)
|
||||||
return commands, fmt.Errorf("error compiling pattern: %v", err)
|
return commands, fmt.Errorf("error compiling pattern: %v", err)
|
||||||
}
|
}
|
||||||
processRegexLogger.Debug("Compiled pattern successfully in %v", time.Since(patternCompileStart))
|
processRegexLogger.Debug("Compiled pattern successfully in %v. Pattern: %s", time.Since(patternCompileStart), pattern)
|
||||||
|
|
||||||
// Same here, it's just string concatenation, it won't kill us
|
// Same here, it's just string concatenation, it won't kill us
|
||||||
// More important is that we don't fuck up the command
|
// More important is that we don't fuck up the command
|
||||||
@@ -77,7 +77,7 @@ func ProcessRegex(content string, command utils.ModifyCommand, filename string)
|
|||||||
processRegexLogger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
processRegexLogger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
||||||
|
|
||||||
if len(indices) == 0 {
|
if len(indices) == 0 {
|
||||||
processRegexLogger.Warning("No matches found for regex: %q", pattern)
|
processRegexLogger.Warning("No matches found for regex: %s", pattern)
|
||||||
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||||
return commands, nil
|
return commands, nil
|
||||||
}
|
}
|
||||||
@@ -216,9 +216,6 @@ func ProcessRegex(content string, command utils.ModifyCommand, filename string)
|
|||||||
}
|
}
|
||||||
|
|
||||||
if replacement == "" {
|
if replacement == "" {
|
||||||
// Apply the modifications to the original match
|
|
||||||
replacement = matchContent
|
|
||||||
|
|
||||||
// Count groups that were actually modified
|
// Count groups that were actually modified
|
||||||
modifiedGroupsCount := 0
|
modifiedGroupsCount := 0
|
||||||
for _, capture := range updatedCaptureGroups {
|
for _, capture := range updatedCaptureGroups {
|
||||||
@@ -335,6 +332,9 @@ func resolveRegexPlaceholders(pattern string) string {
|
|||||||
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
|
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
|
||||||
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
|
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
|
||||||
|
|
||||||
|
pattern = strings.ReplaceAll(pattern, "\n", "\r?\n")
|
||||||
|
resolveLogger.Debug("Added optional carriage return support for Windows line endings")
|
||||||
|
|
||||||
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
|
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
|
||||||
// !rep(pattern, count) repeats the pattern n times
|
// !rep(pattern, count) repeats the pattern n times
|
||||||
// Inserting !any between each repetition
|
// Inserting !any between each repetition
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ func normalizeWhitespace(s string) string {
|
|||||||
return re.ReplaceAllString(strings.TrimSpace(s), " ")
|
return re.ReplaceAllString(strings.TrimSpace(s), " ")
|
||||||
}
|
}
|
||||||
|
|
||||||
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
func APIAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||||
command := utils.ModifyCommand{
|
command := utils.ModifyCommand{
|
||||||
Regex: regex,
|
Regex: regex,
|
||||||
Lua: lua,
|
Lua: lua,
|
||||||
@@ -79,7 +79,7 @@ func TestSimpleValueMultiplication(t *testing.T) {
|
|||||||
</item>
|
</item>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+)</value>`, "v1 = v1*1.5")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+)</value>`, "v1 = v1*1.5")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||||
@@ -100,7 +100,7 @@ func TestShorthandNotation(t *testing.T) {
|
|||||||
</item>
|
</item>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+)</value>`, "v1*1.5")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+)</value>`, "v1*1.5")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||||
@@ -121,7 +121,7 @@ func TestShorthandNotationFloats(t *testing.T) {
|
|||||||
</item>
|
</item>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+\.\d+)</value>`, "v1*1.5")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+\.\d+)</value>`, "v1*1.5")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||||
@@ -146,7 +146,7 @@ func TestArrayNotation(t *testing.T) {
|
|||||||
</prices>
|
</prices>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<price>(\d+)</price>`, "v1*2")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<price>(\d+)</price>`, "v1*2")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
||||||
@@ -167,7 +167,7 @@ func TestMultipleNumericMatches(t *testing.T) {
|
|||||||
<entry>400</entry>
|
<entry>400</entry>
|
||||||
</data>`
|
</data>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `<entry>(\d+)</entry>`, "v1*2")
|
result, mods, matches, err := APIAdaptor(content, `<entry>(\d+)</entry>`, "v1*2")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
||||||
@@ -186,7 +186,7 @@ func TestMultipleStringMatches(t *testing.T) {
|
|||||||
<name>Mary_modified</name>
|
<name>Mary_modified</name>
|
||||||
</data>`
|
</data>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `<name>([A-Za-z]+)</name>`, `s1 = s1 .. "_modified"`)
|
result, mods, matches, err := APIAdaptor(content, `<name>([A-Za-z]+)</name>`, `s1 = s1 .. "_modified"`)
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||||
@@ -205,7 +205,7 @@ func TestStringUpperCase(t *testing.T) {
|
|||||||
<user>MARY</user>
|
<user>MARY</user>
|
||||||
</users>`
|
</users>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `<user>([A-Za-z]+)</user>`, `s1 = string.upper(s1)`)
|
result, mods, matches, err := APIAdaptor(content, `<user>([A-Za-z]+)</user>`, `s1 = string.upper(s1)`)
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||||
@@ -224,7 +224,7 @@ func TestStringConcatenation(t *testing.T) {
|
|||||||
<product>Banana_fruit</product>
|
<product>Banana_fruit</product>
|
||||||
</products>`
|
</products>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `<product>([A-Za-z]+)</product>`, `s1 = s1 .. "_fruit"`)
|
result, mods, matches, err := APIAdaptor(content, `<product>([A-Za-z]+)</product>`, `s1 = s1 .. "_fruit"`)
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||||
@@ -254,7 +254,7 @@ func TestDecimalValues(t *testing.T) {
|
|||||||
regex := regexp.MustCompile(`(?s)<value>([0-9.]+)</value>.*?<multiplier>([0-9.]+)</multiplier>`)
|
regex := regexp.MustCompile(`(?s)<value>([0-9.]+)</value>.*?<multiplier>([0-9.]+)</multiplier>`)
|
||||||
luaExpr := BuildLuaScript("v1 = v1 * v2")
|
luaExpr := BuildLuaScript("v1 = v1 * v2")
|
||||||
|
|
||||||
result, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
result, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
|
|
||||||
normalizedModified := normalizeWhitespace(result)
|
normalizedModified := normalizeWhitespace(result)
|
||||||
@@ -282,7 +282,7 @@ func TestLuaMathFunctions(t *testing.T) {
|
|||||||
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
||||||
luaExpr := BuildLuaScript("v1 = math.sqrt(v1)")
|
luaExpr := BuildLuaScript("v1 = math.sqrt(v1)")
|
||||||
|
|
||||||
modifiedContent, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
modifiedContent, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
|
|
||||||
normalizedModified := normalizeWhitespace(modifiedContent)
|
normalizedModified := normalizeWhitespace(modifiedContent)
|
||||||
@@ -310,7 +310,7 @@ func TestDirectAssignment(t *testing.T) {
|
|||||||
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
||||||
luaExpr := BuildLuaScript("=0")
|
luaExpr := BuildLuaScript("=0")
|
||||||
|
|
||||||
modifiedContent, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
modifiedContent, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
|
|
||||||
normalizedModified := normalizeWhitespace(modifiedContent)
|
normalizedModified := normalizeWhitespace(modifiedContent)
|
||||||
@@ -369,7 +369,7 @@ func TestStringAndNumericOperations(t *testing.T) {
|
|||||||
luaExpr := BuildLuaScript(tt.luaExpression)
|
luaExpr := BuildLuaScript(tt.luaExpression)
|
||||||
|
|
||||||
// Process with our function
|
// Process with our function
|
||||||
result, modCount, _, err := ApiAdaptor(tt.input, pattern, luaExpr)
|
result, modCount, _, err := APIAdaptor(tt.input, pattern, luaExpr)
|
||||||
assert.NoError(t, err, "Process function failed: %v", err)
|
assert.NoError(t, err, "Process function failed: %v", err)
|
||||||
|
|
||||||
// Check results
|
// Check results
|
||||||
@@ -430,7 +430,7 @@ func TestEdgeCases(t *testing.T) {
|
|||||||
luaExpr := BuildLuaScript(tt.luaExpression)
|
luaExpr := BuildLuaScript(tt.luaExpression)
|
||||||
|
|
||||||
// Process with our function
|
// Process with our function
|
||||||
result, modCount, _, err := ApiAdaptor(tt.input, pattern, luaExpr)
|
result, modCount, _, err := APIAdaptor(tt.input, pattern, luaExpr)
|
||||||
assert.NoError(t, err, "Process function failed: %v", err)
|
assert.NoError(t, err, "Process function failed: %v", err)
|
||||||
|
|
||||||
// Check results
|
// Check results
|
||||||
@@ -453,7 +453,7 @@ func TestNamedCaptureGroups(t *testing.T) {
|
|||||||
</item>
|
</item>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(?<amount>\d+)</value>`, "amount = amount * 2")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(?<amount>\d+)</value>`, "amount = amount * 2")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||||
@@ -474,7 +474,7 @@ func TestNamedCaptureGroupsNum(t *testing.T) {
|
|||||||
</item>
|
</item>
|
||||||
</config>`
|
</config>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(?<amount>!num)</value>`, "amount = amount * 2")
|
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(?<amount>!num)</value>`, "amount = amount * 2")
|
||||||
|
|
||||||
assert.NoError(t, err, "Error processing content: %v", err)
|
assert.NoError(t, err, "Error processing content: %v", err)
|
||||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||||
@@ -495,7 +495,7 @@ func TestMultipleNamedCaptureGroups(t *testing.T) {
|
|||||||
<quantity>15</quantity>
|
<quantity>15</quantity>
|
||||||
</product>`
|
</product>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<name>(?<prodName>[^<]+)</name>.*?<price>(?<prodPrice>\d+\.\d+)</price>.*?<quantity>(?<prodQty>\d+)</quantity>`,
|
`(?s)<name>(?<prodName>[^<]+)</name>.*?<price>(?<prodPrice>\d+\.\d+)</price>.*?<quantity>(?<prodQty>\d+)</quantity>`,
|
||||||
`prodName = string.upper(prodName)
|
`prodName = string.upper(prodName)
|
||||||
prodPrice = round(prodPrice + 8, 2)
|
prodPrice = round(prodPrice + 8, 2)
|
||||||
@@ -518,7 +518,7 @@ func TestMixedIndexedAndNamedCaptures(t *testing.T) {
|
|||||||
<data>VALUE</data>
|
<data>VALUE</data>
|
||||||
</entry>`
|
</entry>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<id>(\d+)</id>.*?<data>(?<dataField>[^<]+)</data>`,
|
`(?s)<id>(\d+)</id>.*?<data>(?<dataField>[^<]+)</data>`,
|
||||||
`v1 = v1 * 2
|
`v1 = v1 * 2
|
||||||
dataField = string.upper(dataField)`)
|
dataField = string.upper(dataField)`)
|
||||||
@@ -550,7 +550,7 @@ func TestComplexNestedNamedCaptures(t *testing.T) {
|
|||||||
</contact>
|
</contact>
|
||||||
</person>`
|
</person>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<details>.*?<name>(?<fullName>[^<]+)</name>.*?<age>(?<age>\d+)</age>`,
|
`(?s)<details>.*?<name>(?<fullName>[^<]+)</name>.*?<age>(?<age>\d+)</age>`,
|
||||||
`fullName = string.upper(fullName) .. " (" .. age .. ")"`)
|
`fullName = string.upper(fullName) .. " (" .. age .. ")"`)
|
||||||
|
|
||||||
@@ -571,7 +571,7 @@ func TestNamedCaptureWithVariableReadback(t *testing.T) {
|
|||||||
<mana>300</mana>
|
<mana>300</mana>
|
||||||
</stats>`
|
</stats>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<health>(?<hp>\d+)</health>.*?<mana>(?<mp>\d+)</mana>`,
|
`(?s)<health>(?<hp>\d+)</health>.*?<mana>(?<mp>\d+)</mana>`,
|
||||||
`hp = hp * 1.5
|
`hp = hp * 1.5
|
||||||
mp = mp * 1.5`)
|
mp = mp * 1.5`)
|
||||||
@@ -587,7 +587,7 @@ func TestNamedCaptureWithSpecialCharsInName(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<data value="84" min="10" max="100" />`
|
expected := `<data value="84" min="10" max="100" />`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<data value="(?<val_1>\d+)"`,
|
`<data value="(?<val_1>\d+)"`,
|
||||||
`val_1 = val_1 * 2`)
|
`val_1 = val_1 * 2`)
|
||||||
|
|
||||||
@@ -602,7 +602,7 @@ func TestEmptyNamedCapture(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<tag attr="default" />`
|
expected := `<tag attr="default" />`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`attr="(?<value>.*?)"`,
|
`attr="(?<value>.*?)"`,
|
||||||
`value = value == "" and "default" or value`)
|
`value = value == "" and "default" or value`)
|
||||||
|
|
||||||
@@ -617,7 +617,7 @@ func TestMultipleNamedCapturesInSameLine(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<rect x="20" y="40" width="200" height="100" />`
|
expected := `<rect x="20" y="40" width="200" height="100" />`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`x="(?<x>\d+)" y="(?<y>\d+)" width="(?<w>\d+)" height="(?<h>\d+)"`,
|
`x="(?<x>\d+)" y="(?<y>\d+)" width="(?<w>\d+)" height="(?<h>\d+)"`,
|
||||||
`x = x * 2
|
`x = x * 2
|
||||||
y = y * 2
|
y = y * 2
|
||||||
@@ -641,7 +641,7 @@ func TestConditionalNamedCapture(t *testing.T) {
|
|||||||
<item status="inactive" count="10" />
|
<item status="inactive" count="10" />
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<item status="(?<status>[^"]+)" count="(?<count>\d+)"`,
|
`<item status="(?<status>[^"]+)" count="(?<count>\d+)"`,
|
||||||
`count = status == "active" and count * 2 or count`)
|
`count = status == "active" and count * 2 or count`)
|
||||||
|
|
||||||
@@ -662,7 +662,7 @@ func TestLuaFunctionsOnNamedCaptures(t *testing.T) {
|
|||||||
<user name="JANE SMITH" role="admin" />
|
<user name="JANE SMITH" role="admin" />
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<user name="(?<name>[^"]+)" role="(?<role>[^"]+)"`,
|
`<user name="(?<name>[^"]+)" role="(?<role>[^"]+)"`,
|
||||||
`-- Capitalize first letters for regular users
|
`-- Capitalize first letters for regular users
|
||||||
if role == "user" then
|
if role == "user" then
|
||||||
@@ -692,7 +692,7 @@ func TestNamedCaptureWithMath(t *testing.T) {
|
|||||||
<item price="19.99" quantity="3" total="59.97" />
|
<item price="19.99" quantity="3" total="59.97" />
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<item price="(?<price>\d+\.\d+)" quantity="(?<qty>\d+)"!any$`,
|
`<item price="(?<price>\d+\.\d+)" quantity="(?<qty>\d+)"!any$`,
|
||||||
`-- Calculate and add total
|
`-- Calculate and add total
|
||||||
replacement = string.format('<item price="%s" quantity="%s" total="%.2f" />',
|
replacement = string.format('<item price="%s" quantity="%s" total="%.2f" />',
|
||||||
@@ -712,7 +712,7 @@ func TestNamedCaptureWithGlobals(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<temp unit="F">77</temp>`
|
expected := `<temp unit="F">77</temp>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<temp unit="(?<unit>[CF]?)">(?<value>\d+)</temp>`,
|
`<temp unit="(?<unit>[CF]?)">(?<value>\d+)</temp>`,
|
||||||
`if unit == "C" then
|
`if unit == "C" then
|
||||||
value = value * 9/5 + 32
|
value = value * 9/5 + 32
|
||||||
@@ -739,7 +739,7 @@ func TestMixedDynamicAndNamedCaptures(t *testing.T) {
|
|||||||
<color rgb="0,255,0" name="GREEN" hex="#00FF00" />
|
<color rgb="0,255,0" name="GREEN" hex="#00FF00" />
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<color rgb="(?<r>\d+),(?<g>\d+),(?<b>\d+)" name="(?<colorName>[^"]+)" />`,
|
`<color rgb="(?<r>\d+),(?<g>\d+),(?<b>\d+)" name="(?<colorName>[^"]+)" />`,
|
||||||
`-- Uppercase the name
|
`-- Uppercase the name
|
||||||
colorName = string.upper(colorName)
|
colorName = string.upper(colorName)
|
||||||
@@ -765,7 +765,7 @@ func TestNamedCapturesWithMultipleReferences(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<text format="uppercase" length="11">HELLO WORLD</text>`
|
expected := `<text format="uppercase" length="11">HELLO WORLD</text>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<text>(?<content>[^<]+)</text>`,
|
`<text>(?<content>[^<]+)</text>`,
|
||||||
`local uppercaseContent = string.upper(content)
|
`local uppercaseContent = string.upper(content)
|
||||||
local contentLength = string.len(content)
|
local contentLength = string.len(content)
|
||||||
@@ -783,7 +783,7 @@ func TestNamedCaptureWithJsonData(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<data>{"name":"JOHN","age":30}</data>`
|
expected := `<data>{"name":"JOHN","age":30}</data>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<data>(?<json>\{.*?\})</data>`,
|
`<data>(?<json>\{.*?\})</data>`,
|
||||||
`-- Parse JSON (simplified, assumes valid JSON)
|
`-- Parse JSON (simplified, assumes valid JSON)
|
||||||
local name = json:match('"name":"([^"]+)"')
|
local name = json:match('"name":"([^"]+)"')
|
||||||
@@ -813,7 +813,7 @@ func TestNamedCaptureInXML(t *testing.T) {
|
|||||||
</product>
|
</product>
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>.*?<stock>(?<stock>\d+)</stock>`,
|
`(?s)<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>.*?<stock>(?<stock>\d+)</stock>`,
|
||||||
`-- Add 20% to price if USD
|
`-- Add 20% to price if USD
|
||||||
if currency == "USD" then
|
if currency == "USD" then
|
||||||
@@ -870,7 +870,7 @@ func TestComprehensiveNamedCaptures(t *testing.T) {
|
|||||||
</products>
|
</products>
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`(?s)<product sku="(?<sku>[^"]+)" status="(?<status>[^"]+)"[^>]*>\s*<name>(?<product_name>[^<]+)</name>\s*<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>\s*<quantity>(?<qty>\d+)</quantity>`,
|
`(?s)<product sku="(?<sku>[^"]+)" status="(?<status>[^"]+)"[^>]*>\s*<name>(?<product_name>[^<]+)</name>\s*<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>\s*<quantity>(?<qty>\d+)</quantity>`,
|
||||||
`-- Only process in-stock items
|
`-- Only process in-stock items
|
||||||
if status == "in-stock" then
|
if status == "in-stock" then
|
||||||
@@ -924,7 +924,7 @@ func TestVariousNamedCaptureFormats(t *testing.T) {
|
|||||||
</data>
|
</data>
|
||||||
`
|
`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`<entry id="(?<id_num>\d+)" value="(?<val>\d+)"(?: status="(?<status>[^"]*)")? />`,
|
`<entry id="(?<id_num>\d+)" value="(?<val>\d+)"(?: status="(?<status>[^"]*)")? />`,
|
||||||
`-- Prefix the ID with "ID-"
|
`-- Prefix the ID with "ID-"
|
||||||
id_num = "ID-" .. id_num
|
id_num = "ID-" .. id_num
|
||||||
@@ -963,7 +963,7 @@ func TestSimpleNamedCapture(t *testing.T) {
|
|||||||
|
|
||||||
expected := `<product name="WIDGET" price="19.99"/>`
|
expected := `<product name="WIDGET" price="19.99"/>`
|
||||||
|
|
||||||
result, mods, matches, err := ApiAdaptor(content,
|
result, mods, matches, err := APIAdaptor(content,
|
||||||
`name="(?<product_name>[^"]+)"`,
|
`name="(?<product_name>[^"]+)"`,
|
||||||
`product_name = string.upper(product_name)`)
|
`product_name = string.upper(product_name)`)
|
||||||
|
|
||||||
|
|||||||
@@ -404,6 +404,7 @@ files = ["test.txt"
|
|||||||
|
|
||||||
invalidFile := filepath.Join(tmpDir, "invalid.toml")
|
invalidFile := filepath.Join(tmpDir, "invalid.toml")
|
||||||
err = os.WriteFile(invalidFile, []byte(invalidTOML), 0644)
|
err = os.WriteFile(invalidFile, []byte(invalidTOML), 0644)
|
||||||
|
assert.NoError(t, err, "Should write invalid TOML file")
|
||||||
|
|
||||||
commands, err := utils.LoadCommandsFromTomlFiles("invalid.toml")
|
commands, err := utils.LoadCommandsFromTomlFiles("invalid.toml")
|
||||||
assert.Error(t, err, "Should return error for invalid TOML syntax")
|
assert.Error(t, err, "Should return error for invalid TOML syntax")
|
||||||
@@ -418,6 +419,7 @@ files = ["test.txt"
|
|||||||
// Test 3: Empty TOML file creates an error (this is expected behavior)
|
// Test 3: Empty TOML file creates an error (this is expected behavior)
|
||||||
emptyFile := filepath.Join(tmpDir, "empty.toml")
|
emptyFile := filepath.Join(tmpDir, "empty.toml")
|
||||||
err = os.WriteFile(emptyFile, []byte(""), 0644)
|
err = os.WriteFile(emptyFile, []byte(""), 0644)
|
||||||
|
assert.NoError(t, err, "Should write empty TOML file")
|
||||||
|
|
||||||
commands, err = utils.LoadCommandsFromTomlFiles("empty.toml")
|
commands, err = utils.LoadCommandsFromTomlFiles("empty.toml")
|
||||||
assert.Error(t, err, "Should return error for empty TOML file")
|
assert.Error(t, err, "Should return error for empty TOML file")
|
||||||
@@ -508,4 +510,4 @@ func TestYAMLToTOMLConversion(t *testing.T) {
|
|||||||
assert.Equal(t, tomlData, originalTomlData, "TOML file content should be unchanged")
|
assert.Equal(t, tomlData, originalTomlData, "TOML file content should be unchanged")
|
||||||
|
|
||||||
t.Logf("YAML to TOML conversion test completed successfully")
|
t.Logf("YAML to TOML conversion test completed successfully")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ package utils
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -13,34 +12,13 @@ import (
|
|||||||
var fileLogger = logger.Default.WithPrefix("utils/file")
|
var fileLogger = logger.Default.WithPrefix("utils/file")
|
||||||
|
|
||||||
func CleanPath(path string) string {
|
func CleanPath(path string) string {
|
||||||
cleanPathLogger := fileLogger.WithPrefix("CleanPath")
|
// Use the centralized ResolvePath function
|
||||||
cleanPathLogger.Debug("Cleaning path: %q", path)
|
return ResolvePath(path)
|
||||||
cleanPathLogger.Trace("Original path: %q", path)
|
|
||||||
path = filepath.Clean(path)
|
|
||||||
path = strings.ReplaceAll(path, "\\", "/")
|
|
||||||
cleanPathLogger.Trace("Cleaned path result: %q", path)
|
|
||||||
return path
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func ToAbs(path string) string {
|
func ToAbs(path string) string {
|
||||||
toAbsLogger := fileLogger.WithPrefix("ToAbs")
|
// Use the centralized ResolvePath function
|
||||||
toAbsLogger.Debug("Converting path to absolute: %q", path)
|
return ResolvePath(path)
|
||||||
toAbsLogger.Trace("Input path: %q", path)
|
|
||||||
if filepath.IsAbs(path) {
|
|
||||||
toAbsLogger.Debug("Path is already absolute, cleaning it.")
|
|
||||||
cleanedPath := CleanPath(path)
|
|
||||||
toAbsLogger.Trace("Already absolute path after cleaning: %q", cleanedPath)
|
|
||||||
return cleanedPath
|
|
||||||
}
|
|
||||||
cwd, err := os.Getwd()
|
|
||||||
if err != nil {
|
|
||||||
toAbsLogger.Error("Error getting current working directory: %v", err)
|
|
||||||
return CleanPath(path)
|
|
||||||
}
|
|
||||||
toAbsLogger.Trace("Current working directory: %q", cwd)
|
|
||||||
cleanedPath := CleanPath(filepath.Join(cwd, path))
|
|
||||||
toAbsLogger.Trace("Converted absolute path result: %q", cleanedPath)
|
|
||||||
return cleanedPath
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||||
|
|||||||
@@ -7,8 +7,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||||
"github.com/bmatcuk/doublestar/v4"
|
|
||||||
"github.com/BurntSushi/toml"
|
"github.com/BurntSushi/toml"
|
||||||
|
"github.com/bmatcuk/doublestar/v4"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -62,6 +62,7 @@ func (c *ModifyCommand) Validate() error {
|
|||||||
|
|
||||||
// Ehh.. Not much better... Guess this wasn't the big deal
|
// Ehh.. Not much better... Guess this wasn't the big deal
|
||||||
var matchesMemoTable map[string]bool = make(map[string]bool)
|
var matchesMemoTable map[string]bool = make(map[string]bool)
|
||||||
|
var globMemoTable map[string][]string = make(map[string][]string)
|
||||||
|
|
||||||
func Matches(path string, glob string) (bool, error) {
|
func Matches(path string, glob string) (bool, error) {
|
||||||
matchesLogger := modifyCommandLogger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
|
matchesLogger := modifyCommandLogger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
|
||||||
@@ -85,25 +86,27 @@ func SplitPattern(pattern string) (string, string) {
|
|||||||
splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
|
splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
|
||||||
splitPatternLogger.Debug("Splitting pattern")
|
splitPatternLogger.Debug("Splitting pattern")
|
||||||
splitPatternLogger.Trace("Original pattern: %q", pattern)
|
splitPatternLogger.Trace("Original pattern: %q", pattern)
|
||||||
static, pattern := doublestar.SplitPattern(pattern)
|
|
||||||
|
|
||||||
cwd, err := os.Getwd()
|
// Resolve the pattern first to handle ~ expansion and make it absolute
|
||||||
if err != nil {
|
resolvedPattern := ResolvePath(pattern)
|
||||||
splitPatternLogger.Error("Error getting current working directory: %v", err)
|
splitPatternLogger.Trace("Resolved pattern: %q", resolvedPattern)
|
||||||
return "", ""
|
|
||||||
}
|
static, pattern := doublestar.SplitPattern(resolvedPattern)
|
||||||
splitPatternLogger.Trace("Current working directory: %q", cwd)
|
|
||||||
|
// Ensure static part is properly resolved
|
||||||
if static == "" {
|
if static == "" {
|
||||||
splitPatternLogger.Debug("Static part is empty, defaulting to current working directory")
|
cwd, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
splitPatternLogger.Error("Error getting current working directory: %v", err)
|
||||||
|
return "", ""
|
||||||
|
}
|
||||||
static = cwd
|
static = cwd
|
||||||
|
splitPatternLogger.Debug("Static part is empty, defaulting to current working directory: %q", static)
|
||||||
|
} else {
|
||||||
|
// Static part should already be resolved by ResolvePath
|
||||||
|
static = strings.ReplaceAll(static, "\\", "/")
|
||||||
}
|
}
|
||||||
if !filepath.IsAbs(static) {
|
|
||||||
splitPatternLogger.Debug("Static part is not absolute, joining with current working directory")
|
|
||||||
static = filepath.Join(cwd, static)
|
|
||||||
static = filepath.Clean(static)
|
|
||||||
splitPatternLogger.Trace("Static path after joining and cleaning: %q", static)
|
|
||||||
}
|
|
||||||
static = strings.ReplaceAll(static, "\\", "/")
|
|
||||||
splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
|
splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
|
||||||
return static, pattern
|
return static, pattern
|
||||||
}
|
}
|
||||||
@@ -123,33 +126,23 @@ func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[s
|
|||||||
fileCommands := make(map[string]FileCommandAssociation)
|
fileCommands := make(map[string]FileCommandAssociation)
|
||||||
|
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
file = strings.ReplaceAll(file, "\\", "/")
|
// Use centralized path resolution internally but keep original file as key
|
||||||
associateFilesLogger.Debug("Processing file: %q", file)
|
resolvedFile := ResolvePath(file)
|
||||||
|
associateFilesLogger.Debug("Processing file: %q (resolved: %q)", file, resolvedFile)
|
||||||
fileCommands[file] = FileCommandAssociation{
|
fileCommands[file] = FileCommandAssociation{
|
||||||
File: file,
|
File: resolvedFile,
|
||||||
IsolateCommands: []ModifyCommand{},
|
IsolateCommands: []ModifyCommand{},
|
||||||
Commands: []ModifyCommand{},
|
Commands: []ModifyCommand{},
|
||||||
}
|
}
|
||||||
for _, command := range commands {
|
for _, command := range commands {
|
||||||
associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
|
associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
|
||||||
for _, glob := range command.Files {
|
for _, glob := range command.Files {
|
||||||
glob = strings.ReplaceAll(glob, "\\", "/")
|
// SplitPattern now handles tilde expansion and path resolution
|
||||||
static, pattern := SplitPattern(glob)
|
static, pattern := SplitPattern(glob)
|
||||||
associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)
|
associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)
|
||||||
|
|
||||||
// Build absolute path for the current file to compare with static
|
// Use resolved file for matching
|
||||||
cwd, err := os.Getwd()
|
absFile := resolvedFile
|
||||||
if err != nil {
|
|
||||||
associateFilesLogger.Warning("Failed to get CWD when matching %q for file %q: %v", glob, file, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
var absFile string
|
|
||||||
if filepath.IsAbs(file) {
|
|
||||||
absFile = filepath.Clean(file)
|
|
||||||
} else {
|
|
||||||
absFile = filepath.Clean(filepath.Join(cwd, file))
|
|
||||||
}
|
|
||||||
absFile = strings.ReplaceAll(absFile, "\\", "/")
|
|
||||||
associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)
|
associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)
|
||||||
|
|
||||||
// Only match if the file is under the static root
|
// Only match if the file is under the static root
|
||||||
@@ -200,9 +193,14 @@ func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
|
|||||||
for _, command := range commands {
|
for _, command := range commands {
|
||||||
aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
|
aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
|
||||||
for _, glob := range command.Files {
|
for _, glob := range command.Files {
|
||||||
resolvedGlob := strings.Replace(glob, "~", os.Getenv("HOME"), 1)
|
// Split the glob into static and pattern parts, then resolve ONLY the static part
|
||||||
resolvedGlob = strings.ReplaceAll(resolvedGlob, "\\", "/")
|
static, pattern := SplitPattern(glob)
|
||||||
aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q)", glob, resolvedGlob)
|
// Reconstruct the glob with resolved static part
|
||||||
|
resolvedGlob := static
|
||||||
|
if pattern != "" {
|
||||||
|
resolvedGlob += "/" + pattern
|
||||||
|
}
|
||||||
|
aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q) [static=%s, pattern=%s]", glob, resolvedGlob, static, pattern)
|
||||||
globs[resolvedGlob] = struct{}{}
|
globs[resolvedGlob] = struct{}{}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -211,7 +209,7 @@ func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
|
|||||||
return globs
|
return globs
|
||||||
}
|
}
|
||||||
|
|
||||||
func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
|
func ExpandGlobs(patterns map[string]struct{}) ([]string, error) {
|
||||||
expandGlobsLogger := modifyCommandLogger.WithPrefix("ExpandGLobs")
|
expandGlobsLogger := modifyCommandLogger.WithPrefix("ExpandGLobs")
|
||||||
expandGlobsLogger.Debug("Expanding glob patterns to actual files")
|
expandGlobsLogger.Debug("Expanding glob patterns to actual files")
|
||||||
expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
|
expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
|
||||||
@@ -228,23 +226,30 @@ func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
|
|||||||
for pattern := range patterns {
|
for pattern := range patterns {
|
||||||
expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
|
expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
|
||||||
static, pattern := SplitPattern(pattern)
|
static, pattern := SplitPattern(pattern)
|
||||||
matches, err := doublestar.Glob(os.DirFS(static), pattern)
|
key := static + "|" + pattern
|
||||||
if err != nil {
|
matches, ok := globMemoTable[key]
|
||||||
expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
|
if !ok {
|
||||||
continue
|
var err error
|
||||||
|
matches, err = doublestar.Glob(os.DirFS(static), pattern)
|
||||||
|
if err != nil {
|
||||||
|
expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
globMemoTable[key] = matches
|
||||||
}
|
}
|
||||||
expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
|
expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
|
||||||
expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
|
expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
|
||||||
for _, m := range matches {
|
for _, m := range matches {
|
||||||
m = filepath.Join(static, m)
|
// Resolve the full path
|
||||||
info, err := os.Stat(m)
|
fullPath := ResolvePath(filepath.Join(static, m))
|
||||||
|
info, err := os.Stat(fullPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
expandGlobsLogger.Warning("Error getting file info for %q: %v", m, err)
|
expandGlobsLogger.Warning("Error getting file info for %q: %v", fullPath, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if !info.IsDir() && !filesMap[m] {
|
if !info.IsDir() && !filesMap[fullPath] {
|
||||||
expandGlobsLogger.Trace("Adding unique file to list: %q", m)
|
expandGlobsLogger.Trace("Adding unique file to list: %q", fullPath)
|
||||||
filesMap[m], files = true, append(files, m)
|
filesMap[fullPath], files = true, append(files, fullPath)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -317,9 +322,8 @@ func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, error) {
|
|||||||
loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)
|
loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)
|
||||||
|
|
||||||
for _, cookFile := range cookFiles {
|
for _, cookFile := range cookFiles {
|
||||||
cookFile = filepath.Join(static, cookFile)
|
// Use centralized path resolution
|
||||||
cookFile = filepath.Clean(cookFile)
|
cookFile = ResolvePath(filepath.Join(static, cookFile))
|
||||||
cookFile = strings.ReplaceAll(cookFile, "\\", "/")
|
|
||||||
loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)
|
loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)
|
||||||
|
|
||||||
cookFileData, err := os.ReadFile(cookFile)
|
cookFileData, err := os.ReadFile(cookFile)
|
||||||
@@ -406,9 +410,8 @@ func LoadCommandsFromTomlFiles(pattern string) ([]ModifyCommand, error) {
|
|||||||
loadTomlFilesLogger.Trace("TOML files found: %v", tomlFiles)
|
loadTomlFilesLogger.Trace("TOML files found: %v", tomlFiles)
|
||||||
|
|
||||||
for _, tomlFile := range tomlFiles {
|
for _, tomlFile := range tomlFiles {
|
||||||
tomlFile = filepath.Join(static, tomlFile)
|
// Use centralized path resolution
|
||||||
tomlFile = filepath.Clean(tomlFile)
|
tomlFile = ResolvePath(filepath.Join(static, tomlFile))
|
||||||
tomlFile = strings.ReplaceAll(tomlFile, "\\", "/")
|
|
||||||
loadTomlFilesLogger.Debug("Loading commands from individual TOML file: %q", tomlFile)
|
loadTomlFilesLogger.Debug("Loading commands from individual TOML file: %q", tomlFile)
|
||||||
|
|
||||||
tomlFileData, err := os.ReadFile(tomlFile)
|
tomlFileData, err := os.ReadFile(tomlFile)
|
||||||
@@ -504,9 +507,8 @@ func ConvertYAMLToTOML(yamlPattern string) error {
|
|||||||
skippedCount := 0
|
skippedCount := 0
|
||||||
|
|
||||||
for _, yamlFile := range yamlFiles {
|
for _, yamlFile := range yamlFiles {
|
||||||
yamlFilePath := filepath.Join(static, yamlFile)
|
// Use centralized path resolution
|
||||||
yamlFilePath = filepath.Clean(yamlFilePath)
|
yamlFilePath := ResolvePath(filepath.Join(static, yamlFile))
|
||||||
yamlFilePath = strings.ReplaceAll(yamlFilePath, "\\", "/")
|
|
||||||
|
|
||||||
// Generate corresponding TOML file path
|
// Generate corresponding TOML file path
|
||||||
tomlFilePath := strings.TrimSuffix(yamlFilePath, filepath.Ext(yamlFilePath)) + ".toml"
|
tomlFilePath := strings.TrimSuffix(yamlFilePath, filepath.Ext(yamlFilePath)) + ".toml"
|
||||||
|
|||||||
@@ -251,11 +251,19 @@ func TestAggregateGlobs(t *testing.T) {
|
|||||||
|
|
||||||
globs := AggregateGlobs(commands)
|
globs := AggregateGlobs(commands)
|
||||||
|
|
||||||
|
// Now we properly resolve only the static part of globs
|
||||||
|
// *.xml has no static part (current dir), so it becomes resolved_dir/*.xml
|
||||||
|
// *.txt has no static part (current dir), so it becomes resolved_dir/*.txt
|
||||||
|
// *.json has no static part (current dir), so it becomes resolved_dir/*.json
|
||||||
|
// subdir/*.xml has static "subdir", so it becomes resolved_dir/subdir/*.xml
|
||||||
|
cwd, _ := os.Getwd()
|
||||||
|
resolvedCwd := ResolvePath(cwd)
|
||||||
|
|
||||||
expected := map[string]struct{}{
|
expected := map[string]struct{}{
|
||||||
"*.xml": {},
|
resolvedCwd + "/*.xml": {},
|
||||||
"*.txt": {},
|
resolvedCwd + "/*.txt": {},
|
||||||
"*.json": {},
|
resolvedCwd + "/*.json": {},
|
||||||
"subdir/*.xml": {},
|
resolvedCwd + "/subdir/*.xml": {},
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(globs) != len(expected) {
|
if len(globs) != len(expected) {
|
||||||
@@ -697,6 +705,58 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
|||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
|
func TestExpandGlobsMemoization(t *testing.T) {
|
||||||
|
tmpDir, err := os.MkdirTemp("", "expand-globs-memo-test")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create temp dir: %v", err)
|
||||||
|
}
|
||||||
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
|
err = os.WriteFile(filepath.Join(tmpDir, "test1.go"), []byte("test"), 0644)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create test file: %v", err)
|
||||||
|
}
|
||||||
|
err = os.WriteFile(filepath.Join(tmpDir, "test2.go"), []byte("test"), 0644)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to create test file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
origDir, _ := os.Getwd()
|
||||||
|
os.Chdir(tmpDir)
|
||||||
|
defer os.Chdir(origDir)
|
||||||
|
|
||||||
|
cwd, _ := os.Getwd()
|
||||||
|
resolvedCwd := ResolvePath(cwd)
|
||||||
|
pattern1 := resolvedCwd + "/*.go"
|
||||||
|
patterns := map[string]struct{}{pattern1: {}}
|
||||||
|
|
||||||
|
globMemoTable = make(map[string][]string)
|
||||||
|
|
||||||
|
files1, err := ExpandGlobs(patterns)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ExpandGlobs failed: %v", err)
|
||||||
|
}
|
||||||
|
if len(files1) != 2 {
|
||||||
|
t.Fatalf("Expected 2 files, got %d", len(files1))
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(globMemoTable) != 1 {
|
||||||
|
t.Fatalf("Expected 1 entry in memo table, got %d", len(globMemoTable))
|
||||||
|
}
|
||||||
|
|
||||||
|
files2, err := ExpandGlobs(patterns)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("ExpandGlobs failed: %v", err)
|
||||||
|
}
|
||||||
|
if len(files2) != 2 {
|
||||||
|
t.Fatalf("Expected 2 files, got %d", len(files2))
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(globMemoTable) != 1 {
|
||||||
|
t.Fatalf("Expected memo table to still have 1 entry, got %d", len(globMemoTable))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// LoadCommandsFromCookFile returns an error for a malformed YAML file
|
// LoadCommandsFromCookFile returns an error for a malformed YAML file
|
||||||
// func TestLoadCommandsFromCookFilesMalformedYAML(t *testing.T) {
|
// func TestLoadCommandsFromCookFilesMalformedYAML(t *testing.T) {
|
||||||
// // Setup test directory with mock YAML files
|
// // Setup test directory with mock YAML files
|
||||||
|
|||||||
104
utils/path.go
Normal file
104
utils/path.go
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||||
|
)
|
||||||
|
|
||||||
|
// pathLogger is a scoped logger for the utils/path package.
|
||||||
|
var pathLogger = logger.Default.WithPrefix("utils/path")
|
||||||
|
|
||||||
|
// ResolvePath resolves a file path by:
|
||||||
|
// 1. Expanding ~ to the user's home directory
|
||||||
|
// 2. Making the path absolute if it's relative
|
||||||
|
// 3. Normalizing path separators to forward slashes
|
||||||
|
// 4. Cleaning the path
|
||||||
|
func ResolvePath(path string) string {
|
||||||
|
resolvePathLogger := pathLogger.WithPrefix("ResolvePath").WithField("inputPath", path)
|
||||||
|
resolvePathLogger.Debug("Resolving path")
|
||||||
|
|
||||||
|
if path == "" {
|
||||||
|
resolvePathLogger.Warning("Empty path provided")
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 1: Expand ~ to home directory
|
||||||
|
originalPath := path
|
||||||
|
if strings.HasPrefix(path, "~") {
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" {
|
||||||
|
// Fallback for Windows
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if home != "" {
|
||||||
|
if path == "~" {
|
||||||
|
path = home
|
||||||
|
} else if strings.HasPrefix(path, "~/") {
|
||||||
|
path = filepath.Join(home, path[2:])
|
||||||
|
} else {
|
||||||
|
// Handle cases like ~username
|
||||||
|
// For now, just replace ~ with home directory
|
||||||
|
path = strings.Replace(path, "~", home, 1)
|
||||||
|
}
|
||||||
|
resolvePathLogger.Debug("Expanded tilde to home directory: home=%s, result=%s", home, path)
|
||||||
|
} else {
|
||||||
|
resolvePathLogger.Warning("Could not determine home directory for tilde expansion")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2: Make path absolute if it's not already
|
||||||
|
if !filepath.IsAbs(path) {
|
||||||
|
cwd, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
resolvePathLogger.Error("Failed to get current working directory: %v", err)
|
||||||
|
return path // Return as-is if we can't get CWD
|
||||||
|
}
|
||||||
|
path = filepath.Join(cwd, path)
|
||||||
|
resolvePathLogger.Debug("Made relative path absolute: cwd=%s, result=%s", cwd, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3: Clean the path
|
||||||
|
path = filepath.Clean(path)
|
||||||
|
resolvePathLogger.Debug("Cleaned path: result=%s", path)
|
||||||
|
|
||||||
|
// Step 4: Normalize path separators to forward slashes for consistency
|
||||||
|
path = strings.ReplaceAll(path, "\\", "/")
|
||||||
|
|
||||||
|
resolvePathLogger.Debug("Final resolved path: original=%s, final=%s", originalPath, path)
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolvePathForLogging is the same as ResolvePath but includes more detailed logging
|
||||||
|
// for debugging purposes
|
||||||
|
func ResolvePathForLogging(path string) string {
|
||||||
|
return ResolvePath(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsAbsolutePath checks if a path is absolute (including tilde expansion)
|
||||||
|
func IsAbsolutePath(path string) bool {
|
||||||
|
// Check for tilde expansion first
|
||||||
|
if strings.HasPrefix(path, "~") {
|
||||||
|
return true // Tilde paths become absolute after expansion
|
||||||
|
}
|
||||||
|
return filepath.IsAbs(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetRelativePath returns the relative path from base to target
|
||||||
|
func GetRelativePath(base, target string) (string, error) {
|
||||||
|
resolvedBase := ResolvePath(base)
|
||||||
|
resolvedTarget := ResolvePath(target)
|
||||||
|
|
||||||
|
relPath, err := filepath.Rel(resolvedBase, resolvedTarget)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Normalize to forward slashes
|
||||||
|
return strings.ReplaceAll(relPath, "\\", "/"), nil
|
||||||
|
}
|
||||||
432
utils/path_test.go
Normal file
432
utils/path_test.go
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
package utils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestResolvePath(t *testing.T) {
|
||||||
|
// Save original working directory
|
||||||
|
origDir, _ := os.Getwd()
|
||||||
|
defer os.Chdir(origDir)
|
||||||
|
|
||||||
|
// Create a temporary directory for testing
|
||||||
|
tmpDir, err := os.MkdirTemp("", "path_test")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
setup func() // Optional setup function
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Empty path",
|
||||||
|
input: "",
|
||||||
|
expected: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Already absolute path",
|
||||||
|
input: func() string {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
return "C:/absolute/path/file.txt"
|
||||||
|
}
|
||||||
|
return "/absolute/path/file.txt"
|
||||||
|
}(),
|
||||||
|
expected: func() string {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
return "C:/absolute/path/file.txt"
|
||||||
|
}
|
||||||
|
return "/absolute/path/file.txt"
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Relative path",
|
||||||
|
input: "relative/file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
abs, _ := filepath.Abs("relative/file.txt")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Tilde expansion - home only",
|
||||||
|
input: "~",
|
||||||
|
expected: func() string {
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
return strings.ReplaceAll(filepath.Clean(home), "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Tilde expansion - with subpath",
|
||||||
|
input: "~/Documents/file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
expected := filepath.Join(home, "Documents", "file.txt")
|
||||||
|
return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Path normalization - double slashes",
|
||||||
|
input: "path//to//file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
abs, _ := filepath.Abs("path/to/file.txt")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Path normalization - . and ..",
|
||||||
|
input: "path/./to/../file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
abs, _ := filepath.Abs("path/file.txt")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Windows backslash normalization",
|
||||||
|
input: "path\\to\\file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
abs, _ := filepath.Abs("path/to/file.txt")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Mixed separators with tilde",
|
||||||
|
input: "~/Documents\\file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
expected := filepath.Join(home, "Documents", "file.txt")
|
||||||
|
return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Relative path from current directory",
|
||||||
|
input: "./file.txt",
|
||||||
|
expected: func() string {
|
||||||
|
abs, _ := filepath.Abs("file.txt")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if tt.setup != nil {
|
||||||
|
tt.setup()
|
||||||
|
}
|
||||||
|
|
||||||
|
result := ResolvePath(tt.input)
|
||||||
|
assert.Equal(t, tt.expected, result, "ResolvePath(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolvePathWithWorkingDirectoryChange(t *testing.T) {
|
||||||
|
// Save original working directory
|
||||||
|
origDir, _ := os.Getwd()
|
||||||
|
defer os.Chdir(origDir)
|
||||||
|
|
||||||
|
// Create temporary directories
|
||||||
|
tmpDir, err := os.MkdirTemp("", "path_test")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
|
subDir := filepath.Join(tmpDir, "subdir")
|
||||||
|
err = os.MkdirAll(subDir, 0755)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
// Change to subdirectory
|
||||||
|
err = os.Chdir(subDir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
// Test relative path resolution from new working directory
|
||||||
|
result := ResolvePath("../test.txt")
|
||||||
|
expected := filepath.Join(tmpDir, "test.txt")
|
||||||
|
expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||||
|
|
||||||
|
assert.Equal(t, expected, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolvePathComplexTilde(t *testing.T) {
|
||||||
|
// Test complex tilde patterns
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
|
||||||
|
if home == "" {
|
||||||
|
t.Skip("Cannot determine home directory for tilde expansion tests")
|
||||||
|
}
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
input string
|
||||||
|
expected string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
input: "~",
|
||||||
|
expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
input: "~/",
|
||||||
|
expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
input: "~~",
|
||||||
|
expected: func() string {
|
||||||
|
// ~~ should be treated as ~ followed by ~ (tilde expansion)
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
if home != "" {
|
||||||
|
// First ~ gets expanded, second ~ remains
|
||||||
|
return strings.ReplaceAll(filepath.Clean(home+"~"), "\\", "/")
|
||||||
|
}
|
||||||
|
abs, _ := filepath.Abs("~~")
|
||||||
|
return strings.ReplaceAll(abs, "\\", "/")
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
input: func() string {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
return "C:/not/tilde/path"
|
||||||
|
}
|
||||||
|
return "/not/tilde/path"
|
||||||
|
}(),
|
||||||
|
expected: func() string {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
return "C:/not/tilde/path"
|
||||||
|
}
|
||||||
|
return "/not/tilde/path"
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run("Complex tilde: "+tt.input, func(t *testing.T) {
|
||||||
|
result := ResolvePath(tt.input)
|
||||||
|
assert.Equal(t, tt.expected, result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsAbsolutePath(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
expected bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Empty path",
|
||||||
|
input: "",
|
||||||
|
expected: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Absolute Unix path",
|
||||||
|
input: "/absolute/path",
|
||||||
|
expected: func() bool {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
// On Windows, paths starting with / are not considered absolute
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Relative path",
|
||||||
|
input: "relative/path",
|
||||||
|
expected: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Tilde expansion (becomes absolute)",
|
||||||
|
input: "~/path",
|
||||||
|
expected: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Windows absolute path",
|
||||||
|
input: "C:\\Windows\\System32",
|
||||||
|
expected: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
result := IsAbsolutePath(tt.input)
|
||||||
|
assert.Equal(t, tt.expected, result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGetRelativePath(t *testing.T) {
|
||||||
|
// Create temporary directories for testing
|
||||||
|
tmpDir, err := os.MkdirTemp("", "relative_path_test")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
defer os.RemoveAll(tmpDir)
|
||||||
|
|
||||||
|
baseDir := filepath.Join(tmpDir, "base")
|
||||||
|
targetDir := filepath.Join(tmpDir, "target")
|
||||||
|
subDir := filepath.Join(targetDir, "subdir")
|
||||||
|
|
||||||
|
err = os.MkdirAll(baseDir, 0755)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
err = os.MkdirAll(subDir, 0755)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
base string
|
||||||
|
target string
|
||||||
|
expected string
|
||||||
|
wantErr bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Target is subdirectory of base",
|
||||||
|
base: baseDir,
|
||||||
|
target: filepath.Join(baseDir, "subdir"),
|
||||||
|
expected: "subdir",
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Target is parent of base",
|
||||||
|
base: filepath.Join(baseDir, "subdir"),
|
||||||
|
target: baseDir,
|
||||||
|
expected: "..",
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Target is sibling directory",
|
||||||
|
base: baseDir,
|
||||||
|
target: targetDir,
|
||||||
|
expected: "../target",
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Same directory",
|
||||||
|
base: baseDir,
|
||||||
|
target: baseDir,
|
||||||
|
expected: ".",
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "With tilde expansion",
|
||||||
|
base: baseDir,
|
||||||
|
target: filepath.Join(baseDir, "file.txt"),
|
||||||
|
expected: "file.txt",
|
||||||
|
wantErr: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
result, err := GetRelativePath(tt.base, tt.target)
|
||||||
|
if tt.wantErr {
|
||||||
|
assert.Error(t, err)
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, tt.expected, result)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolvePathRegression(t *testing.T) {
|
||||||
|
// This test specifically addresses the original bug:
|
||||||
|
// "~ is NOT BEING FUCKING RESOLVED"
|
||||||
|
|
||||||
|
home := os.Getenv("HOME")
|
||||||
|
if home == "" && runtime.GOOS == "windows" {
|
||||||
|
home = os.Getenv("USERPROFILE")
|
||||||
|
}
|
||||||
|
|
||||||
|
if home == "" {
|
||||||
|
t.Skip("Cannot determine home directory for regression test")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test the exact pattern from the bug report
|
||||||
|
testPath := "~/Seafile/activitywatch/sync.yml"
|
||||||
|
result := ResolvePath(testPath)
|
||||||
|
expected := filepath.Join(home, "Seafile", "activitywatch", "sync.yml")
|
||||||
|
expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||||
|
|
||||||
|
assert.Equal(t, expected, result, "Tilde expansion bug not fixed!")
|
||||||
|
assert.NotContains(t, result, "~", "Tilde still present in resolved path!")
|
||||||
|
// Convert both to forward slashes for comparison
|
||||||
|
homeForwardSlash := strings.ReplaceAll(home, "\\", "/")
|
||||||
|
assert.Contains(t, result, homeForwardSlash, "Home directory not found in resolved path!")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolvePathEdgeCases(t *testing.T) {
|
||||||
|
// Save original working directory
|
||||||
|
origDir, _ := os.Getwd()
|
||||||
|
defer os.Chdir(origDir)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
input string
|
||||||
|
setup func()
|
||||||
|
shouldPanic bool
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Just dot",
|
||||||
|
input: ".",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Just double dot",
|
||||||
|
input: "..",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Triple dot",
|
||||||
|
input: "...",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Multiple leading dots",
|
||||||
|
input: "./.././../file.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Path with spaces",
|
||||||
|
input: "path with spaces/file.txt",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Very long relative path",
|
||||||
|
input: strings.Repeat("../", 10) + "file.txt",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if tt.setup != nil {
|
||||||
|
tt.setup()
|
||||||
|
}
|
||||||
|
|
||||||
|
if tt.shouldPanic {
|
||||||
|
assert.Panics(t, func() {
|
||||||
|
ResolvePath(tt.input)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
// Should not panic
|
||||||
|
assert.NotPanics(t, func() {
|
||||||
|
ResolvePath(tt.input)
|
||||||
|
})
|
||||||
|
// Result should be a valid absolute path
|
||||||
|
result := ResolvePath(tt.input)
|
||||||
|
if tt.input != "" {
|
||||||
|
assert.True(t, filepath.IsAbs(result) || result == "", "Result should be absolute or empty")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user