Compare commits
186 Commits
e8f16dda2b
...
v9.13.2
| Author | SHA1 | Date | |
|---|---|---|---|
| e7c5ce5b54 | |||
| de5db5d3c0 | |||
| 860c566b86 | |||
| 5b52c745dd | |||
| 1d278369a6 | |||
| 795f12252c | |||
| 741b128664 | |||
| d4ae64314f | |||
| 77c04e9c98 | |||
| 756f60f350 | |||
| 3861597c02 | |||
| 39b2a1e5e8 | |||
| c96a713317 | |||
| 1b8c014fd4 | |||
| 2622813bfe | |||
| 55031de03e | |||
| 354cf423eb | |||
| a704864e5d | |||
| 08ccd6e962 | |||
| 8d08c0bbd4 | |||
| 8a000fe3ee | |||
| 2a29742b89 | |||
| 4c5125b528 | |||
| 8ad7e91b57 | |||
| 2ba4501201 | |||
| 7083681752 | |||
| 0f55d18146 | |||
| 8b95b9561e | |||
| 2c126825c5 | |||
| 2f3b937b3b | |||
| ac965de730 | |||
| 04c930e027 | |||
| 28d36379b2 | |||
| 11a7c187f8 | |||
| 6b8ff8d31e | |||
| 5c4610d763 | |||
| aba25afc1c | |||
| c1d24d0197 | |||
| dfaead471b | |||
| 2b8f5dc997 | |||
| e4d390214d | |||
| 968c36577d | |||
| 2ee9ccc0eb | |||
| bf1c132bb7 | |||
| af11cb23b0 | |||
| 133053e63a | |||
| 2c51343bab | |||
| c6d6c45727 | |||
| 21c5cdde54 | |||
| d64bf7fd84 | |||
| 831a559256 | |||
| 2654dc541a | |||
| 6d619e37de | |||
| eab18ef1f0 | |||
| 28feb5924b | |||
| 64773edfd4 | |||
| 9eba684da9 | |||
| d3974255f9 | |||
| dbfcf06fa6 | |||
| ec7ec3cb09 | |||
| f20a76f723 | |||
| f67f2efb5c | |||
| f10465e54a | |||
| 767b6d6430 | |||
| 0f47303091 | |||
| e23808417f | |||
| 33eb969778 | |||
| 8b8fa70ce4 | |||
| 4d0b91ed94 | |||
| 12e0612b48 | |||
| 98e05f4998 | |||
| 2fa99ec3a2 | |||
| 8dd212fc71 | |||
| a4bbaf9f27 | |||
| 419a8118fc | |||
| da5b621cb6 | |||
| 1df0263a42 | |||
| 74394cbde9 | |||
| f1ea0f9156 | |||
| fff8869aff | |||
| a0c5a5f18c | |||
| b309e3e6f0 | |||
| 09cdc91761 | |||
| a18573c9f8 | |||
| eacc92ce4b | |||
| 3bcc958dda | |||
| 11f0bbee53 | |||
| c145ad0900 | |||
| e02c1f018f | |||
| 07fea6238f | |||
| 5f1fdfa6c1 | |||
| 4fb25d0463 | |||
| bf23894188 | |||
| aec0f9f171 | |||
| 83fed68432 | |||
| 4311533445 | |||
| ce28b948d0 | |||
| efc602e0ba | |||
| 917063db0c | |||
| 3e552428a5 | |||
| 50455c491d | |||
| 12ec399b09 | |||
| 5a49998c2c | |||
| 590f19603e | |||
| ee8c4b9aa5 | |||
| e8d6613ac8 | |||
| 91ad9006fa | |||
| 60ba3ad417 | |||
| b74e4724d4 | |||
| 30246fd626 | |||
| 06aed7b27a | |||
| b001dfe667 | |||
| d905ad027a | |||
| 3f6a03aee8 | |||
| 302e874710 | |||
| 9d9820072a | |||
| 53d14345b9 | |||
| 67c3346f2f | |||
| 346afdd143 | |||
| 48729cdfa4 | |||
| b9574f0106 | |||
| 635ca463c0 | |||
| 2459988ff0 | |||
| 6ab08fe97f | |||
| 2dafe4a981 | |||
| ec24e0713d | |||
| 969ccae25c | |||
| 5b46ff0efd | |||
| d234616406 | |||
| af3e55e518 | |||
| 13b48229ac | |||
| 670f6ed7a0 | |||
| bbc7c50fae | |||
| 779d1e0a0e | |||
| 54581f0216 | |||
| 3d01822e77 | |||
| 4e0ca92c77 | |||
| 388e54b3e3 | |||
| 6f2e76221a | |||
| e0d3b938e3 | |||
| 491a030bf8 | |||
| bff7cc2a27 | |||
| ff30b00e71 | |||
| e1eb5eeaa6 | |||
| 2a2e11d8e0 | |||
| 6eb4f31127 | |||
| 4b58e00c26 | |||
| 8ffd8af13c | |||
| 67861d4455 | |||
| 299e6d8bfe | |||
| 388822e90a | |||
| 91993b4548 | |||
| bb69558aaa | |||
| 052c670627 | |||
| 67fd215d0e | |||
| 9ecbbff6fa | |||
| 774ac0f0ca | |||
| b785d24a08 | |||
| 22f991e72e | |||
| 5518b27663 | |||
| 0b899dea2c | |||
| 3424fea8ad | |||
| ddc1d83d58 | |||
| 4b0a85411d | |||
| 46e871b626 | |||
| 258dcc88e7 | |||
| 75bf449bed | |||
| 58586395fb | |||
| c5a68af5e6 | |||
| b4c0284734 | |||
| c5d1dad8de | |||
| 4ff2ee80ee | |||
| 633eebfd2a | |||
| 5a31703840 | |||
| 162d0c758d | |||
| 14d64495b6 | |||
| fe6e97e832 | |||
| 35b3d8b099 | |||
| 2e3e958e15 | |||
| 955afc4295 | |||
| 2c487bc443 | |||
| b77224176b | |||
| a2201053c5 | |||
| 04cedf5ece | |||
| ebb07854cc | |||
| 8a86ae2f40 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,2 +1,4 @@
|
||||
*.exe
|
||||
.qodo
|
||||
*.sqlite
|
||||
.cursor/rules
|
||||
|
||||
76
.vscode/launch.json
vendored
76
.vscode/launch.json
vendored
@@ -12,11 +12,20 @@
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
|
||||
"args": [
|
||||
"loglevel",
|
||||
"*.toml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Payday 2)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Payday2",
|
||||
"args": [
|
||||
"-loglevel",
|
||||
"trace",
|
||||
"(?-s)LightComponent!anyrange=\"(!num)\"",
|
||||
"*4",
|
||||
"**/Outpost*.xml"
|
||||
"*.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -33,18 +42,71 @@
|
||||
"cookassistant.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Quasimorph cookfile)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Quasimorph",
|
||||
"args": [
|
||||
"cook.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Rimworld cookfile)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Rimworld/294100",
|
||||
"args": [
|
||||
"cookVehicles.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Workspace)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"args": [
|
||||
"tester.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Avorion)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Avorion/Avorion",
|
||||
"args": [
|
||||
"*.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (Minecraft)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-Minecraft",
|
||||
"args": [
|
||||
"cook_tacz.yml",
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Package (ICARUS)",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}",
|
||||
"cwd": "C:/Users/Administrator/Seafile/Games-ICARUS/Icarus/Saved/IME3/Mods",
|
||||
"args": [
|
||||
"-loglevel",
|
||||
"trace",
|
||||
"(?-s)LightComponent!anyrange=\"(!num)\"",
|
||||
"*4",
|
||||
"**/Outpost*.xml"
|
||||
"cook_processorrecipes.yml",
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
398
README.md
398
README.md
@@ -1,116 +1,304 @@
|
||||
# Big Chef
|
||||
# Chef
|
||||
|
||||
A Go-based tool for modifying XML, JSON, and text documents using XPath/JSONPath/Regex expressions and Lua transformations.
|
||||
`chef` is a Go CLI for batch file transformations using regex, JSON, or XML mode with Lua scripts.
|
||||
It runs commands from cook files (`.toml` or `.yml/.yaml`), supports glob-based file selection, and keeps file snapshots so changes can be reset.
|
||||
|
||||
## Features
|
||||
## What it does
|
||||
|
||||
- **Multi-Format Processing**:
|
||||
- XML (XPath)
|
||||
- JSON (JSONPath)
|
||||
- Text (Regex)
|
||||
- **Node Value Modification**: Update text values in XML elements, JSON properties or text matches
|
||||
- **Attribute Manipulation**: Modify XML attributes, JSON object keys or regex capture groups
|
||||
- **Conditional Logic**: Apply transformations based on document content
|
||||
- **Complex Operations**:
|
||||
- Mathematical calculations
|
||||
- String manipulations
|
||||
- Date conversions
|
||||
- Structural changes
|
||||
- Whole ass Lua environment
|
||||
- **Error Handling**: Comprehensive error detection for:
|
||||
- Invalid XML/JSON
|
||||
- Malformed XPath/JSONPath
|
||||
- Lua syntax errors
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### 1. Basic field modification
|
||||
```xml
|
||||
<!-- Input -->
|
||||
<price>44.95</price>
|
||||
|
||||
<!-- Command -->
|
||||
chef -xml "//price" "v=v*2" input.xml
|
||||
|
||||
<!-- Output -->
|
||||
<price>89.9</price>
|
||||
```
|
||||
|
||||
### 2. Supports glob patterns
|
||||
```xml
|
||||
chef -xml "//price" "v=v*2" data/**.xml
|
||||
```
|
||||
|
||||
### 3. Attribute Update
|
||||
```xml
|
||||
<!-- Input -->
|
||||
<item price="10.50"/>
|
||||
|
||||
<!-- Command -->
|
||||
chef -xml "//item/@price" "v=v*2" input.xml
|
||||
|
||||
<!-- Output -->
|
||||
<item price="21"/>
|
||||
```
|
||||
|
||||
### 3. JSONPath Transformation
|
||||
```json
|
||||
// Input
|
||||
{
|
||||
"products": [
|
||||
{"name": "Widget", "price": 19.99},
|
||||
{"name": "Gadget", "price": 29.99}
|
||||
]
|
||||
}
|
||||
|
||||
// Command
|
||||
chef -json "$.products[*].price" "v=v*0.75" input.json
|
||||
|
||||
// Output
|
||||
{
|
||||
"products": [
|
||||
{"name": "Widget", "price": 14.99},
|
||||
{"name": "Gadget", "price": 22.49}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Regex Text Replacement
|
||||
Regex works slightly differently, up to 12 match groups are provided as v1..v12 and s1..s12 for numbers and strings respectively.
|
||||
A special shorthand "!num" is also provided that simply expands to `(\d*\.?\d+)`.
|
||||
```xml
|
||||
<!-- Input -->
|
||||
<description>Price: $15.00 Special Offer</description>
|
||||
|
||||
<!-- Command -->
|
||||
chef "Price: $!num Special Offer" "v1 = v1 * 0.92" input.xml
|
||||
|
||||
<!-- Output -->
|
||||
<description>Price: $13.80 Special Offer</description>
|
||||
```
|
||||
|
||||
### 5. Conditional Transformation
|
||||
```xml
|
||||
<!-- Input -->
|
||||
<item stock="5" price="10.00"/>
|
||||
|
||||
<!-- Command -->
|
||||
chef -xml "//item" "if tonumber(v.stock) > 0 then v.price = v.price * 0.8 end" input.xml
|
||||
|
||||
<!-- Output -->
|
||||
<item stock="5" price="8.00"/>
|
||||
```
|
||||
- Applies regex-based edits using Lua on capture groups (`v1`, `s1`, named groups, etc.).
|
||||
- Applies JSON edits by exposing parsed JSON as `data` in Lua (`json = true` command mode).
|
||||
- Applies XML edits by exposing parsed XML nodes as `data` in Lua (`xml = true` command mode).
|
||||
- Loads reusable global variables from `[variables]` (TOML) / `variables:` (YAML).
|
||||
- Supports multi-command pipelines, command filtering, per-command log levels, isolate passes, and reset behavior.
|
||||
- Saves original file snapshots in `data.sqlite` so you can restore files later.
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
go build -o chef main.go
|
||||
go build -o chef .
|
||||
```
|
||||
|
||||
## Quick start
|
||||
|
||||
1) Generate an example config:
|
||||
|
||||
```bash
|
||||
# Process XML file
|
||||
./chef -xml "//price" "v=v*1.2" input.xml
|
||||
|
||||
# Process JSON file
|
||||
./chef -json "$.prices[*]" "v=v*0.9" input.json
|
||||
chef --example
|
||||
```
|
||||
|
||||
This writes `example_cook.toml` in the current directory.
|
||||
|
||||
2) Run it:
|
||||
|
||||
```bash
|
||||
chef example_cook.toml
|
||||
```
|
||||
|
||||
3) Run only selected commands:
|
||||
|
||||
```bash
|
||||
chef -f "UpdateAmounts,Prefix" example_cook.toml
|
||||
```
|
||||
|
||||
4) Restore files from snapshots:
|
||||
|
||||
```bash
|
||||
chef reset
|
||||
```
|
||||
|
||||
5) Clear all saved snapshots:
|
||||
|
||||
```bash
|
||||
chef dump
|
||||
```
|
||||
|
||||
6) Open the interactive TUI:
|
||||
|
||||
```bash
|
||||
chef tui example_cook.toml
|
||||
```
|
||||
|
||||
## CLI flags
|
||||
|
||||
- `-P, --parallel` number of files to process concurrently (default `100`).
|
||||
- `-f, --filter` comma-separated command name filters.
|
||||
- `-c, --conv` convert YAML cook files to TOML (skips output if target `.toml` already exists).
|
||||
- `-e, --example` generate `example_cook.toml` and exit.
|
||||
- `-m, --meta` generate `meta.lua` (LuaLS autocomplete helper) and exit.
|
||||
- `-l, --loglevel` global log level (`ERROR`, `WARNING`, `INFO`, `DEBUG`, `TRACE`).
|
||||
|
||||
## TUI mode
|
||||
|
||||
Launch with:
|
||||
|
||||
```bash
|
||||
chef tui <cook-files...>
|
||||
```
|
||||
|
||||
Behavior:
|
||||
|
||||
- Displays files as top-level rows and commands indented under each file.
|
||||
- Toggle a command row to select that command for next run.
|
||||
- Toggle a file row to select/deselect all commands under that file.
|
||||
- Shows per-command status lines after runs:
|
||||
- green (modified, no errors)
|
||||
- yellow (no modifications, no errors)
|
||||
- red (errors)
|
||||
- Watched commands are marked with a blue watch indicator.
|
||||
|
||||
Hotkeys:
|
||||
|
||||
- `j` / `k` or arrow keys: move cursor
|
||||
- `Space`: toggle focused row
|
||||
- `a`: select all commands
|
||||
- `c`: clear all selections
|
||||
- `s`: run selected
|
||||
- `S`: run all
|
||||
- `w`: toggle watch on focused command
|
||||
- `W`: toggle watch for all commands under focused file
|
||||
- `e`: escape clipboard text (regex quote)
|
||||
- `E`: escape clipboard text with whitespace minimization
|
||||
- `q`: quit
|
||||
|
||||
Watch mode details:
|
||||
|
||||
- Watch source: **cook/config files only**.
|
||||
- Trigger engine: **fsnotify only**.
|
||||
- Debounce: `500ms`.
|
||||
- On cook file change, TUI reloads commands and reruns only watched commands whose definitions changed.
|
||||
|
||||
## Cook file format
|
||||
|
||||
`chef` executes commands from cook files. Prefer TOML (shown below), YAML is also supported.
|
||||
|
||||
```toml
|
||||
[variables]
|
||||
multiplier = 1.5
|
||||
prefix = "NEW_"
|
||||
|
||||
[[commands]]
|
||||
name = "MultiplyValues"
|
||||
regex = "value = !num"
|
||||
lua = "v1 * multiplier"
|
||||
files = ["data/**/*.txt"]
|
||||
loglevel = "INFO"
|
||||
|
||||
[[commands]]
|
||||
name = "JSONUpdate"
|
||||
json = true
|
||||
lua = "data.version = '2.0.0'; modified = true"
|
||||
files = ["config/**/*.json"]
|
||||
|
||||
[[commands]]
|
||||
name = "XMLUpdate"
|
||||
xml = true
|
||||
lua = "data.attr.version = '2'; modified = true"
|
||||
files = ["config/**/*.xml"]
|
||||
```
|
||||
|
||||
### Command fields
|
||||
|
||||
- `name`: display/log/filter name.
|
||||
- `regex`: single regex pattern (regex mode).
|
||||
- `regexes`: multiple regex patterns for one command.
|
||||
- `regex_pred`: optional prefilter regex; command runs only when file content matches.
|
||||
- `regex_preds`: optional list of prefilter regex patterns (OR semantics).
|
||||
- `lua`: Lua snippet or external file reference via `@path/to/script.lua`.
|
||||
- `files`: glob patterns to match target files.
|
||||
- `json`: command-level JSON mode.
|
||||
- `xml`: command-level XML mode.
|
||||
- `noreset`: skip reset for target files before this run (`false` by default, so files are reset unless `true`).
|
||||
- `isolate`: run command in an isolated pass before regular commands.
|
||||
- `nodedup`: disable overlap deduplication for capture groups.
|
||||
- `disable`: skip command.
|
||||
- `raw`: disable primitive wrapping/coercion for this command.
|
||||
- `loglevel`: per-command log level override.
|
||||
|
||||
## Processing modes
|
||||
|
||||
### Regex mode (default)
|
||||
|
||||
- Use `regex` or `regexes` in a command.
|
||||
- Pattern helpers:
|
||||
- `!num` -> numeric capture pattern (`(-?\d*\.?\d+)`)
|
||||
- `!any` -> non-greedy wildcard (`.*?`)
|
||||
- `!rep(pattern, n)` -> repeats `pattern` `n` times
|
||||
- Unnamed capture groups are available as `s1..sN` (string) and `v1..vN` (number when parseable).
|
||||
- Named capture groups are available as Lua variables by name.
|
||||
- Set `replacement` in Lua to replace the full match; otherwise updates are applied to changed captures.
|
||||
|
||||
### JSON mode
|
||||
|
||||
- Enable per-command with `json = true`.
|
||||
- Parsed JSON is exposed as `data` in Lua.
|
||||
- JSONPath helper is available: `jpath(node, expr)` (use `data` for root).
|
||||
- JSONPath remover is available: `jpathrm(node, expr)` (returns removed count).
|
||||
- Primitive leaves are wrapped as `{ val = ..., raw = ... }` unless `raw = true` on the command.
|
||||
- Set `modified = true` (or return truthy/nil) to apply changes.
|
||||
- Changes are applied surgically where possible to preserve unrelated formatting.
|
||||
|
||||
### XML mode
|
||||
|
||||
- Enable per-command with `xml = true`.
|
||||
- Parsed XML is exposed as `data` in Lua.
|
||||
- XML nodes expose:
|
||||
- `data.tag`: element name
|
||||
- `data.text`: wrapped primitive (`.val` and `.raw`)
|
||||
- `data.attr`: attribute table of wrapped primitives (`.val` and `.raw`)
|
||||
- `data.children`: child node array
|
||||
- `data.name.local`: local name
|
||||
- `data.name.uri`: namespace URI
|
||||
- `data.name.prefix`: namespace prefix
|
||||
- XPath helper is available: `xpath(node, expr)` (use `data` for root).
|
||||
- XPath remover is available: `xpathrm(node, expr)` (returns removed count).
|
||||
- Add/remove child nodes by mutating `data.children` with normal Lua table operations.
|
||||
- Set `modified = true` (or return truthy/nil) to apply changes.
|
||||
- Existing XML is edited surgically where possible; newly added nodes are serialized from the Lua node table.
|
||||
|
||||
## Lua runtime reference
|
||||
|
||||
### Variables available in Lua
|
||||
|
||||
- Global cook variables: everything under `[variables]` is injected as a Lua global.
|
||||
- `$varName` substitution is supported inside `lua` expressions (`v1 * $multiplier`).
|
||||
- `@script.lua` in `lua` loads an external script relative to the cook file directory.
|
||||
- `file`: current file path being processed.
|
||||
- `modified`: boolean gate for applying changes.
|
||||
- Defaults to `false` before script execution.
|
||||
- The wrapper sets it to `true` when your script returns `nil` or truthy.
|
||||
- Set `modified = false` (or `return false`) to skip writing changes.
|
||||
|
||||
Regex mode adds:
|
||||
|
||||
- `s1..sN`: string value for unnamed capture groups.
|
||||
- `v1..vN`: numeric value for unnamed capture groups when parseable as number.
|
||||
- Named captures as variables, for example `(?P<amount>!num)` -> `amount`.
|
||||
- `replacement`: optional full-match replacement string.
|
||||
|
||||
Notes:
|
||||
|
||||
- If you prefer fixed positions, treat captures as `s1..s9` and `v1..v9` (and beyond when needed).
|
||||
- `vX` only exists when capture `sX` parses as a number.
|
||||
|
||||
JSON mode adds:
|
||||
|
||||
- `data`: parsed JSON document as a mutable Lua table.
|
||||
- `jpath(node, expr)`: evaluate JSONPath relative to `node`.
|
||||
- `jpathrm(node, expr)`: remove JSON nodes matched by JSONPath.
|
||||
|
||||
XML mode adds:
|
||||
|
||||
- `data`: parsed XML node tree as a mutable Lua table.
|
||||
- Node shape:
|
||||
- `tag`
|
||||
- `text`
|
||||
- `attr`
|
||||
- `children`
|
||||
- `name.local`
|
||||
- `name.uri`
|
||||
- `name.prefix`
|
||||
- `xpath(node, expr)`: evaluate XPath relative to `node`.
|
||||
- `xpathrm(node, expr)`: remove XML nodes matched by XPath.
|
||||
|
||||
Example:
|
||||
|
||||
```lua
|
||||
for _, item in ipairs(xpath(data, "//Item")) do
|
||||
if item.attr.Weight then
|
||||
item.attr.Weight.val = item.attr.Weight.val * 2
|
||||
end
|
||||
end
|
||||
|
||||
for _, group in ipairs(xpath(data, "//Group")) do
|
||||
for _, entry in ipairs(xpath(group, ".//Entry")) do
|
||||
entry.attr.Enabled = "true"
|
||||
end
|
||||
end
|
||||
|
||||
modified = true
|
||||
```
|
||||
|
||||
### CSV helpers
|
||||
|
||||
- `fromCSV(csvText, options)` parses CSV/TSV into rows.
|
||||
- `toCSV(rows, options)` converts rows back into CSV/TSV text.
|
||||
|
||||
Supported options keys:
|
||||
|
||||
- `delimiter` (default `","`)
|
||||
- `hasheader` (default `false`)
|
||||
- `hascomments` (default `false`, skips lines starting with `#`)
|
||||
|
||||
Example:
|
||||
|
||||
```lua
|
||||
local rows = fromCSV(csvText, {
|
||||
delimiter = "\t",
|
||||
hasheader = true,
|
||||
hascomments = true,
|
||||
})
|
||||
|
||||
rows[1].Price = num(rows[1].Price) * 1.1
|
||||
replacement = toCSV(rows, { delimiter = "\t", hasheader = true })
|
||||
modified = true
|
||||
```
|
||||
|
||||
## Useful Lua helpers
|
||||
|
||||
Built-in helpers:
|
||||
|
||||
- Math: `min`, `max`, `round`, `floor`, `ceil`
|
||||
- Strings: `upper`, `lower`, `trim`, `format`, `strsplit`
|
||||
- CSV: `fromCSV`, `toCSV`
|
||||
- Conversion: `num`, `str`, `is_number`
|
||||
- Tables: `isArray`, `dump`
|
||||
- HTTP: `fetch(url, options)` returns `{status, statusText, ok, body, headers}`
|
||||
- Regex: `re(pattern, input)` returns match table (`[1]` full match, `[2+]` groups)
|
||||
- JSON traversal: `jpath`, `visitJSON`, `findInJSON`, `modifyJSONNumbers`
|
||||
- Logging: `print(...)` (redirected to app logger)
|
||||
|
||||
Run `chef --help` for the full generated helper reference, and `chef --meta` to generate `meta.lua` for editor autocomplete.
|
||||
|
||||
## Notes about `example_cook.toml`
|
||||
|
||||
`example_cook.toml` is intentionally broad and demonstrates many patterns (`regexes`, `isolate`, `nodedup`, JSON commands, multiline regex, external systems). Treat it as a feature showcase and copy only the parts you need into your own cook files.
|
||||
|
||||
139
TUI_PLAN.md
Normal file
139
TUI_PLAN.md
Normal file
@@ -0,0 +1,139 @@
|
||||
# TUI V2 Plan (Two-Pane, Lazygit-Style)
|
||||
|
||||
## Objective
|
||||
|
||||
Build a practical TUI that gives immediate feedback:
|
||||
|
||||
- Left pane: expandable tree of cook files and commands
|
||||
- Right pane: live command/watch logs
|
||||
|
||||
This is intentionally a two-pane UI (not multi-window like lazygit).
|
||||
|
||||
## Ground Rules
|
||||
|
||||
- Keep TUI logic isolated in `/tui` and thin adapter in `tui_cmd.go`.
|
||||
- Do not print runtime logs to stdout while TUI is active.
|
||||
- In TUI mode, logger handling order is:
|
||||
1. set level to `ERROR` first
|
||||
2. if debug mode enabled, create fresh `logger.Default` and redirect output to file (`chef.log`) using the new output-redirection method.
|
||||
|
||||
## UX Target
|
||||
|
||||
### Left Pane (Tree)
|
||||
|
||||
- Top-level nodes: cook files (config source files)
|
||||
- Child nodes: commands from each cook file
|
||||
- Expand/collapse per cook file (lazygit-like tree interaction)
|
||||
- Multi-select support for commands
|
||||
- Watched marker on watched commands
|
||||
|
||||
### Right Pane (Logs)
|
||||
|
||||
- Large scrollable log buffer
|
||||
- Show timeline entries with timestamp + event source
|
||||
- Show:
|
||||
- run start/end
|
||||
- per-command start/end and summary
|
||||
- watch tick detection and rerun decisions
|
||||
- errors with full message
|
||||
|
||||
### Footer
|
||||
|
||||
- Current mode + focused pane
|
||||
- Last action summary
|
||||
- Watch state summary (on/off, last tick time)
|
||||
|
||||
## Interaction Model
|
||||
|
||||
- `j/k`, arrows: move
|
||||
- `Enter` or `l`: expand/collapse file row
|
||||
- `Space`: select/deselect focused command
|
||||
- `a`: select all commands
|
||||
- `c`: clear selection
|
||||
- `s`: run selected
|
||||
- `S`: run all
|
||||
- `w`: toggle watch on selected commands
|
||||
- `Tab`: switch focus between left/right panes
|
||||
- `g/G` (right pane): top/bottom log
|
||||
- `q`: quit
|
||||
|
||||
## Logging and Feedback Pipeline
|
||||
|
||||
### Core idea
|
||||
|
||||
Treat the right pane as the primary feedback channel.
|
||||
|
||||
### Implementation
|
||||
|
||||
1. Introduce a TUI log sink (`io.Writer`) that appends lines to in-memory ring buffers.
|
||||
2. Attach sink to logger output in TUI mode (via the new logger redirection method).
|
||||
3. Emit structured lifecycle events in adapter code (`tui_cmd.go`) in addition to normal logs:
|
||||
- `watch_tick_detected`
|
||||
- `watch_reload_done`
|
||||
- `run_started`
|
||||
- `command_started`
|
||||
- `command_finished`
|
||||
- `run_finished`
|
||||
4. Render the active log buffer in right pane with autoscroll (while still allowing manual scroll).
|
||||
|
||||
## Watch Behavior (Pragmatic)
|
||||
|
||||
- Poll cook files at fixed interval (current 500ms is fine to start).
|
||||
- On detected cook-file content change:
|
||||
- reload commands
|
||||
- compare watched command snapshots (string compare on serialized command payload)
|
||||
- rerun watched commands that changed
|
||||
- log all decisions in right pane
|
||||
|
||||
If no watched command changed, log that explicitly.
|
||||
|
||||
## Architecture
|
||||
|
||||
### `/tui`
|
||||
|
||||
- `layout.go`: pane split calculations
|
||||
- `tree.go`: left pane model + expand/collapse + selection
|
||||
- `logs.go`: ring buffer + right pane scroll model
|
||||
- `render.go`: draw both panes + footer
|
||||
- `events.go`: app event types (run/watch/log)
|
||||
- `watch.go`: polling source of watch ticks
|
||||
|
||||
### `tui_cmd.go`
|
||||
|
||||
- load cook files + commands
|
||||
- run selected/all
|
||||
- watch reload + diff + rerun
|
||||
- publish events/log lines to TUI app
|
||||
|
||||
## Borrowed Patterns
|
||||
|
||||
### From `mprocs`
|
||||
|
||||
- Single layout function computes pane rectangles (`AppLayout` style)
|
||||
- Left list + right terminal pane separation
|
||||
- Active pane border/focus clarity
|
||||
|
||||
### From `lazygit`
|
||||
|
||||
- Keep command/action logs in dedicated panel
|
||||
- Keep layout and rendering responsibilities separate
|
||||
- Stream long-running output incrementally to UI pane
|
||||
|
||||
## Delivery Steps
|
||||
|
||||
1. Add two-pane layout skeleton (`left tree`, `right logs`).
|
||||
2. Move existing list/status rendering into left pane.
|
||||
3. Add right-pane ring-buffer log viewer.
|
||||
4. Wire run and watch lifecycle events into logs pane.
|
||||
5. Wire logger output redirection (with new method) for TUI mode.
|
||||
6. Add pane focus + log scrolling keybinds.
|
||||
7. Add explicit watch diagnostics lines to logs pane.
|
||||
8. Verify run/watch feedback is always visible without stdout.
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
- User can always tell whether a command was run and what happened.
|
||||
- Editing watched cook file produces visible watch tick + rerun events in right pane.
|
||||
- Left pane supports expandable cook-file tree with command selection.
|
||||
- TUI does not rely on stdout for feedback.
|
||||
- TUI remains isolated from core logic except through adapter callbacks.
|
||||
20
build.sh
Normal file
20
build.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
TAG="${1:-}"
|
||||
if [ -z "$TAG" ]; then
|
||||
TAG=$(git describe --tags --always --dirty)
|
||||
fi
|
||||
|
||||
COMMIT=$(git rev-parse --short HEAD)
|
||||
BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
||||
LD_FLAGS="-s -w -X main.version=$TAG -X main.commit=$COMMIT -X main.buildDate=$BUILD_DATE"
|
||||
|
||||
echo "Building chef.exe"
|
||||
echo "Version: $TAG"
|
||||
echo "Commit: $COMMIT"
|
||||
echo "Date: $BUILD_DATE"
|
||||
|
||||
go build -ldflags "$LD_FLAGS" -o chef.exe .
|
||||
go install -ldflags "$LD_FLAGS" .
|
||||
195
cli.go
Normal file
195
cli.go
Normal file
@@ -0,0 +1,195 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"cook/processor"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
//go:embed example_cook.toml
|
||||
var exampleTOMLContent string
|
||||
|
||||
// rootCmd represents the base command when called without any subcommands
|
||||
var rootCmd *cobra.Command
|
||||
|
||||
func init() {
|
||||
rootCmd = &cobra.Command{
|
||||
Use: "chef [flags] <cook_files...>",
|
||||
Short: "Batch file transformations with regex, JSON, XML + Lua",
|
||||
Args: cobra.ArbitraryArgs,
|
||||
TraverseChildren: true,
|
||||
Long: `chef processes files using commands loaded from cook files (TOML or YAML).
|
||||
|
||||
Core functionality:
|
||||
- Regex mode (default): capture groups are exposed to Lua as s1..sN / v1..vN plus named captures
|
||||
- JSON mode: set per-command with json=true, with parsed data exposed as Lua global "data" and jpath(node, expr)
|
||||
- XML mode: set per-command with xml=true, with parsed XML tree exposed as Lua global "data" (fields: data.tag, data.text, data.attr, data.children, data.name.local, data.name.uri, data.name.prefix) and xpath(node, expr)
|
||||
- Global variables: [variables] in TOML / variables: in YAML are injected into Lua globals
|
||||
- Glob expansion: files fields support doublestar patterns (**/*.json, etc.)
|
||||
- Safe resets: original file snapshots are stored in data.sqlite and can be restored with "chef reset"
|
||||
|
||||
Utility commands:
|
||||
- chef reset : restore all snapshotted files to original state
|
||||
- chef dump : clear all stored file snapshots
|
||||
- chef fmt : format multiline lua blocks in TOML via stylua
|
||||
- chef gen : generate TOML commands from clipboard diff
|
||||
- chef conv : convert YAML cook files to TOML
|
||||
- chef escape: regexp-escape clipboard text, write back to clipboard
|
||||
|
||||
Extra helpers:
|
||||
- --example generates example_cook.toml
|
||||
- --meta generates meta.lua for LuaLS autocomplete
|
||||
`,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
logger.InitFlag()
|
||||
if cmd.Name() == "tui" {
|
||||
return
|
||||
}
|
||||
|
||||
logLevelStr, _ := cmd.Flags().GetString("loglevel")
|
||||
logLevel := logger.ParseLevel(logLevelStr)
|
||||
logger.SetLevel(logLevel)
|
||||
|
||||
versionFlag, _ := cmd.Flags().GetBool("version")
|
||||
if versionFlag {
|
||||
return
|
||||
}
|
||||
|
||||
logger.Info("chef %s", VersionString())
|
||||
logger.Info("Initializing with log level: %s", logger.GetLevel().String())
|
||||
logger.Trace("Full argv: %v", os.Args)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
versionFlag, _ := cmd.Flags().GetBool("version")
|
||||
if versionFlag {
|
||||
fmt.Println(VersionString())
|
||||
return
|
||||
}
|
||||
|
||||
exampleFlag, _ := cmd.Flags().GetBool("example")
|
||||
if exampleFlag {
|
||||
CreateExampleConfig()
|
||||
return
|
||||
}
|
||||
metaFlag, _ := cmd.Flags().GetBool("meta")
|
||||
if metaFlag {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
logger.Error("Failed to get current directory: %v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
metaPath := filepath.Join(cwd, "meta.lua")
|
||||
if err := processor.GenerateMetaFile(metaPath); err != nil {
|
||||
logger.Error("Failed to generate meta.lua: %v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
return
|
||||
}
|
||||
if len(args) == 0 {
|
||||
cmd.Usage()
|
||||
return
|
||||
}
|
||||
runChef(args, cmd)
|
||||
},
|
||||
}
|
||||
|
||||
// Global flags
|
||||
rootCmd.PersistentFlags().StringP("loglevel", "l", "INFO", "Set logging level: ERROR, WARNING, INFO, DEBUG, TRACE")
|
||||
|
||||
// Local flags
|
||||
rootCmd.Flags().IntP("parallel", "P", 100, "Number of files to process in parallel")
|
||||
rootCmd.Flags().StringP("filter", "f", "", "Filter commands before running them")
|
||||
rootCmd.Flags().BoolP("version", "v", false, "Print version and exit")
|
||||
rootCmd.Flags().BoolP("example", "e", false, "Generate example_cook.toml and exit")
|
||||
rootCmd.Flags().BoolP("meta", "m", false, "Generate meta.lua file for LuaLS autocomplete and exit")
|
||||
|
||||
// Set up examples in the help text
|
||||
rootCmd.SetUsageTemplate(`Usage:{{if .Runnable}}
|
||||
{{.UseLine}}{{end}}{{if .HasAvailableSubCommands}}
|
||||
{{.CommandPath}} [command]{{end}} {{if gt (len .Aliases) 0}}
|
||||
|
||||
Aliases:
|
||||
{{.NameAndAliases}}{{end}}{{if .HasExample}}
|
||||
|
||||
Examples:
|
||||
{{.Example}}{{end}}{{if .HasAvailableSubCommands}}
|
||||
|
||||
Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}}
|
||||
{{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}}
|
||||
|
||||
Flags:
|
||||
{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}}
|
||||
|
||||
Global Flags:
|
||||
{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}}
|
||||
|
||||
Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}}
|
||||
{{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}}
|
||||
|
||||
Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}}
|
||||
`)
|
||||
|
||||
// Add examples
|
||||
rootCmd.Example = `Run a TOML cook file:
|
||||
chef example_cook.toml
|
||||
|
||||
Run multiple cook files:
|
||||
chef base.toml overrides.yaml
|
||||
|
||||
Filter by command names:
|
||||
chef -f "Balance,Prefix" example_cook.toml
|
||||
|
||||
Format lua blocks in TOML files:
|
||||
chef fmt example_cook.toml
|
||||
chef fmt **/*.toml
|
||||
|
||||
Convert YAML cook files to TOML:
|
||||
chef conv *.yml
|
||||
chef conv **/*.yaml
|
||||
|
||||
Generate helpers:
|
||||
chef --example
|
||||
chef --meta
|
||||
chef gen > generated.toml
|
||||
|
||||
Restore or clear snapshots:
|
||||
chef reset
|
||||
chef dump
|
||||
|
||||
Notes:
|
||||
Regex placeholders: !num, !any, !rep(pattern, n)
|
||||
Capture variables: s1..sN (string), v1..vN (number when parseable), plus named captures
|
||||
Full-match override: set Lua global "replacement"
|
||||
Apply gate: set "modified" false or return false to skip writes
|
||||
External Lua: set lua = "@script.lua" in cook file (resolved relative to cook file)
|
||||
|
||||
` + processor.GetLuaFunctionsHelp()
|
||||
|
||||
rootCmd.AddCommand(newFmtCmd())
|
||||
rootCmd.AddCommand(newGenCmd())
|
||||
rootCmd.AddCommand(newConvCmd())
|
||||
rootCmd.AddCommand(newResetCmd())
|
||||
rootCmd.AddCommand(newDumpCmd())
|
||||
rootCmd.AddCommand(newTUICmd())
|
||||
rootCmd.AddCommand(newEscapeCmd())
|
||||
}
|
||||
|
||||
func CreateExampleConfig() {
|
||||
createExampleConfigLogger := logger.WithPrefix("CreateExampleConfig")
|
||||
createExampleConfigLogger.Debug("Creating example configuration file")
|
||||
|
||||
createExampleConfigLogger.Debug("Writing example_cook.toml")
|
||||
err := os.WriteFile("example_cook.toml", []byte(exampleTOMLContent), 0644)
|
||||
if err != nil {
|
||||
createExampleConfigLogger.Error("Failed to write example_cook.toml: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
createExampleConfigLogger.Info("Wrote example_cook.toml")
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"modify/logger"
|
||||
"time"
|
||||
)
|
||||
|
||||
// main is a small manual smoke test for the logger package: it emits one
// message per level at DEBUG verbosity, then checks that logging works from
// a goroutine started via logger.SafeGo.
// NOTE(review): the surrounding diff shows this file as deleted (@@ -1,27 +0,0).
func main() {
	// Initialize logger with DEBUG level
	logger.Init(logger.LevelDebug)

	// Test different log levels
	logger.Info("This is an info message")
	logger.Debug("This is a debug message")
	logger.Warning("This is a warning message")
	logger.Error("This is an error message")
	logger.Trace("This is a trace message (not visible at DEBUG level)")

	// Test with a goroutine
	logger.SafeGo(func() {
		time.Sleep(10 * time.Millisecond)
		logger.Info("Message from goroutine")
	})

	// Wait for goroutine to complete
	time.Sleep(20 * time.Millisecond)
}
|
||||
350
command_runner.go
Normal file
350
command_runner.go
Normal file
@@ -0,0 +1,350 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"sync/atomic"
|
||||
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
var ErrNothingToDo = errors.New("nothing to do")
|
||||
|
||||
func loggerForCommand(commandName string, commandLoggers map[string]*logger.Logger) *logger.Logger {
|
||||
cmdLogger := logger.Default
|
||||
if cmdLog, ok := commandLoggers[commandName]; ok {
|
||||
cmdLogger = cmdLog
|
||||
}
|
||||
return cmdLogger
|
||||
}
|
||||
|
||||
func addCommandModificationCount(commandName string, delta int) {
|
||||
if delta <= 0 {
|
||||
return
|
||||
}
|
||||
count, ok := stats.ModificationsPerCommand.Load(commandName)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
stats.ModificationsPerCommand.Store(commandName, count.(int)+delta)
|
||||
}
|
||||
|
||||
// RunOtherCommands applies all non-isolate commands associated with file to
// fileDataStr and returns the (possibly modified) content. Structured (JSON /
// XML) commands apply their modifications immediately, one command at a time;
// regex commands are collected and their modifications are applied in one
// batch at the end. Returns ErrNothingToDo when no regex modifications were
// produced and no structured command changed the content.
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger) (string, error) {
	runOtherCommandsLogger := logger.WithPrefix("RunOtherCommands").WithField("file", fmt.Sprintf("%q", file))
	runOtherCommandsLogger.Debug("Running other commands for file")
	runOtherCommandsLogger.Trace("File data before modifications: %s", utils.LimitString(fileDataStr, 200))

	// Regex commands are deferred to a second pass; structured commands run now.
	regexCommands := []utils.ModifyCommand{}
	structuredModified := false

	// First pass: stats bookkeeping, predicate gating, and immediate
	// application of JSON/XML commands against the current content.
	for _, command := range association.Commands {
		commandName := commandNameForStats(command)
		incrementCommandCounter(commandName, func(cs *CommandStats) {
			atomic.AddInt64(&cs.FilesSeen, 1)
		})

		matches, predErr := commandPredicateMatches(command, fileDataStr)
		if predErr != nil {
			runOtherCommandsLogger.Error("Predicate error for command %q: %v", command.Name, predErr)
			incrementCommandError(commandName, predErr)
			continue
		}
		if !matches {
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesPredicateSkipped, 1)
			})
			continue
		}

		cmdLogger := loggerForCommand(command.Name, commandLoggers)

		if command.JSON {
			cmdLogger.Debug("Processing file with JSON mode for command %q", command.Name)
			newModifications, err := processor.ProcessJSON(fileDataStr, command, file)
			if err != nil {
				runOtherCommandsLogger.Error("Failed to process file with JSON command %q: %v", command.Name, err)
				incrementCommandError(commandName, err)
				continue
			}

			if len(newModifications) > 0 {
				// Apply immediately so later commands see the updated content.
				var count int
				fileDataStr, count = utils.ExecuteModificationsForFile(file, newModifications, fileDataStr)
				atomic.AddInt64(&stats.TotalModifications, int64(count))
				cmdLogger.Debug("Applied %d JSON modifications for command %q", count, command.Name)
				structuredModified = true
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesModified, 1)
					// NOTE(review): records requested modifications
					// (len(newModifications)) while stats.TotalModifications
					// above records applied ones (count) — confirm intended.
					atomic.AddInt64(&cs.ModificationsTotal, int64(len(newModifications)))
				})
			} else {
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesNoMatch, 1)
				})
			}

			addCommandModificationCount(command.Name, len(newModifications))
		} else if command.XML {
			cmdLogger.Debug("Processing file with XML mode for command %q", command.Name)
			newModifications, err := processor.ProcessXML(fileDataStr, command, file)
			if err != nil {
				runOtherCommandsLogger.Error("Failed to process file with XML command %q: %v", command.Name, err)
				incrementCommandError(commandName, err)
				continue
			}

			if len(newModifications) > 0 {
				var count int
				fileDataStr, count = utils.ExecuteModificationsForFile(file, newModifications, fileDataStr)
				atomic.AddInt64(&stats.TotalModifications, int64(count))
				cmdLogger.Debug("Applied %d XML modifications for command %q", count, command.Name)
				structuredModified = true
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesModified, 1)
					atomic.AddInt64(&cs.ModificationsTotal, int64(len(newModifications)))
				})
			} else {
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesNoMatch, 1)
				})
			}

			addCommandModificationCount(command.Name, len(newModifications))
		} else {
			regexCommands = append(regexCommands, command)
		}
	}

	// Second pass: run each regex command (one pattern at a time) and collect
	// the resulting modifications for a single batched apply below.
	modifications := []utils.ReplaceCommand{}
	numCommandsConsidered := 0
	for _, command := range regexCommands {
		commandName := commandNameForStats(command)
		// NOTE(review): regex commands already had FilesSeen incremented and
		// their predicate evaluated in the loop above; this re-counts them
		// (and re-checks the predicate against content possibly changed by
		// JSON/XML commands) — confirm the double counting is intended.
		incrementCommandCounter(commandName, func(cs *CommandStats) {
			atomic.AddInt64(&cs.FilesSeen, 1)
		})
		matches, predErr := commandPredicateMatches(command, fileDataStr)
		if predErr != nil {
			runOtherCommandsLogger.Error("Predicate error for command %q: %v", command.Name, predErr)
			incrementCommandError(commandName, predErr)
			continue
		}
		if !matches {
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesPredicateSkipped, 1)
			})
			continue
		}

		cmdLogger := loggerForCommand(command.Name, commandLoggers)

		// A command may define several Regexes; an empty list falls back to
		// the single Regex field (possibly empty).
		patterns := command.Regexes
		if len(patterns) == 0 {
			patterns = []string{command.Regex}
		}
		totalModsForCommand := 0
		hadError := false
		for idx, pattern := range patterns {
			// Copy the command so each pattern runs with its own Regex.
			tmpCmd := command
			tmpCmd.Regex = pattern
			cmdLogger.Debug("Begin processing file with command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
			numCommandsConsidered++
			newModifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
			if err != nil {
				runOtherCommandsLogger.Error("Failed to process file with command %q: %v", command.Name, err)
				incrementCommandError(commandName, err)
				hadError = true
				continue
			}
			modifications = append(modifications, newModifications...)
			totalModsForCommand += len(newModifications)
			addCommandModificationCount(command.Name, len(newModifications))

			cmdLogger.Debug("Command %q generated %d modifications (pattern %d/%d)", command.Name, len(newModifications), idx+1, len(patterns))
			cmdLogger.Trace("Modifications generated by command %q: %v", command.Name, newModifications)
			if len(newModifications) == 0 {
				cmdLogger.Debug("No modifications yielded by command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
			}
		}
		if totalModsForCommand > 0 {
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesModified, 1)
				atomic.AddInt64(&cs.ModificationsTotal, int64(totalModsForCommand))
			})
		} else if !hadError {
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesNoMatch, 1)
			})
		}
	}

	runOtherCommandsLogger.Debug("Aggregated %d modifications from %d command-pattern runs", len(modifications), numCommandsConsidered)
	runOtherCommandsLogger.Trace("All aggregated modifications: %v", modifications)

	if len(modifications) == 0 {
		// Structured commands may already have changed the content even if no
		// regex modifications were produced; that still counts as work done.
		if structuredModified {
			return fileDataStr, nil
		}
		runOtherCommandsLogger.Debug("No modifications found for file")
		return fileDataStr, ErrNothingToDo
	}
	runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))

	// Apply the batched regex modifications in a single pass.
	var count int
	fileDataStr, count = utils.ExecuteModificationsForFile(file, modifications, fileDataStr)
	runOtherCommandsLogger.Trace("File data after modifications: %s", utils.LimitString(fileDataStr, 200))

	atomic.AddInt64(&stats.TotalModifications, int64(count))

	runOtherCommandsLogger.Info("Executed %d modifications for file", count)
	return fileDataStr, nil
}
|
||||
|
||||
// RunIsolateCommands applies the file's isolate commands sequentially, each
// one seeing the content produced by the previous (unlike RunOtherCommands,
// which batches regex modifications). Returns the updated content, or the
// original content and ErrNothingToDo when no command changed anything.
func RunIsolateCommands(association utils.FileCommandAssociation, file string, fileDataStr string) (string, error) {
	runIsolateCommandsLogger := logger.WithPrefix("RunIsolateCommands").WithField("file", fmt.Sprintf("%q", file))
	runIsolateCommandsLogger.Debug("Running isolate commands for file")
	runIsolateCommandsLogger.Trace("File data before isolate modifications: %s", utils.LimitString(fileDataStr, 200))

	anythingDone := false
	// currentFileData accumulates changes command by command.
	currentFileData := fileDataStr

	for _, isolateCommand := range association.IsolateCommands {
		commandName := commandNameForStats(isolateCommand)
		incrementCommandCounter(commandName, func(cs *CommandStats) {
			atomic.AddInt64(&cs.FilesSeen, 1)
		})

		// Predicates are evaluated against the current (already modified)
		// content, so earlier commands can enable or disable later ones.
		matches, predErr := commandPredicateMatches(isolateCommand, currentFileData)
		if predErr != nil {
			runIsolateCommandsLogger.Error("Predicate error for isolate command %q: %v", isolateCommand.Name, predErr)
			incrementCommandError(commandName, predErr)
			continue
		}
		if !matches {
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesPredicateSkipped, 1)
			})
			continue
		}

		if isolateCommand.XML {
			runIsolateCommandsLogger.Debug("Begin processing file with XML isolate command %q", isolateCommand.Name)
			modifications, err := processor.ProcessXML(currentFileData, isolateCommand, file)
			if err != nil {
				runIsolateCommandsLogger.Error("Failed to process file with XML isolate command %q: %v", isolateCommand.Name, err)
				incrementCommandError(commandName, err)
				continue
			}

			if len(modifications) == 0 {
				runIsolateCommandsLogger.Debug("XML isolate command %q produced no modifications", isolateCommand.Name)
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesNoMatch, 1)
				})
				continue
			}
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesModified, 1)
				atomic.AddInt64(&cs.ModificationsTotal, int64(len(modifications)))
			})
			anythingDone = true

			runIsolateCommandsLogger.Debug("Executing %d XML isolate modifications for file", len(modifications))
			runIsolateCommandsLogger.Trace("XML isolate modifications: %v", modifications)
			var count int
			currentFileData, count = utils.ExecuteModificationsForFile(file, modifications, currentFileData)
			runIsolateCommandsLogger.Trace("File data after XML isolate modifications: %s", utils.LimitString(currentFileData, 200))

			atomic.AddInt64(&stats.TotalModifications, int64(count))
			addCommandModificationCount(isolateCommand.Name, len(modifications))

			runIsolateCommandsLogger.Info("Executed %d XML isolate modifications for file", count)
		} else if isolateCommand.JSON {
			runIsolateCommandsLogger.Debug("Begin processing file with JSON isolate command %q", isolateCommand.Name)
			modifications, err := processor.ProcessJSON(currentFileData, isolateCommand, file)
			if err != nil {
				runIsolateCommandsLogger.Error("Failed to process file with JSON isolate command %q: %v", isolateCommand.Name, err)
				incrementCommandError(commandName, err)
				continue
			}

			if len(modifications) == 0 {
				runIsolateCommandsLogger.Debug("JSON isolate command %q produced no modifications", isolateCommand.Name)
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesNoMatch, 1)
				})
				continue
			}
			incrementCommandCounter(commandName, func(cs *CommandStats) {
				atomic.AddInt64(&cs.FilesModified, 1)
				atomic.AddInt64(&cs.ModificationsTotal, int64(len(modifications)))
			})
			anythingDone = true

			runIsolateCommandsLogger.Debug("Executing %d JSON isolate modifications for file", len(modifications))
			runIsolateCommandsLogger.Trace("JSON isolate modifications: %v", modifications)
			var count int
			currentFileData, count = utils.ExecuteModificationsForFile(file, modifications, currentFileData)
			runIsolateCommandsLogger.Trace("File data after JSON isolate modifications: %s", utils.LimitString(currentFileData, 200))

			atomic.AddInt64(&stats.TotalModifications, int64(count))
			addCommandModificationCount(isolateCommand.Name, len(modifications))

			runIsolateCommandsLogger.Info("Executed %d JSON isolate modifications for file", count)
		} else {
			// Regex isolate command: run each configured pattern in turn,
			// applying its modifications before the next pattern runs.
			patterns := isolateCommand.Regexes
			if len(patterns) == 0 {
				patterns = []string{isolateCommand.Regex}
			}
			totalModsForCommand := 0
			hadError := false
			for idx, pattern := range patterns {
				// Copy the command so each pattern runs with its own Regex.
				tmpCmd := isolateCommand
				tmpCmd.Regex = pattern
				runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
				modifications, err := processor.ProcessRegex(currentFileData, tmpCmd, file)
				if err != nil {
					runIsolateCommandsLogger.Error("Failed to process file with isolate command %q (pattern %d/%d): %v", isolateCommand.Name, idx+1, len(patterns), err)
					incrementCommandError(commandName, err)
					hadError = true
					continue
				}

				if len(modifications) == 0 {
					runIsolateCommandsLogger.Debug("Isolate command %q produced no modifications (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
					continue
				}
				totalModsForCommand += len(modifications)
				anythingDone = true

				runIsolateCommandsLogger.Debug("Executing %d isolate modifications for file", len(modifications))
				runIsolateCommandsLogger.Trace("Isolate modifications: %v", modifications)
				var count int
				currentFileData, count = utils.ExecuteModificationsForFile(file, modifications, currentFileData)
				runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(currentFileData, 200))

				atomic.AddInt64(&stats.TotalModifications, int64(count))
				addCommandModificationCount(isolateCommand.Name, len(modifications))

				runIsolateCommandsLogger.Info("Executed %d isolate modifications for file", count)
			}
			if totalModsForCommand > 0 {
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesModified, 1)
					atomic.AddInt64(&cs.ModificationsTotal, int64(totalModsForCommand))
				})
			} else if !hadError {
				incrementCommandCounter(commandName, func(cs *CommandStats) {
					atomic.AddInt64(&cs.FilesNoMatch, 1)
				})
			}
		}
	}
	if !anythingDone {
		runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
		return fileDataStr, ErrNothingToDo
	}
	return currentFileData, nil
}
|
||||
125
command_stats.go
Normal file
125
command_stats.go
Normal file
@@ -0,0 +1,125 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
|
||||
"cook/utils"
|
||||
)
|
||||
|
||||
// CommandStats aggregates per-command counters collected while files are
// processed. All fields are updated via sync/atomic by the callers in this
// package, so they must only be accessed with atomic loads/adds.
type CommandStats struct {
	FilesSeen             int64 // files this command was considered for
	FilesPredicateSkipped int64 // files skipped because no regex predicate matched
	FilesModified         int64 // files where the command produced modifications
	FilesNoMatch          int64 // files where the command produced no modifications
	LuaErrors             int64 // errors classified as Lua script failures
	ParseErrors           int64 // errors classified as JSON/XML parse failures
	OtherErrors           int64 // all remaining errors
	ModificationsTotal    int64 // total modifications attributed to this command
}
|
||||
|
||||
func commandNameForStats(command utils.ModifyCommand) string {
|
||||
if command.Name != "" {
|
||||
return command.Name
|
||||
}
|
||||
if command.Regex != "" {
|
||||
return command.Regex
|
||||
}
|
||||
if command.XML {
|
||||
return "<xml-command>"
|
||||
}
|
||||
if command.JSON {
|
||||
return "<json-command>"
|
||||
}
|
||||
return "<unnamed-command>"
|
||||
}
|
||||
|
||||
func getOrCreateCommandStats(commandName string) *CommandStats {
|
||||
if existing, ok := stats.CommandStats.Load(commandName); ok {
|
||||
return existing.(*CommandStats)
|
||||
}
|
||||
created := &CommandStats{}
|
||||
actual, _ := stats.CommandStats.LoadOrStore(commandName, created)
|
||||
return actual.(*CommandStats)
|
||||
}
|
||||
|
||||
func incrementCommandCounter(commandName string, fn func(*CommandStats)) {
|
||||
cs := getOrCreateCommandStats(commandName)
|
||||
fn(cs)
|
||||
}
|
||||
|
||||
func incrementCommandError(commandName string, err error) {
|
||||
cs := getOrCreateCommandStats(commandName)
|
||||
msg := strings.ToLower(err.Error())
|
||||
switch {
|
||||
case strings.Contains(msg, "lua script execution failed"):
|
||||
atomic.AddInt64(&cs.LuaErrors, 1)
|
||||
case strings.Contains(msg, "failed to parse json") || strings.Contains(msg, "failed to parse xml"):
|
||||
atomic.AddInt64(&cs.ParseErrors, 1)
|
||||
default:
|
||||
atomic.AddInt64(&cs.OtherErrors, 1)
|
||||
}
|
||||
}
|
||||
|
||||
func commandPredicateMatches(command utils.ModifyCommand, content string) (bool, error) {
|
||||
preds := make([]string, 0, len(command.RegexPreds)+1)
|
||||
if command.RegexPred != "" {
|
||||
preds = append(preds, command.RegexPred)
|
||||
}
|
||||
preds = append(preds, command.RegexPreds...)
|
||||
if len(preds) == 0 {
|
||||
return true, nil
|
||||
}
|
||||
for _, pred := range preds {
|
||||
matched, err := regexp.MatchString(pred, content)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("invalid regex_pred %q for command %q: %w", pred, command.Name, err)
|
||||
}
|
||||
if matched {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// printCommandSummaryTable renders the per-command statistics table to stdout.
func printCommandSummaryTable() {
	rows := commandSummaryRows()
	utils.RenderCommandSummaryTable(os.Stdout, rows)
}
|
||||
|
||||
// commandSummaryRows snapshots every entry in stats.CommandStats into a slice
// of summary rows. Counters are read with atomic loads so the snapshot is
// safe while workers are still running.
// NOTE(review): sync.Map.Range iterates in no particular order, so row order
// is nondeterministic — confirm the renderer sorts if stable output matters.
func commandSummaryRows() []utils.CommandSummaryRow {
	rows := make([]utils.CommandSummaryRow, 0)
	stats.CommandStats.Range(func(key, value interface{}) bool {
		name := key.(string)
		cs := value.(*CommandStats)

		seen := atomic.LoadInt64(&cs.FilesSeen)
		predSkip := atomic.LoadInt64(&cs.FilesPredicateSkipped)
		filesModified := atomic.LoadInt64(&cs.FilesModified)
		filesNoMatch := atomic.LoadInt64(&cs.FilesNoMatch)
		luaErr := atomic.LoadInt64(&cs.LuaErrors)
		parseErr := atomic.LoadInt64(&cs.ParseErrors)
		otherErr := atomic.LoadInt64(&cs.OtherErrors)
		edits := atomic.LoadInt64(&cs.ModificationsTotal)
		// "Unchanged" lumps together no-match files and all error kinds.
		errorsTotal := luaErr + parseErr + otherErr
		filesUnchanged := filesNoMatch + errorsTotal

		rows = append(rows, utils.CommandSummaryRow{
			Name:           name,
			Seen:           seen,
			PredSkip:       predSkip,
			FilesModified:  filesModified,
			FilesUnchanged: filesUnchanged,
			LuaErr:         luaErr,
			ParseErr:       parseErr,
			OtherErr:       otherErr,
			Edits:          edits,
			HasErrors:      errorsTotal > 0,
		})
		return true
	})
	return rows
}
|
||||
121
conv.go
Normal file
121
conv.go
Normal file
@@ -0,0 +1,121 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newConvCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "conv <yaml-file-pattern> [more yaml-file-pattern...]",
|
||||
Short: "Convert YAML cook files to TOML",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
logger.Info("Converting %d pattern(s)", len(args))
|
||||
for i := 0; i < len(args); i++ {
|
||||
pattern := args[i]
|
||||
logger.Debug("conv %d/%d pattern=%q", i+1, len(args), pattern)
|
||||
if err := ConvertYAMLToTOML(pattern); err != nil {
|
||||
return fmt.Errorf("failed to convert YAML files for pattern %s: %w", pattern, err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertYAMLToTOML converts every YAML cook file matching yamlPattern (a
// doublestar glob) into a sibling .toml file. Existing TOML targets are
// skipped, and per-file read/parse/write failures are logged and skipped
// rather than aborting the run; only a glob failure returns an error.
func ConvertYAMLToTOML(yamlPattern string) error {
	convertLogger := logger.WithPrefix("ConvertYAMLToTOML").WithField("pattern", yamlPattern)
	convertLogger.Debug("Starting YAML to TOML conversion")

	// Split the pattern into its literal directory prefix and glob remainder.
	static, pattern := utils.SplitPattern(yamlPattern)
	yamlFiles, err := doublestar.Glob(os.DirFS(static), pattern)
	if err != nil {
		convertLogger.Error("Failed to glob YAML files: %v", err)
		return fmt.Errorf("failed to glob YAML files: %w", err)
	}

	convertLogger.Debug("Found %d YAML files to convert", len(yamlFiles))

	if len(yamlFiles) == 0 {
		convertLogger.Info("No YAML files found for pattern: %s", yamlPattern)
		return nil
	}

	conversionCount := 0
	skippedCount := 0

	for _, yamlFile := range yamlFiles {
		yamlFilePath := utils.ResolvePath(filepath.Join(static, yamlFile))
		// Target path: same name with the extension swapped for .toml.
		tomlFilePath := strings.TrimSuffix(yamlFilePath, filepath.Ext(yamlFilePath)) + ".toml"

		convertLogger.Debug("Processing YAML file: %s -> %s", yamlFilePath, tomlFilePath)

		// Never overwrite an existing TOML file.
		if _, err := os.Stat(tomlFilePath); err == nil {
			convertLogger.Info("Skipping conversion - TOML file already exists: %s", tomlFilePath)
			skippedCount++
			continue
		}

		yamlData, err := os.ReadFile(yamlFilePath)
		if err != nil {
			convertLogger.Error("Failed to read YAML file %s: %v", yamlFilePath, err)
			continue
		}

		fileCommands, fileVariables, err := utils.LoadCommandsFromCookFile(yamlData)
		if err != nil {
			convertLogger.Error("Failed to parse YAML file %s: %v", yamlFilePath, err)
			continue
		}

		tomlData, err := convertCommandsToTOML(fileCommands, fileVariables)
		if err != nil {
			convertLogger.Error("Failed to convert commands to TOML for %s: %v", yamlFilePath, err)
			continue
		}

		err = os.WriteFile(tomlFilePath, tomlData, 0644)
		if err != nil {
			convertLogger.Error("Failed to write TOML file %s: %v", tomlFilePath, err)
			continue
		}

		convertLogger.Info("Successfully converted %s to %s", yamlFilePath, tomlFilePath)
		conversionCount++
	}

	convertLogger.Info("Conversion completed: %d files converted, %d files skipped", conversionCount, skippedCount)
	return nil
}
|
||||
|
||||
// convertCommandsToTOML marshals the given commands and variables into a TOML
// document with top-level "variables" (omitted when empty) and "commands"
// tables, matching the cook-file layout this tool reads back.
func convertCommandsToTOML(commands []utils.ModifyCommand, variables map[string]interface{}) ([]byte, error) {
	convertLogger := logger.WithPrefix("convertCommandsToTOML")
	convertLogger.Debug("Converting %d commands to TOML format", len(commands))

	// Anonymous wrapper struct fixes the TOML table names and ordering.
	tomlData := struct {
		Variables map[string]interface{} `toml:"variables,omitempty"`
		Commands  []utils.ModifyCommand  `toml:"commands"`
	}{
		Variables: variables,
		Commands:  commands,
	}

	tomlBytes, err := toml.Marshal(tomlData)
	if err != nil {
		convertLogger.Error("Failed to marshal commands to TOML: %v", err)
		return nil, fmt.Errorf("failed to marshal commands to TOML: %w", err)
	}

	convertLogger.Debug("Successfully converted %d commands and %d variables to TOML (%d bytes)", len(commands), len(variables), len(tomlBytes))
	return tomlBytes, nil
}
|
||||
154
conv_test.go
Normal file
154
conv_test.go
Normal file
@@ -0,0 +1,154 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestConvertYAMLToTOMLReadError checks that an unreadable YAML file (mode
// 0000) is logged and skipped without ConvertYAMLToTOML returning an error.
// NOTE(review): mode 0000 does not block reads when running as root or on
// Windows — confirm acceptable for this suite.
func TestConvertYAMLToTOMLReadError(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "convert-read-error-*")
	assert.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	yamlFile := filepath.Join(tmpDir, "test.yml")
	err = os.WriteFile(yamlFile, []byte("commands:\n - name: test\n"), 0000)
	assert.NoError(t, err)

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	os.Chdir(tmpDir)

	err = ConvertYAMLToTOML("test.yml")
	assert.NoError(t, err)

	// Restore permissions so the deferred RemoveAll can clean up.
	os.Chmod(yamlFile, 0644)
}
|
||||
|
||||
// TestConvertYAMLToTOMLParseError checks that an unparseable YAML file is
// skipped without error and that no TOML output file is created for it.
func TestConvertYAMLToTOMLParseError(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "convert-parse-error-*")
	assert.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	yamlFile := filepath.Join(tmpDir, "invalid.yml")
	err = os.WriteFile(yamlFile, []byte("commands:\n - [this is not valid yaml}}"), 0644)
	assert.NoError(t, err)

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	os.Chdir(tmpDir)

	err = ConvertYAMLToTOML("invalid.yml")
	assert.NoError(t, err)

	// The parse failure must not leave a partially written TOML file behind.
	_, statErr := os.Stat(filepath.Join(tmpDir, "invalid.toml"))
	assert.True(t, os.IsNotExist(statErr))
}
|
||||
|
||||
// TestConvertYAMLToTOMLWriteError checks that a failure to write the TOML
// output (read-only destination directory) is logged and skipped without
// ConvertYAMLToTOML returning an error. Skipped in CI where permission
// semantics differ.
// NOTE(review): os.Rename into a 0555 directory may itself fail (the error
// is discarded), in which case the write-error path is not exercised —
// confirm on the target platforms.
func TestConvertYAMLToTOMLWriteError(t *testing.T) {
	if os.Getenv("CI") != "" {
		t.Skip("Skipping write permission test in CI")
	}

	tmpDir, err := os.MkdirTemp("", "convert-write-error-*")
	assert.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	yamlFile := filepath.Join(tmpDir, "test.yml")
	err = os.WriteFile(yamlFile, []byte("commands:\n - name: test\n regex: test\n lua: v1\n files: [test.txt]\n"), 0644)
	assert.NoError(t, err)

	outputDir := filepath.Join(tmpDir, "readonly")
	err = os.Mkdir(outputDir, 0555)
	assert.NoError(t, err)
	// Restore write permission so the deferred RemoveAll can clean up.
	defer os.Chmod(outputDir, 0755)

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	os.Chdir(tmpDir)

	newYamlFile := filepath.Join(outputDir, "test.yml")
	os.Rename(yamlFile, newYamlFile)

	os.Chdir(outputDir)

	err = ConvertYAMLToTOML("test.yml")
	assert.NoError(t, err)
}
|
||||
|
||||
// TestConvertYAMLToTOMLSkipExisting checks that a pre-existing TOML file is
// left untouched: conversion is skipped and the original bytes survive.
func TestConvertYAMLToTOMLSkipExisting(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "convert-skip-test-*")
	assert.NoError(t, err)
	defer os.RemoveAll(tmpDir)

	yamlContent := `
commands:
  - name: "test"
    regex: "value"
    lua: "v1 * 2"
    files: ["*.txt"]
`
	yamlFile := filepath.Join(tmpDir, "test.yml")
	err = os.WriteFile(yamlFile, []byte(yamlContent), 0644)
	assert.NoError(t, err)

	// Pre-create the target so the converter must take the skip path.
	tomlFile := filepath.Join(tmpDir, "test.toml")
	err = os.WriteFile(tomlFile, []byte("# existing"), 0644)
	assert.NoError(t, err)

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	os.Chdir(tmpDir)

	err = ConvertYAMLToTOML("test.yml")
	assert.NoError(t, err)

	content, _ := os.ReadFile(tomlFile)
	assert.Equal(t, "# existing", string(content))
}
|
||||
|
||||
// TestConvertCommandsToTOMLError checks that an unmarshalable variable value
// (a func) makes convertCommandsToTOML return an error.
func TestConvertCommandsToTOMLError(t *testing.T) {
	_, err := convertCommandsToTOML([]utils.ModifyCommand{}, map[string]interface{}{
		"bad": func() {},
	})
	assert.Error(t, err)
}
|
||||
|
||||
// TestConvertYAMLToTOMLSuccess checks the happy path: a valid YAML cook file
// is converted and the resulting TOML contains the commands table.
func TestConvertYAMLToTOMLSuccess(t *testing.T) {
	tmpDir := t.TempDir()
	yamlContent := "commands:\n - name: test\n regex: value\n lua: s1\n files: [\"a.txt\"]\n"
	yamlFile := filepath.Join(tmpDir, "ok.yml")
	require.NoError(t, os.WriteFile(yamlFile, []byte(yamlContent), 0644))

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	require.NoError(t, os.Chdir(tmpDir))

	require.NoError(t, ConvertYAMLToTOML("ok.yml"))

	tomlBytes, err := os.ReadFile(filepath.Join(tmpDir, "ok.toml"))
	require.NoError(t, err)
	assert.Contains(t, string(tomlBytes), "[[commands]]")
	assert.Contains(t, string(tomlBytes), "name = \"test\"")
}
|
||||
|
||||
// TestConvCommandRunE drives the "conv" subcommand's RunE directly and
// checks that the TOML output file is produced.
func TestConvCommandRunE(t *testing.T) {
	tmpDir := t.TempDir()
	yamlContent := "commands:\n - name: test\n regex: value\n lua: s1\n files: [\"a.txt\"]\n"
	require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "c.yml"), []byte(yamlContent), 0644))

	origDir, _ := os.Getwd()
	defer os.Chdir(origDir)
	require.NoError(t, os.Chdir(tmpDir))

	cmd := newConvCmd()
	require.NoError(t, cmd.RunE(cmd, []string{"c.yml"}))

	_, err := os.Stat(filepath.Join(tmpDir, "c.toml"))
	assert.NoError(t, err)
}
|
||||
42
dump.go
Normal file
42
dump.go
Normal file
@@ -0,0 +1,42 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newDumpCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "dump",
|
||||
Short: "Clear all stored file snapshots",
|
||||
Args: cobra.NoArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
logger.Info("Dumping all files from database (clearing snapshots)")
|
||||
db, err := utils.GetDB()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get database: %w", err)
|
||||
}
|
||||
if err := removeAllFiles(db); err != nil {
|
||||
return fmt.Errorf("failed to remove all files from database: %w", err)
|
||||
}
|
||||
logger.Info("Successfully cleared all file snapshots from database")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func removeAllFiles(db utils.DB) error {
|
||||
removeAllFilesLogger := logger.WithPrefix("removeAllFiles")
|
||||
removeAllFilesLogger.Debug("Removing all files from database")
|
||||
err := db.DB().Exec("DELETE FROM file_snapshots").Error
|
||||
if err != nil {
|
||||
removeAllFilesLogger.Error("Failed to remove all files from database: %v", err)
|
||||
} else {
|
||||
removeAllFilesLogger.Debug("All files removed from database")
|
||||
}
|
||||
return err
|
||||
}
|
||||
69
dump_test.go
Normal file
69
dump_test.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func closeTestDB(t *testing.T, db utils.DB) {
|
||||
t.Helper()
|
||||
sqlDB, err := db.DB().DB()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
_ = sqlDB.Close()
|
||||
}
|
||||
|
||||
func TestRemoveAllFilesClearsSnapshots(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "dump-clear-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
db, err := utils.GetDB()
|
||||
require.NoError(t, err)
|
||||
defer closeTestDB(t, db)
|
||||
|
||||
file := filepath.Join(tmpDir, "a.txt")
|
||||
require.NoError(t, db.SaveFile(file, []byte("original")))
|
||||
|
||||
before, err := db.GetAllFiles()
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, before)
|
||||
|
||||
require.NoError(t, removeAllFiles(db))
|
||||
|
||||
after, err := db.GetAllFiles()
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, after, 0)
|
||||
}
|
||||
|
||||
func TestDumpCommandRunE(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "dump-cmd-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
db, err := utils.GetDB()
|
||||
require.NoError(t, err)
|
||||
defer closeTestDB(t, db)
|
||||
require.NoError(t, db.SaveFile(filepath.Join(tmpDir, "b.txt"), []byte("x")))
|
||||
|
||||
cmd := newDumpCmd()
|
||||
err = cmd.RunE(cmd, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
after, err := db.GetAllFiles()
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, after, 0)
|
||||
}
|
||||
70
escape.go
Normal file
70
escape.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/spf13/cobra"
|
||||
clip "golang.design/x/clipboard"
|
||||
)
|
||||
|
||||
func runEscapeClipboard(minimize bool) (int, int, error) {
|
||||
logger.Info("Initializing clipboard...")
|
||||
if err := clip.Init(); err != nil {
|
||||
return 0, 0, fmt.Errorf("initialize clipboard: %w", err)
|
||||
}
|
||||
|
||||
logger.Info("Reading text from clipboard...")
|
||||
clipboardContent := clip.Read(clip.FmtText)
|
||||
inputText := string(clipboardContent)
|
||||
logger.Info("Read %d characters from clipboard", len(inputText))
|
||||
logger.Debug("Original text: %s", inputText)
|
||||
|
||||
escapedText := escapeText(inputText, minimize)
|
||||
|
||||
logger.Info("Escaping regex special characters...")
|
||||
logger.Info("Escaped %d characters", len(escapedText))
|
||||
logger.Debug("Escaped text: %s", escapedText)
|
||||
|
||||
logger.Info("Writing escaped text back to clipboard...")
|
||||
if clip.Write(clip.FmtText, []byte(escapedText)) == nil {
|
||||
return len(inputText), len(escapedText), fmt.Errorf("clipboard write failed")
|
||||
}
|
||||
logger.Info("Successfully wrote escaped text to clipboard")
|
||||
return len(inputText), len(escapedText), nil
|
||||
}
|
||||
|
||||
// escapeText regex-escapes inputText. In plain mode every metacharacter is
// quoted verbatim; in minimize mode each run of spaces/tabs is additionally
// collapsed into the flexible pattern \s*.
func escapeText(inputText string, minimize bool) string {
	if !minimize {
		return regexp.QuoteMeta(inputText)
	}
	// Tokenize into alternating whitespace runs (group 1) and
	// non-whitespace runs (group 2), then rebuild piece by piece.
	tokens := regexp.MustCompile(`([ \t]+)|([^ \t]+)`)
	result := make([]byte, 0, len(inputText))
	for _, tok := range tokens.FindAllStringSubmatch(inputText, -1) {
		if tok[1] != "" {
			result = append(result, `\s*`...)
		} else {
			result = append(result, regexp.QuoteMeta(tok[2])...)
		}
	}
	return string(result)
}
|
||||
|
||||
func newEscapeCmd() *cobra.Command {
|
||||
var minimize bool
|
||||
cmd := &cobra.Command{
|
||||
Use: "escape",
|
||||
Short: "Regexp-escape clipboard text; write result back to clipboard",
|
||||
Args: cobra.NoArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
_, _, err := runEscapeClipboard(minimize)
|
||||
return err
|
||||
},
|
||||
}
|
||||
cmd.Flags().BoolVarP(&minimize, "minimize", "m", false, "minimize: replace whitespace runs with \\s*")
|
||||
return cmd
|
||||
}
|
||||
565
example_cook.toml
Normal file
565
example_cook.toml
Normal file
@@ -0,0 +1,565 @@
|
||||
# Global variables - available to all commands
|
||||
[variables]
|
||||
foobar = 4
|
||||
multiply = 1.5
|
||||
prefix = 'NEW_'
|
||||
enabled = true
|
||||
|
||||
# Multi-regex example using variable in Lua
|
||||
[[commands]]
|
||||
name = 'RFToolsMultiply'
|
||||
regexes = [
|
||||
'generatePerTick = !num',
|
||||
'ticksPer\w+ = !num',
|
||||
'generatorRFPerTick = !num',
|
||||
]
|
||||
lua = '* foobar'
|
||||
files = [
|
||||
'polymc/instances/**/rftools*.toml',
|
||||
'polymc\instances\**\rftools*.toml',
|
||||
]
|
||||
noreset = false
|
||||
|
||||
# Named capture groups with arithmetic and string ops
|
||||
[[commands]]
|
||||
name = 'UpdateAmountsAndItems'
|
||||
regex = '(?P<amount>!num)\s+units\s+of\s+(?P<item>[A-Za-z_\-]+)'
|
||||
lua = 'amount = amount * multiply; item = upper(item); return true'
|
||||
files = ['data/**/*.txt']
|
||||
|
||||
# Full replacement via Lua 'replacement' variable
|
||||
[[commands]]
|
||||
name = 'BumpMinorVersion'
|
||||
regex = 'version\s*=\s*"(?P<major>!num)\.(?P<minor>!num)\.(?P<patch>!num)"'
|
||||
lua = 'replacement = format("version=\"%s.%s.%s\"", major, num(minor)+1, 0); return true'
|
||||
files = ['config/*.ini', 'config/*.cfg']
|
||||
|
||||
# TOML multiline regex example - single quotes make regex natural!
|
||||
[[commands]]
|
||||
name = 'StressValues'
|
||||
regex = '''
|
||||
\[kinetics\.stressValues\.v2\.capacity\]
|
||||
steam_engine = !num
|
||||
water_wheel = !num
|
||||
copper_valve_handle = !num
|
||||
hand_crank = !num
|
||||
creative_motor = !num'''
|
||||
lua = 'v1 * multiply'
|
||||
files = ['*.txt']
|
||||
isolate = true
|
||||
|
||||
# Network configuration with complex multiline regex
|
||||
[[commands]]
|
||||
name = 'NetworkConfig'
|
||||
regex = '''
|
||||
networking\.firewall\.allowPing = true
|
||||
networking\.firewall\.allowedTCPPorts = \[ 47984 47989 47990 \]
|
||||
networking\.firewall\.allowedUDPPortRanges = \[
|
||||
\{ from = \d+; to = \d+; \}
|
||||
\{ from = 8000; to = 8010; \}
|
||||
\]'''
|
||||
lua = "replacement = string.gsub(block[1], 'true', 'false')"
|
||||
files = ['*.conf']
|
||||
isolate = true
|
||||
|
||||
# Simple regex with single quotes - no escaping needed!
|
||||
[[commands]]
|
||||
name = 'EnableFlags'
|
||||
regex = 'enabled\s*=\s*(true|false)'
|
||||
lua = '= enabled'
|
||||
files = ['**/*.toml']
|
||||
|
||||
# Demonstrate NoDedup to allow overlapping replacements
|
||||
[[commands]]
|
||||
name = 'OverlappingGroups'
|
||||
regex = '(?P<a>!num)(?P<b>!num)'
|
||||
lua = 'a = num(a) + 1; b = num(b) + 1; return true'
|
||||
files = ['overlap/**/*.txt']
|
||||
nodedup = true
|
||||
|
||||
# Isolate command example operating on entire matched block
|
||||
[[commands]]
|
||||
name = 'IsolateUppercaseBlock'
|
||||
regex = '''BEGIN
|
||||
(?P<block>!any)
|
||||
END'''
|
||||
lua = 'block = upper(block); return true'
|
||||
files = ['logs/**/*.log']
|
||||
loglevel = 'TRACE'
|
||||
isolate = true
|
||||
|
||||
# Using !rep placeholder and arrays of files
|
||||
[[commands]]
|
||||
name = 'RepeatPlaceholderExample'
|
||||
regex = 'name: (.*) !rep(, .* , 2)'
|
||||
lua = '-- no-op, just demonstrate placeholder; return false'
|
||||
files = ['lists/**/*.yml', 'lists/**/*.yaml']
|
||||
|
||||
# Using string variable in Lua expression
|
||||
[[commands]]
|
||||
name = 'PrefixKeys'
|
||||
regex = '(?P<key>[A-Za-z0-9_]+)\s*='
|
||||
lua = 'key = prefix .. key; return true'
|
||||
files = ['**/*.properties']
|
||||
|
||||
# HTTP fetch example - get version from API and update config
|
||||
[[commands]]
|
||||
name = 'UpdateVersionFromAPI'
|
||||
regex = 'version\s*=\s*"(?P<version>[^"]+)"'
|
||||
lua = '''
|
||||
local response = fetch("https://api.example.com/version", {
|
||||
method = "GET",
|
||||
headers = { ["Accept"] = "application/json" },
|
||||
})
|
||||
if response and response.body then
|
||||
local data = fromJSON(response.body)
|
||||
if data.latest then
|
||||
version = data.latest
|
||||
return true
|
||||
end
|
||||
end
|
||||
return false
|
||||
'''
|
||||
files = ['version.conf']
|
||||
|
||||
# Complex multiline block replacement with state machine
|
||||
[[commands]]
|
||||
name = 'ModifyConfigBlock'
|
||||
regex = '''(?x)
|
||||
\[server\]
|
||||
\s+host\s*=\s*"(?P<host>[^"]+)"
|
||||
\s+port\s*=\s*(?P<port>\d+)
|
||||
\s+ssl\s*=\s*(?P<ssl>true|false)'''
|
||||
lua = '''
|
||||
port = num(port) + 1000
|
||||
ssl = "true"
|
||||
replacement = format('[server]\n host = "%s"\n port = %d\n ssl = %s', host, port, ssl)
|
||||
return true
|
||||
'''
|
||||
files = ['server.conf']
|
||||
|
||||
# Regex with !any to capture entire sections
|
||||
[[commands]]
|
||||
name = 'WrapInComment'
|
||||
regex = 'FEATURE_START\n(?P<feature>!any)\nFEATURE_END'
|
||||
lua = '''
|
||||
replacement = "FEATURE_START\n# " .. feature:gsub("\n", "\n# ") .. "\nFEATURE_END"
|
||||
return true
|
||||
'''
|
||||
files = ['features/**/*.txt']
|
||||
|
||||
# Advanced capture groups with complex logic
|
||||
[[commands]]
|
||||
name = 'UpdateDependencies'
|
||||
regex = 'dependency\("(?P<group>[^"]+)", "(?P<name>[^"]+)", "(?P<version>[^"]+)"\)'
|
||||
lua = '''
|
||||
local major, minor, patch = version:match("(%d+)%.(%d+)%.(%d+)")
|
||||
if major and minor and patch then
|
||||
-- Bump minor version
|
||||
minor = num(minor) + 1
|
||||
version = format("%s.%s.0", major, minor)
|
||||
return true
|
||||
end
|
||||
return false
|
||||
'''
|
||||
files = ['build.gradle', 'build.gradle.kts']
|
||||
|
||||
# JSON mode examples - modify single field
|
||||
[[commands]]
|
||||
name = 'JSONModifyField'
|
||||
json = true
|
||||
lua = '''
|
||||
data.value = 84
|
||||
modified = true
|
||||
'''
|
||||
files = ['data/**/*.json']
|
||||
|
||||
# JSON mode - add new field
|
||||
[[commands]]
|
||||
name = 'JSONAddField'
|
||||
json = true
|
||||
lua = '''
|
||||
data.newField = "added"
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/**/*.json']
|
||||
|
||||
# JSON mode - modify nested fields
|
||||
[[commands]]
|
||||
name = 'JSONNestedModify'
|
||||
json = true
|
||||
lua = '''
|
||||
if data.config and data.config.settings then
|
||||
data.config.settings.enabled = true
|
||||
data.config.settings.timeout = 60
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['settings/**/*.json']
|
||||
|
||||
# JSON mode - modify array elements
|
||||
[[commands]]
|
||||
name = 'JSONArrayMultiply'
|
||||
json = true
|
||||
lua = '''
|
||||
if data.items then
|
||||
for i, item in ipairs(data.items) do
|
||||
data.items[i].value = item.value * multiply
|
||||
end
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['data/**/*.json']
|
||||
|
||||
# JSON mode - modify object version
|
||||
[[commands]]
|
||||
name = 'JSONObjectUpdate'
|
||||
json = true
|
||||
lua = '''
|
||||
data.version = "2.0.0"
|
||||
data.enabled = enabled
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/**/*.json']
|
||||
|
||||
# JSON mode - surgical editing of specific row
|
||||
[[commands]]
|
||||
name = 'JSONSurgicalEdit'
|
||||
json = true
|
||||
lua = '''
|
||||
if data.Rows and data.Rows[1] then
|
||||
data.Rows[1].Weight = 999
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['items/**/*.json']
|
||||
|
||||
# JSON mode - remove array elements conditionally
|
||||
[[commands]]
|
||||
name = 'JSONRemoveDisabled'
|
||||
json = true
|
||||
lua = '''
|
||||
if data.features then
|
||||
local i = 1
|
||||
while i <= #data.features do
|
||||
if data.features[i].enabled == false then
|
||||
table.remove(data.features, i)
|
||||
else
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['config/**/*.json']
|
||||
|
||||
# JSON mode - deep nested object manipulation
|
||||
[[commands]]
|
||||
name = 'JSONDeepUpdate'
|
||||
json = true
|
||||
lua = '''
|
||||
if data.game and data.game.balance and data.game.balance.economy then
|
||||
local econ = data.game.balance.economy
|
||||
econ.inflation = (econ.inflation or 1.0) * 1.05
|
||||
econ.taxRate = 0.15
|
||||
econ.lastUpdate = os.date("%Y-%m-%d")
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['settings/**/*.json']
|
||||
|
||||
# JSON mode - iterate and transform all matching objects
|
||||
[[commands]]
|
||||
name = 'JSONTransformItems'
|
||||
json = true
|
||||
lua = '''
|
||||
local function processItem(item)
|
||||
if item.type == "weapon" and item.damage then
|
||||
item.damage = item.damage * multiply
|
||||
item.modified = true
|
||||
end
|
||||
end
|
||||
|
||||
if data.items then
|
||||
for _, item in ipairs(data.items) do
|
||||
processItem(item)
|
||||
end
|
||||
modified = true
|
||||
elseif data.inventory then
|
||||
for _, item in ipairs(data.inventory) do
|
||||
processItem(item)
|
||||
end
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['data/**/*.json']
|
||||
|
||||
# CSV processing example - read, modify, write
|
||||
[[commands]]
|
||||
name = 'CSVProcess'
|
||||
regex = '(?P<csv>!any)'
|
||||
lua = '''
|
||||
local rows = fromCSV(csv, { hasheader = true })
|
||||
for i, row in ipairs(rows) do
|
||||
if row.Value then
|
||||
row.Value = num(row.Value) * multiply
|
||||
end
|
||||
end
|
||||
replacement = toCSV(rows, { hasheader = true })
|
||||
return true
|
||||
'''
|
||||
files = ['data/**/*.csv']
|
||||
|
||||
# CSV processing with custom delimiter (TSV)
|
||||
[[commands]]
|
||||
name = 'TSVProcess'
|
||||
regex = '(?P<tsv>!any)'
|
||||
lua = '''
|
||||
local rows = fromCSV(tsv, { delimiter = "\t", hasheader = true, hascomments = true })
|
||||
for i, row in ipairs(rows) do
|
||||
if row.Price then
|
||||
row.Price = num(row.Price) * 1.1
|
||||
end
|
||||
end
|
||||
replacement = toCSV(rows, { delimiter = "\t", hasheader = true })
|
||||
return true
|
||||
'''
|
||||
files = ['data/**/*.tsv']
|
||||
|
||||
# CSV processing - modify specific columns
|
||||
[[commands]]
|
||||
name = 'CSVModifyColumns'
|
||||
regex = '(?P<csv>!any)'
|
||||
lua = '''
|
||||
local rows = fromCSV(csv, { hasheader = true })
|
||||
for i, row in ipairs(rows) do
|
||||
if row.Name then
|
||||
row.Name = prefix .. row.Name
|
||||
end
|
||||
if row.Status then
|
||||
row.Status = upper(row.Status)
|
||||
end
|
||||
end
|
||||
replacement = toCSV(rows, { hasheader = true })
|
||||
return true
|
||||
'''
|
||||
files = ['exports/**/*.csv']
|
||||
|
||||
# XML mode - multiply numeric attributes directly through node tables
|
||||
[[commands]]
|
||||
name = 'XMLMultiplyAttributes'
|
||||
xml = true
|
||||
lua = '''
|
||||
for _, elem in ipairs(data.children) do
|
||||
if elem.tag == "Item" then
|
||||
if elem.attr.Weight then
|
||||
elem.attr.Weight = tostring(tonumber(elem.attr.Weight) * multiply)
|
||||
end
|
||||
if elem.attr.Value then
|
||||
elem.attr.Value = tostring(tonumber(elem.attr.Value) * foobar)
|
||||
end
|
||||
end
|
||||
end
|
||||
modified = true
|
||||
'''
|
||||
files = ['game/**/*.xml']
|
||||
|
||||
# XML mode - modify specific element attributes
|
||||
[[commands]]
|
||||
name = 'XMLUpdateAfflictions'
|
||||
xml = true
|
||||
lua = '''
|
||||
local function visit(node)
|
||||
if node.tag == "Affliction" then
|
||||
local id = node.attr.identifier
|
||||
if id == "burn" or id == "bleeding" then
|
||||
if node.attr.strength then
|
||||
node.attr.strength = tostring(tonumber(node.attr.strength) * 0.5)
|
||||
end
|
||||
node.attr.description = "Weakened effect"
|
||||
end
|
||||
end
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child)
|
||||
end
|
||||
end
|
||||
visit(data)
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/Afflictions.xml']
|
||||
|
||||
# XML mode - add new elements by mutating children arrays
|
||||
[[commands]]
|
||||
name = 'XMLAddItems'
|
||||
xml = true
|
||||
lua = '''
|
||||
table.insert(data.children, {
|
||||
tag = "Item",
|
||||
attr = {
|
||||
identifier = "new_item",
|
||||
Weight = "10",
|
||||
Value = "500",
|
||||
},
|
||||
children = {},
|
||||
})
|
||||
modified = true
|
||||
'''
|
||||
files = ['items/**/*.xml']
|
||||
|
||||
# XML mode - remove elements by attribute value
|
||||
[[commands]]
|
||||
name = 'XMLRemoveDisabled'
|
||||
xml = true
|
||||
lua = '''
|
||||
local function prune(node)
|
||||
local i = 1
|
||||
while i <= #(node.children or {}) do
|
||||
local child = node.children[i]
|
||||
if child.tag == "Feature" and child.attr.enabled == "false" then
|
||||
table.remove(node.children, i)
|
||||
else
|
||||
prune(child)
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
prune(data)
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/**/*.xml']
|
||||
|
||||
# XML mode - conditional attribute updates based on other attributes
|
||||
[[commands]]
|
||||
name = 'XMLConditionalUpdate'
|
||||
xml = true
|
||||
lua = '''
|
||||
local function visit(node)
|
||||
if node.tag == "Weapon" then
|
||||
local tier = node.attr.tier
|
||||
if tier and tonumber(tier) and tonumber(tier) >= 3 then
|
||||
if node.attr.damage then
|
||||
node.attr.damage = tostring(tonumber(node.attr.damage) * 1.5)
|
||||
end
|
||||
node.attr.rarity = "legendary"
|
||||
end
|
||||
end
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child)
|
||||
end
|
||||
end
|
||||
|
||||
visit(data)
|
||||
modified = true
|
||||
'''
|
||||
files = ['weapons/**/*.xml']
|
||||
|
||||
# XML mode - modify nested elements
|
||||
[[commands]]
|
||||
name = 'XMLNestedModify'
|
||||
xml = true
|
||||
lua = '''
|
||||
local settings = data.children and data.children[1]
|
||||
if settings then
|
||||
settings.attr.timeout = "120"
|
||||
settings.attr.maxRetries = "5"
|
||||
|
||||
local logging = nil
|
||||
for _, child in ipairs(settings.children or {}) do
|
||||
if child.tag == "Logging" then
|
||||
logging = child
|
||||
break
|
||||
end
|
||||
end
|
||||
|
||||
if logging == nil then
|
||||
table.insert(settings.children, {
|
||||
tag = "Logging",
|
||||
attr = { level = "DEBUG", enabled = "true" },
|
||||
children = {},
|
||||
})
|
||||
else
|
||||
logging.attr.level = "INFO"
|
||||
end
|
||||
end
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/**/*.xml']
|
||||
|
||||
# XML mode - batch attribute operations
|
||||
[[commands]]
|
||||
name = 'XMLBatchAttributeUpdate'
|
||||
xml = true
|
||||
lua = '''
|
||||
local function visit(node)
|
||||
if node.attr.Price then
|
||||
node.attr.Price = tostring(tonumber(node.attr.Price) * 1.1)
|
||||
end
|
||||
if node.attr.Cost then
|
||||
node.attr.Cost = tostring(tonumber(node.attr.Cost) * 0.9)
|
||||
end
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child)
|
||||
end
|
||||
end
|
||||
|
||||
visit(data)
|
||||
modified = true
|
||||
'''
|
||||
files = ['economy/**/*.xml']
|
||||
|
||||
# XML mode - clone and modify elements
|
||||
[[commands]]
|
||||
name = 'XMLCloneItems'
|
||||
xml = true
|
||||
lua = '''
|
||||
local newItems = {}
|
||||
|
||||
for _, item in ipairs(data.children or {}) do
|
||||
local id = item.attr.identifier
|
||||
if item.tag == "Item" and id and id:match("^weapon_") then
|
||||
local upgraded = {
|
||||
tag = "Item",
|
||||
attr = {
|
||||
identifier = id .. "_mk2",
|
||||
Weight = item.attr.Weight,
|
||||
Value = tostring(tonumber(item.attr.Value) * 2),
|
||||
},
|
||||
children = {},
|
||||
}
|
||||
table.insert(newItems, upgraded)
|
||||
end
|
||||
end
|
||||
|
||||
for _, newItem in ipairs(newItems) do
|
||||
table.insert(data.children, newItem)
|
||||
end
|
||||
|
||||
if #newItems > 0 then
|
||||
modified = true
|
||||
end
|
||||
'''
|
||||
files = ['items/**/*.xml']
|
||||
|
||||
# XML mode - remove all children with specific tag
|
||||
[[commands]]
|
||||
name = 'XMLRemoveObsolete'
|
||||
xml = true
|
||||
lua = '''
|
||||
local function prune(node)
|
||||
local i = 1
|
||||
while i <= #(node.children or {}) do
|
||||
local child = node.children[i]
|
||||
if child.tag == "Deprecated" or child.tag == "Legacy" then
|
||||
table.remove(node.children, i)
|
||||
else
|
||||
prune(child)
|
||||
i = i + 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
prune(data)
|
||||
modified = true
|
||||
'''
|
||||
files = ['config/**/*.xml']
|
||||
106
fmt.go
Normal file
106
fmt.go
Normal file
@@ -0,0 +1,106 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// luaMultilineBlock matches a multiline TOML string assigned to `lua`,
// capturing the Lua code between the opening and closing ''' delimiters
// (tolerating CRLF line endings around the delimiters).
var luaMultilineBlock = regexp.MustCompile("(?s)lua = '''\r?\n(.*?)\r?\n'''")
|
||||
|
||||
func newFmtCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "fmt <toml-file> [more toml-file...]",
|
||||
Short: "Format multiline lua = '''...''' blocks via stylua",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
logger.Info("Formatting %d file(s)", len(args))
|
||||
logger.Trace("fmt args: %v", args)
|
||||
for i := 0; i < len(args); i++ {
|
||||
path := args[i]
|
||||
flog := logger.WithPrefix(path)
|
||||
flog.Debug("fmt %d/%d", i+1, len(args))
|
||||
if err := formatLuaBlocksInTOML(path, flog); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func formatLuaBlocksInTOML(path string, flog *logger.Logger) error {
|
||||
flog.Trace("Reading file")
|
||||
contentBytes, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read %s: %w", path, err)
|
||||
}
|
||||
content := string(contentBytes)
|
||||
|
||||
matches := luaMultilineBlock.FindAllStringSubmatchIndex(content, -1)
|
||||
flog.Trace("Found %d multiline Lua block(s)", len(matches))
|
||||
if len(matches) == 0 {
|
||||
flog.Debug("No changes needed")
|
||||
return nil
|
||||
}
|
||||
|
||||
var out strings.Builder
|
||||
out.Grow(len(content) + 128)
|
||||
|
||||
lastEnd := 0
|
||||
for i := 0; i < len(matches); i++ {
|
||||
m := matches[i]
|
||||
if len(m) < 4 {
|
||||
return fmt.Errorf("internal error: unexpected regex match shape in %s", path)
|
||||
}
|
||||
|
||||
codeStart, codeEnd := m[2], m[3]
|
||||
if codeStart < 0 || codeEnd < 0 {
|
||||
return fmt.Errorf("internal error: missing capture ranges in %s", path)
|
||||
}
|
||||
|
||||
code := content[codeStart:codeEnd]
|
||||
flog.Trace("Formatting block %d (lua bytes=%d)", i+1, len(code))
|
||||
formatted, err := styluaFormat(code)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
formatted = strings.TrimRight(formatted, "\r\n")
|
||||
|
||||
out.WriteString(content[lastEnd:codeStart])
|
||||
out.WriteString(formatted)
|
||||
lastEnd = codeEnd
|
||||
}
|
||||
out.WriteString(content[lastEnd:])
|
||||
|
||||
newContent := out.String()
|
||||
if newContent == content {
|
||||
flog.Debug("No changes needed")
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := os.WriteFile(path, []byte(newContent), 0644); err != nil {
|
||||
return fmt.Errorf("failed to write %s: %w", path, err)
|
||||
}
|
||||
flog.Info("Formatted %d Lua block(s)", len(matches))
|
||||
return nil
|
||||
}
|
||||
|
||||
// styluaFormat pipes code through the external `stylua` formatter (reading
// from stdin via "-") and returns the formatted output.
func styluaFormat(code string) (string, error) {
	cmd := exec.Command("stylua", "-")
	cmd.Stdin = strings.NewReader(code)
	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		// Fix: wrap the underlying exec error too. The original dropped it,
		// so a missing stylua binary (which writes nothing to stderr)
		// produced an empty, unhelpful message.
		return "", fmt.Errorf("stylua error: %w\n%s", err, strings.TrimRight(stderr.String(), "\n"))
	}
	return stdout.String(), nil
}
|
||||
79
fmt_test.go
Normal file
79
fmt_test.go
Normal file
@@ -0,0 +1,79 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestFormatLuaBlocksInTOML_NoBlocks_NoChange(t *testing.T) {
|
||||
tmp := t.TempDir()
|
||||
path := filepath.Join(tmp, "test.toml")
|
||||
original := "[[commands]]\nname = 'x'\nlua = 'print(1)'\n"
|
||||
require.NoError(t, os.WriteFile(path, []byte(original), 0644))
|
||||
|
||||
err := formatLuaBlocksInTOML(path, logger.WithPrefix("test"))
|
||||
require.NoError(t, err)
|
||||
|
||||
out, err := os.ReadFile(path)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, original, string(out))
|
||||
}
|
||||
|
||||
func TestFormatLuaBlocksInTOML_FormatsMultilineLua(t *testing.T) {
|
||||
if _, err := exec.LookPath("stylua"); err != nil {
|
||||
t.Skip("stylua not installed")
|
||||
}
|
||||
|
||||
tmp := t.TempDir()
|
||||
path := filepath.Join(tmp, "test.toml")
|
||||
content := "[[commands]]\nname = 'x'\nlua = '''\nlocal x=1\nif x>0 then\nprint( x )\nend\n'''\n"
|
||||
require.NoError(t, os.WriteFile(path, []byte(content), 0644))
|
||||
|
||||
err := formatLuaBlocksInTOML(path, logger.WithPrefix("test"))
|
||||
require.NoError(t, err)
|
||||
|
||||
out, err := os.ReadFile(path)
|
||||
require.NoError(t, err)
|
||||
text := string(out)
|
||||
assert.Contains(t, text, "local x = 1")
|
||||
assert.Contains(t, text, "if x > 0 then")
|
||||
assert.Contains(t, text, "print(x)")
|
||||
}
|
||||
|
||||
func TestStyluaFormatError(t *testing.T) {
|
||||
origPath := os.Getenv("PATH")
|
||||
t.Setenv("PATH", "")
|
||||
if strings.Contains(strings.ToLower(os.Getenv("pathext")), ".exe") {
|
||||
t.Setenv("PATHEXT", ".EXE")
|
||||
}
|
||||
|
||||
_, err := styluaFormat("local x = 1")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "stylua error")
|
||||
t.Setenv("PATH", origPath)
|
||||
}
|
||||
|
||||
func TestFmtCommandRunE(t *testing.T) {
|
||||
if _, err := exec.LookPath("stylua"); err != nil {
|
||||
t.Skip("stylua not installed")
|
||||
}
|
||||
tmp := t.TempDir()
|
||||
path := filepath.Join(tmp, "test.toml")
|
||||
content := "[[commands]]\nname = 'x'\nlua = '''\nlocal z=2\nprint( z )\n'''\n"
|
||||
require.NoError(t, os.WriteFile(path, []byte(content), 0644))
|
||||
|
||||
cmd := newFmtCmd()
|
||||
err := cmd.RunE(cmd, []string{path})
|
||||
require.NoError(t, err)
|
||||
|
||||
out, err := os.ReadFile(path)
|
||||
require.NoError(t, err)
|
||||
assert.Contains(t, string(out), "local z = 2")
|
||||
}
|
||||
313
gen.go
Normal file
313
gen.go
Normal file
@@ -0,0 +1,313 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/atotto/clipboard"
|
||||
"github.com/bluekeyes/go-gitdiff/gitdiff"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// nameSanitizer matches any run of non-alphanumeric characters; used when
// deriving command names from file paths.
var nameSanitizer = regexp.MustCompile(`[^A-Za-z0-9]+`)
|
||||
|
||||
// diffChunk is one contiguous run of added/deleted lines from a unified diff
// hunk, together with the unchanged context lines surrounding it.
type diffChunk struct {
	oldLines  []string // deleted ("-") lines, in file order
	newLines  []string // added ("+") lines, in file order
	preLines  []string // context lines immediately preceding the run
	postLines []string // context lines immediately following the run
	mode      chunkMode
}
|
||||
|
||||
// chunkMode classifies how a diffChunk is turned into a replacement command.
type chunkMode int

const (
	// chunkReplaceOrDelete: the chunk contains deleted lines, so the old
	// block itself can be matched and replaced (or removed).
	chunkReplaceOrDelete chunkMode = iota
	// chunkInsertWithContext: an insertion-only chunk that must be anchored
	// by its surrounding context lines.
	chunkInsertWithContext
)
|
||||
|
||||
func newGenCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "gen",
|
||||
Short: "Generate TOML commands from clipboard diff",
|
||||
Args: cobra.NoArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
output, skipped, err := GenerateCommandsTOMLFromClipboard()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate commands from clipboard diff: %w", err)
|
||||
}
|
||||
logger.Info("Generated commands from clipboard diff")
|
||||
fmt.Print(output)
|
||||
if skipped > 0 {
|
||||
fmt.Fprintf(os.Stderr, "skipped %d insertion-only diff block(s)\n", skipped)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func GenerateCommandsTOMLFromClipboard() (string, int, error) {
|
||||
diffText, err := clipboard.ReadAll()
|
||||
if err != nil {
|
||||
return "", 0, fmt.Errorf("failed to read clipboard: %w", err)
|
||||
}
|
||||
|
||||
commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
|
||||
if err != nil {
|
||||
return "", skipped, err
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
return "", skipped, fmt.Errorf("no replaceable diff blocks found in clipboard")
|
||||
}
|
||||
|
||||
out, err := renderGeneratedCommandsTOML(commands)
|
||||
if err != nil {
|
||||
return "", skipped, err
|
||||
}
|
||||
|
||||
return out, skipped, nil
|
||||
}
|
||||
|
||||
func GenerateCommandsFromUnifiedDiff(diffText string) ([]utils.ModifyCommand, int, error) {
|
||||
diffText = normalizeNewlines(diffText)
|
||||
|
||||
files, _, err := gitdiff.Parse(strings.NewReader(diffText))
|
||||
if err != nil {
|
||||
return nil, 0, fmt.Errorf("failed to parse unified diff: %w", err)
|
||||
}
|
||||
|
||||
commands := make([]utils.ModifyCommand, 0)
|
||||
skippedInsertionOnly := 0
|
||||
perFileCounter := make(map[string]int)
|
||||
|
||||
for _, fileDiff := range files {
|
||||
filePath := normalizeDiffPath(fileDiff.NewName)
|
||||
if filePath == "" {
|
||||
filePath = normalizeDiffPath(fileDiff.OldName)
|
||||
}
|
||||
if filePath == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, fragment := range fileDiff.TextFragments {
|
||||
chunks, skipped := extractDiffChunksFromFragment(fragment)
|
||||
skippedInsertionOnly += skipped
|
||||
|
||||
for _, chunk := range chunks {
|
||||
oldBlock, newBlock := buildBlocks(chunk)
|
||||
|
||||
perFileCounter[filePath]++
|
||||
commands = append(commands, utils.ModifyCommand{
|
||||
Name: generateCommandName(filePath, perFileCounter[filePath]),
|
||||
Regex: oldBlock,
|
||||
Lua: "s1 = [[" + newBlock + "]]",
|
||||
Files: []string{filePath},
|
||||
NoReset: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return commands, skippedInsertionOnly, nil
|
||||
}
|
||||
|
||||
// extractDiffChunksFromFragment groups the fragment's consecutive add/delete
// lines into diffChunks, attaching the immediately surrounding context lines
// to each chunk. The second return value counts insertion-only runs that had
// no context at all and therefore cannot be anchored to a file position.
func extractDiffChunksFromFragment(fragment *gitdiff.TextFragment) ([]diffChunk, int) {
	chunks := make([]diffChunk, 0)
	skippedInsertionOnly := 0
	for i := 0; i < len(fragment.Lines); i++ {
		if fragment.Lines[i].Op == gitdiff.OpContext {
			continue
		}

		// Extend [start, end] over the whole run of non-context lines.
		// Note i is advanced here, so the outer loop resumes after the run.
		start := i
		for i+1 < len(fragment.Lines) && fragment.Lines[i+1].Op != gitdiff.OpContext {
			i++
		}
		end := i

		// Partition the run into removed (old) and inserted (new) lines.
		chunk := diffChunk{}
		for j := start; j <= end; j++ {
			switch fragment.Lines[j].Op {
			case gitdiff.OpDelete:
				chunk.oldLines = append(chunk.oldLines, normalizeDiffLine(fragment.Lines[j].Line))
			case gitdiff.OpAdd:
				chunk.newLines = append(chunk.newLines, normalizeDiffLine(fragment.Lines[j].Line))
			}
		}

		// Collect the context immediately before the run (walking backwards,
		// so prepend to preserve file order) and immediately after it.
		for j := start - 1; j >= 0 && fragment.Lines[j].Op == gitdiff.OpContext; j-- {
			chunk.preLines = append([]string{normalizeDiffLine(fragment.Lines[j].Line)}, chunk.preLines...)
		}
		for j := end + 1; j < len(fragment.Lines) && fragment.Lines[j].Op == gitdiff.OpContext; j++ {
			chunk.postLines = append(chunk.postLines, normalizeDiffLine(fragment.Lines[j].Line))
		}

		// A run with no deleted lines is a pure insertion; it is only usable
		// when some context exists to anchor the insertion point.
		if len(chunk.oldLines) == 0 {
			if len(chunk.preLines) == 0 && len(chunk.postLines) == 0 {
				skippedInsertionOnly++
				continue
			}
			chunk.mode = chunkInsertWithContext
		} else {
			chunk.mode = chunkReplaceOrDelete
		}

		chunks = append(chunks, chunk)
	}

	return chunks, skippedInsertionOnly
}
|
||||
|
||||
func buildBlocks(chunk diffChunk) (string, string) {
|
||||
switch chunk.mode {
|
||||
case chunkInsertWithContext:
|
||||
regex := buildPatternWithContext(chunk.preLines, "", chunk.postLines, true)
|
||||
replacement := strings.Join(chunk.newLines, "\n")
|
||||
if len(chunk.postLines) > 0 {
|
||||
replacement += "\n"
|
||||
}
|
||||
return regex, replacement
|
||||
default:
|
||||
regex := buildPatternWithContext(chunk.preLines, strings.Join(chunk.oldLines, "\n"), chunk.postLines, false)
|
||||
return regex, strings.Join(chunk.newLines, "\n")
|
||||
}
|
||||
}
|
||||
|
||||
// buildPatternWithContext builds a regex that matches captureText (wrapped in
// a capture group) surrounded by the literal pre/post context lines. When
// emptyCapture is true the group matches the empty string, which marks an
// insertion point between the context lines.
func buildPatternWithContext(preLines []string, captureText string, postLines []string, emptyCapture bool) string {
	pre := strings.Join(preLines, "\n")
	post := strings.Join(postLines, "\n")

	var parts []string
	if pre != "" {
		parts = append(parts, regexp.QuoteMeta(pre))
	}

	if emptyCapture {
		if pre != "" {
			parts = append(parts, "\n")
		}
		// Empty group directly abuts the trailing context: the replacement
		// is expected to supply its own trailing newline.
		parts = append(parts, "()")
		if post != "" {
			parts = append(parts, regexp.QuoteMeta(post))
		}
		return strings.Join(parts, "")
	}

	if pre != "" && captureText != "" {
		parts = append(parts, "\n")
	}
	parts = append(parts, "("+regexp.QuoteMeta(captureText)+")")
	if post != "" {
		parts = append(parts, "\n"+regexp.QuoteMeta(post))
	}
	return strings.Join(parts, "")
}
|
||||
|
||||
// normalizeDiffPath strips git's "a/" or "b/" prefix from a diff file name
// and normalizes separators to forward slashes. It returns "" for names that
// do not refer to a real file (empty or the /dev/null placeholders).
func normalizeDiffPath(name string) string {
	p := strings.TrimSpace(name)
	switch p {
	case "", "/dev/null", "a/dev/null", "b/dev/null":
		return ""
	}
	p = strings.TrimPrefix(p, "b/")
	p = strings.TrimPrefix(p, "a/")
	return filepath.ToSlash(p)
}
|
||||
|
||||
func generateCommandName(path string, idx int) string {
|
||||
base := filepath.Base(path)
|
||||
ext := filepath.Ext(base)
|
||||
stem := strings.TrimSuffix(base, ext)
|
||||
stem = nameSanitizer.ReplaceAllString(stem, "_")
|
||||
stem = strings.Trim(stem, "_")
|
||||
if stem == "" {
|
||||
stem = "cmd"
|
||||
}
|
||||
return fmt.Sprintf("gen_%s_%d", stem, idx)
|
||||
}
|
||||
|
||||
func renderGeneratedCommandsTOML(commands []utils.ModifyCommand) (string, error) {
|
||||
var b strings.Builder
|
||||
|
||||
for i, cmd := range commands {
|
||||
if i > 0 {
|
||||
b.WriteString("\n")
|
||||
}
|
||||
b.WriteString("[[commands]]\n")
|
||||
b.WriteString("name = '")
|
||||
b.WriteString(strings.ReplaceAll(cmd.Name, "'", "_"))
|
||||
b.WriteString("'\n")
|
||||
|
||||
b.WriteString("regex = ")
|
||||
b.WriteString(formatTomlRegexLiteral(stripUnsafeControlChars(cmd.Regex)))
|
||||
b.WriteString("\n")
|
||||
|
||||
b.WriteString("lua = '''\n")
|
||||
b.WriteString(stripUnsafeControlChars(cmd.Lua))
|
||||
b.WriteString("\n'''\n")
|
||||
|
||||
b.WriteString("files = [")
|
||||
for j, f := range cmd.Files {
|
||||
if j > 0 {
|
||||
b.WriteString(", ")
|
||||
}
|
||||
b.WriteString("'")
|
||||
b.WriteString(strings.ReplaceAll(f, "'", "_"))
|
||||
b.WriteString("'")
|
||||
}
|
||||
b.WriteString("]\n")
|
||||
b.WriteString("noreset = false\n")
|
||||
}
|
||||
|
||||
out := b.String()
|
||||
var validate struct {
|
||||
Commands []utils.ModifyCommand `toml:"commands"`
|
||||
}
|
||||
if _, err := toml.Decode(out, &validate); err != nil {
|
||||
return "", fmt.Errorf("generated TOML is invalid: %w", err)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// normalizeNewlines converts CRLF and lone CR line endings to plain LF.
func normalizeNewlines(s string) string {
	// NewReplacer tries "\r\n" before "\r" at each position, so CRLF pairs
	// collapse to a single LF rather than two.
	return strings.NewReplacer("\r\n", "\n", "\r", "\n").Replace(s)
}
|
||||
|
||||
// normalizeDiffLine drops a single trailing line terminator (LF, CRLF, or a
// bare CR) from a raw diff line.
func normalizeDiffLine(s string) string {
	// Trim "\n" first so a CRLF ending loses both characters.
	return strings.TrimSuffix(strings.TrimSuffix(s, "\n"), "\r")
}
|
||||
|
||||
// stripUnsafeControlChars removes control characters other than newline and
// tab; every rune at or above 0x20 passes through unchanged.
func stripUnsafeControlChars(s string) string {
	return strings.Map(func(r rune) rune {
		if r == '\n' || r == '\t' || r >= 0x20 {
			return r
		}
		return -1 // drop unsafe control character
	}, s)
}
|
||||
|
||||
// formatTomlRegexLiteral renders s as a TOML multi-line literal string,
// falling back to a quoted basic string when s itself contains the '''
// delimiter.
func formatTomlRegexLiteral(s string) string {
	const delim = "'''"
	if !strings.Contains(s, delim) {
		return delim + "\n" + s + "\n" + delim
	}
	return strconv.Quote(s)
}
|
||||
185
gen_test.go
Normal file
185
gen_test.go
Normal file
@@ -0,0 +1,185 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestGenerateCommandsFromUnifiedDiffBasicReplacement verifies that a single
// one-line replacement hunk yields one command whose regex captures the old
// line verbatim (quoted) and whose lua assigns the new line.
func TestGenerateCommandsFromUnifiedDiffBasicReplacement(t *testing.T) {
	diffText := `diff --git a/src/activity_actor.cpp b/src/activity_actor.cpp
index 1234567..89abcde 100644
--- a/src/activity_actor.cpp
+++ b/src/activity_actor.cpp
@@ -2840 +2840 @@
- if( it != nullptr && it->has_flag( flag_SINGLE_USE ) ) {
+ if( it != nullptr && it->has_flag( flag_SINGLE_USE ) && !it->count_by_charges() ) {
`

	commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
	require.NoError(t, err)
	require.Equal(t, 0, skipped)
	require.Len(t, commands, 1)

	cmd := commands[0]
	assert.Equal(t, false, cmd.NoReset)
	assert.Equal(t, []string{"src/activity_actor.cpp"}, cmd.Files)
	oldLine := " if( it != nullptr && it->has_flag( flag_SINGLE_USE ) ) {"
	assert.Equal(t, "("+regexp.QuoteMeta(oldLine)+")", cmd.Regex)
	assert.Equal(t, `s1 = [[ if( it != nullptr && it->has_flag( flag_SINGLE_USE ) && !it->count_by_charges() ) {]]`, cmd.Lua)
	assert.True(t, strings.HasPrefix(cmd.Name, "gen_activity_actor_"))
}
|
||||
|
||||
// TestGenerateCommandsFromUnifiedDiffInsertionOnlyAnchoredByContext verifies
// that an insertion-only hunk preceded by a context line is anchored with an
// empty capture group after that context rather than being skipped.
func TestGenerateCommandsFromUnifiedDiffInsertionOnlyAnchoredByContext(t *testing.T) {
	diffText := `diff --git a/src/ranged.cpp b/src/ranged.cpp
index 1234567..89abcde 100644
--- a/src/ranged.cpp
+++ b/src/ranged.cpp
@@ -100,1 +101,3 @@
 line_before
+
+ dispersion.add_multiplier(0.25);
`

	commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
	require.NoError(t, err)
	assert.Equal(t, 0, skipped)
	require.Len(t, commands, 1)
	cmd := commands[0]
	assert.Equal(t, regexp.QuoteMeta("line_before")+"\n()", cmd.Regex)
	assert.Equal(t, "s1 = [[\n dispersion.add_multiplier(0.25);]]", cmd.Lua)
}
|
||||
|
||||
// TestGenerateCommandsFromUnifiedDiffDeleteOnly verifies that a hunk that
// only deletes lines captures the whole deleted block and produces an empty
// lua replacement.
func TestGenerateCommandsFromUnifiedDiffDeleteOnly(t *testing.T) {
	diffText := `diff --git a/src/ranged.cpp b/src/ranged.cpp
index 1234567..89abcde 100644
--- a/src/ranged.cpp
+++ b/src/ranged.cpp
@@ -200,2 +200,0 @@
-old line one
-old line two
`

	commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
	require.NoError(t, err)
	assert.Equal(t, 0, skipped)
	require.Len(t, commands, 1)

	cmd := commands[0]
	oldBlock := "old line one\nold line two"
	assert.Equal(t, "("+regexp.QuoteMeta(oldBlock)+")", cmd.Regex)
	assert.Equal(t, `s1 = [[]]`, cmd.Lua)
	assert.Equal(t, false, cmd.NoReset)
}
|
||||
|
||||
// TestRenderGeneratedCommandsTOMLUsesLiteralNewlinesWithoutLogLevel checks
// that rendered TOML uses multi-line literal strings (real newlines, no
// backslash-n escapes) and never emits a loglevel key.
func TestRenderGeneratedCommandsTOMLUsesLiteralNewlinesWithoutLogLevel(t *testing.T) {
	// Anonymous struct mirrors utils.ModifyCommand so the fixture reads as
	// plain data before being copied into the real type below.
	commands := []struct {
		Name    string
		Regex   string
		Lua     string
		Files   []string
		NoReset bool
	}{
		{
			Name:    "gen_advanced_inv_1",
			Regex:   `( const int input_amount = by_charges \? it\.charges : action == "MOVE_SINGLE_ITEM" \? 1 : sitem\.stacks;)`,
			Lua:     "s1 = [[ const int input_amount = std::max( 1, by_charges ? it.charges :\n action == \"MOVE_SINGLE_ITEM\" ? 1 : sitem.stacks );]]",
			Files:   []string{"src/advanced_inv.cpp"},
			NoReset: false,
		},
	}

	actual := make([]utils.ModifyCommand, 0, len(commands))
	for _, cmd := range commands {
		actual = append(actual, utils.ModifyCommand{
			Name:    cmd.Name,
			Regex:   cmd.Regex,
			Lua:     cmd.Lua,
			Files:   cmd.Files,
			NoReset: cmd.NoReset,
		})
	}

	out, err := renderGeneratedCommandsTOML(actual)
	require.NoError(t, err)

	assert.NotContains(t, out, `loglevel`)
	assert.NotContains(t, out, `\n`)
	assert.Contains(t, out, "lua = '''\n")
	assert.Contains(t, out, "regex = '''\n")
	assert.Contains(t, out, "files = ['src/advanced_inv.cpp']")
}
|
||||
|
||||
// TestGenerateCommandsFromUnifiedDiffHandlesCRLFPropsDiff exercises a
// CRLF-terminated MSVC .props diff with two hunks: CR characters must not
// leak into the rendered TOML, and both hunks must yield context-anchored
// commands (an insertion with "()" and a deletion capture).
func TestGenerateCommandsFromUnifiedDiffHandlesCRLFPropsDiff(t *testing.T) {
	diffText := "diff --git a/msvc-full-features/Cataclysm-common.props b/msvc-full-features/Cataclysm-common.props\r\n" +
		"--- a/msvc-full-features/Cataclysm-common.props\r\n" +
		"+++ b/msvc-full-features/Cataclysm-common.props\r\n" +
		"@@ -26,6 +26,11 @@\r\n" +
		"  <PropertyGroup Label=\"Vcpkg\">\r\n" +
		"  <VcpkgAdditionalInstallOptions>--clean-after-build</VcpkgAdditionalInstallOptions>\r\n" +
		"  </PropertyGroup>\r\n" +
		"+ <PropertyGroup>\r\n" +
		"+ <!-- debug versions of libs often (but not always) use `d` suffix, as a convention. E.g. \"SDL2-staticd.lib\" -->\r\n" +
		"+ <VcpkgLibSuffix></VcpkgLibSuffix>\r\n" +
		"+ <VcpkgLibSuffix Condition='\"'\"'$(Configuration)'\"'\"'=='\"'\"'Debug'\"'\"''>d</VcpkgLibSuffix>\r\n" +
		"+ </PropertyGroup>\r\n" +
		"  <PropertyGroup Condition=\"$(_CDDA_USE_CCACHE)\">\r\n" +
		"  <ClToolPath>$(CDDA_CCACHE_PATH)</ClToolPath>\r\n" +
		"  </PropertyGroup>\r\n" +
		"@@ -39,8 +44,6 @@\r\n" +
		"  <!-- vcpkg passes dependecy libs via a glob pattern which lld-link doesn't accept. We have to manually enumerate the deps now. -->\r\n" +
		"  <VcpkgAutoLink>false</VcpkgAutoLink>\r\n" +
		"  <!-- debug versions of libs often (but not always) use `d` suffix, as a convention. E.g. \"SDL2-staticd.lib\" -->\r\n" +
		"- <VcpkgLibSuffix></VcpkgLibSuffix>\r\n" +
		"- <VcpkgLibSuffix Condition=\"$(Configuration.StartsWith(Debug))\">d</VcpkgLibSuffix>\r\n" +
		"  </PropertyGroup>\r\n" +
		"  <ItemDefinitionGroup>\r\n" +
		"  <ClCompile>\r\n"

	commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
	require.NoError(t, err)
	assert.Equal(t, 0, skipped)
	assert.GreaterOrEqual(t, len(commands), 2)

	out, err := renderGeneratedCommandsTOML(commands)
	require.NoError(t, err)
	assert.NotContains(t, out, "\r")
	assert.NotContains(t, out, `\n`)
	assert.Contains(t, out, "files = ['msvc-full-features/Cataclysm-common.props']")

	first := commands[0]
	assert.Contains(t, first.Regex, regexp.QuoteMeta(" <PropertyGroup Label=\"Vcpkg\">"))
	assert.Contains(t, first.Regex, regexp.QuoteMeta(" <PropertyGroup Condition=\"$(_CDDA_USE_CCACHE)\">"))
	assert.Contains(t, first.Regex, "()")

	second := commands[1]
	assert.Contains(t, second.Regex, regexp.QuoteMeta(" <!-- debug versions of libs often (but not always) use `d` suffix, as a convention. E.g. \"SDL2-staticd.lib\" -->"))
	assert.Contains(t, second.Regex, regexp.QuoteMeta(" </PropertyGroup>"))
}
|
||||
|
||||
// TestGenerateCommandsFromUnifiedDiffDeletionUsesContextOutsideCapture
// verifies that context lines around a deletion appear in the regex but stay
// outside the capture group, which holds only the deleted block.
func TestGenerateCommandsFromUnifiedDiffDeletionUsesContextOutsideCapture(t *testing.T) {
	diffText := `diff --git a/src/test.txt b/src/test.txt
--- a/src/test.txt
+++ b/src/test.txt
@@ -1,4 +1,2 @@
 keep-before
-delete-one
-delete-two
 keep-after
`

	commands, skipped, err := GenerateCommandsFromUnifiedDiff(diffText)
	require.NoError(t, err)
	assert.Equal(t, 0, skipped)
	require.Len(t, commands, 1)

	cmd := commands[0]
	assert.Contains(t, cmd.Regex, regexp.QuoteMeta("keep-before"))
	assert.Contains(t, cmd.Regex, regexp.QuoteMeta("keep-after"))
	assert.Contains(t, cmd.Regex, "("+regexp.QuoteMeta("delete-one\ndelete-two")+")")
}
|
||||
@@ -1,7 +1,7 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"modify/utils"
|
||||
"cook/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
@@ -82,7 +82,7 @@ func TestGlobExpansion(t *testing.T) {
|
||||
for _, pattern := range tc.patterns {
|
||||
patternMap[pattern] = struct{}{}
|
||||
}
|
||||
files, err := utils.ExpandGLobs(patternMap)
|
||||
files, err := utils.ExpandGlobs(patternMap)
|
||||
if err != nil {
|
||||
t.Fatalf("ExpandGLobs failed: %v", err)
|
||||
}
|
||||
|
||||
68
go.mod
68
go.mod
@@ -1,38 +1,60 @@
|
||||
module modify
|
||||
module cook
|
||||
|
||||
go 1.24.1
|
||||
go 1.24.0
|
||||
|
||||
require (
|
||||
git.site.quack-lab.dev/dave/cylogger v1.8.0
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/yuin/gopher-lua v1.1.1
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
gorm.io/gorm v1.30.0
|
||||
)
|
||||
|
||||
require (
|
||||
dario.cat/mergo v1.0.0 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.1.5 // indirect
|
||||
github.com/cloudflare/circl v1.6.0 // indirect
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
|
||||
github.com/bits-and-blooms/bitset v1.24.2 // indirect
|
||||
github.com/bits-and-blooms/bloom/v3 v3.7.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.6.2 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/kevinburke/ssh_config v1.2.0 // indirect
|
||||
github.com/pjbgf/sha1cd v0.3.2 // indirect
|
||||
github.com/gdamore/encoding v1.0.1 // indirect
|
||||
github.com/hexops/valast v1.5.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.22 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
|
||||
github.com/skeema/knownhosts v1.3.1 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.3 // indirect
|
||||
golang.org/x/crypto v0.35.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
gopkg.in/warnings.v0 v0.1.2 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||
github.com/spf13/pflag v1.0.9 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
golang.org/x/exp/shiny v0.0.0-20250606033433-dcc06ee1d476 // indirect
|
||||
golang.org/x/image v0.28.0 // indirect
|
||||
golang.org/x/mobile v0.0.0-20250606033058-a2a15c67f36f // indirect
|
||||
golang.org/x/mod v0.29.0 // indirect
|
||||
golang.org/x/sync v0.18.0 // indirect
|
||||
golang.org/x/sys v0.38.0 // indirect
|
||||
golang.org/x/term v0.37.0 // indirect
|
||||
golang.org/x/text v0.31.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
golang.org/x/tools v0.38.0 // indirect
|
||||
mvdan.cc/gofumpt v0.4.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/go-git/go-git/v5 v5.14.0
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
golang.org/x/net v0.35.0 // indirect
|
||||
git.site.quack-lab.dev/dave/cyutils v1.7.0
|
||||
github.com/BurntSushi/toml v1.5.0
|
||||
github.com/antchfx/xpath v1.3.6
|
||||
github.com/atotto/clipboard v0.1.4
|
||||
github.com/bluekeyes/go-gitdiff v0.8.1
|
||||
github.com/gdamore/tcell/v2 v2.8.1
|
||||
github.com/google/go-cmp v0.6.0
|
||||
github.com/jedib0t/go-pretty/v6 v6.7.8
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/theory/jsonpath v0.11.0
|
||||
github.com/tidwall/gjson v1.18.0
|
||||
golang.design/x/clipboard v0.7.1
|
||||
gorm.io/driver/sqlite v1.6.0
|
||||
)
|
||||
|
||||
239
go.sum
239
go.sum
@@ -1,106 +1,179 @@
|
||||
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
|
||||
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4=
|
||||
github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
|
||||
git.site.quack-lab.dev/dave/cylogger v1.8.0 h1:a5e3fWTxOar3KnHl7WoifH0hc24KmXuEbNfSOXlPd1Q=
|
||||
git.site.quack-lab.dev/dave/cylogger v1.8.0/go.mod h1:wctgZplMvroA4X6p8f4B/LaCKtiBcT1Pp+L14kcS8jk=
|
||||
git.site.quack-lab.dev/dave/cyutils v1.7.0 h1:+b2mrJ8hVF0jF4amIw9QE6gEHHwTvnu3dKztyttPvzk=
|
||||
git.site.quack-lab.dev/dave/cyutils v1.7.0/go.mod h1:2tH6NnD3fy29GKc3xFMigoeMuJ1eTYqIkGZumczMNxM=
|
||||
github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg=
|
||||
github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/antchfx/xpath v1.3.6 h1:s0y+ElRRtTQdfHP609qFu0+c6bglDv20pqOViQjjdPI=
|
||||
github.com/antchfx/xpath v1.3.6/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
|
||||
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
|
||||
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
|
||||
github.com/bits-and-blooms/bitset v1.24.2 h1:M7/NzVbsytmtfHbumG+K2bremQPMJuqv1JD3vOaFxp0=
|
||||
github.com/bits-and-blooms/bitset v1.24.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
|
||||
github.com/bits-and-blooms/bloom/v3 v3.7.1 h1:WXovk4TRKZttAMJfoQx6K2DM0zNIt8w+c67UqO+etV0=
|
||||
github.com/bits-and-blooms/bloom/v3 v3.7.1/go.mod h1:rZzYLLje2dfzXfAkJNxQQHsKurAyK55KUnL43Euk0hU=
|
||||
github.com/bluekeyes/go-gitdiff v0.8.1 h1:lL1GofKMywO17c0lgQmJYcKek5+s8X6tXVNOLxy4smI=
|
||||
github.com/bluekeyes/go-gitdiff v0.8.1/go.mod h1:WWAk1Mc6EgWarCrPFO+xeYlujPu98VuLW3Tu+B/85AE=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk=
|
||||
github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
|
||||
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
|
||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
|
||||
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
|
||||
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
|
||||
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
|
||||
github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60=
|
||||
github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k=
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
|
||||
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
|
||||
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
|
||||
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
|
||||
github.com/gdamore/encoding v1.0.1 h1:YzKZckdBL6jVt2Gc+5p82qhrGiqMdG/eNs6Wy0u3Uhw=
|
||||
github.com/gdamore/encoding v1.0.1/go.mod h1:0Z0cMFinngz9kS1QfMjCP8TY7em3bZYeeklsSDPivEo=
|
||||
github.com/gdamore/tcell/v2 v2.8.1 h1:KPNxyqclpWpWQlPLx6Xui1pMk8S+7+R37h3g07997NU=
|
||||
github.com/gdamore/tcell/v2 v2.8.1/go.mod h1:bj8ori1BG3OYMjmb3IklZVWfZUJ1UBQt9JXrOCOhGWw=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/hexops/autogold v0.8.1 h1:wvyd/bAJ+Dy+DcE09BoLk6r4Fa5R5W+O+GUzmR985WM=
|
||||
github.com/hexops/autogold v0.8.1/go.mod h1:97HLDXyG23akzAoRYJh/2OBs3kd80eHyKPvZw0S5ZBY=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/hexops/valast v1.5.0 h1:FBTuvVi0wjTngtXJRZXMbkN/Dn6DgsUsBwch2DUJU8Y=
|
||||
github.com/hexops/valast v1.5.0/go.mod h1:Jcy1pNH7LNraVaAZDLyv21hHg2WBv9Nf9FL6fGxU7o4=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jedib0t/go-pretty/v6 v6.7.8 h1:BVYrDy5DPBA3Qn9ICT+PokP9cvCv1KaHv2i+Hc8sr5o=
|
||||
github.com/jedib0t/go-pretty/v6 v6.7.8/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
|
||||
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
|
||||
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
|
||||
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0 h1:2/yBRLdWBZKrf7gB40FoiKfAWYQ0lqNcbuQwVHXptag=
|
||||
github.com/lucasb-eyer/go-colorful v1.3.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.3/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
||||
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
||||
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
|
||||
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
|
||||
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/theory/jsonpath v0.11.0 h1:IlzhI/Ui8nFu50SVpnyYHzZVvI0+etEoZLR5LsZJOVc=
|
||||
github.com/theory/jsonpath v0.11.0/go.mod h1:vl8nfJyq9MKMbcAiKv+7N9W3jDCH8qPr0mZoZj8wRk8=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
|
||||
github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.design/x/clipboard v0.7.1 h1:OEG3CmcYRBNnRwpDp7+uWLiZi3hrMRJpE9JkkkYtz2c=
|
||||
golang.design/x/clipboard v0.7.1/go.mod h1:i5SiIqj0wLFw9P/1D7vfILFK0KHMk7ydE72HRrUIgkg=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/exp/shiny v0.0.0-20250606033433-dcc06ee1d476 h1:Wdx0vgH5Wgsw+lF//LJKmWOJBLWX6nprsMqnf99rYDE=
|
||||
golang.org/x/exp/shiny v0.0.0-20250606033433-dcc06ee1d476/go.mod h1:ygj7T6vSGhhm/9yTpOQQNvuAUFziTH7RUiH74EoE2C8=
|
||||
golang.org/x/image v0.28.0 h1:gdem5JW1OLS4FbkWgLO+7ZeFzYtL3xClb97GaUzYMFE=
|
||||
golang.org/x/image v0.28.0/go.mod h1:GUJYXtnGKEUgggyzh+Vxt+AviiCcyiwpsl8iQ8MvwGY=
|
||||
golang.org/x/mobile v0.0.0-20250606033058-a2a15c67f36f h1:/n+PL2HlfqeSiDCuhdBbRNlGS/g2fM4OHufalHaTVG8=
|
||||
golang.org/x/mobile v0.0.0-20250606033058-a2a15c67f36f/go.mod h1:ESkJ836Z6LpG6mTVAhA48LpfW/8fNR0ifStlH2axyfg=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
|
||||
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
|
||||
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
|
||||
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
||||
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
|
||||
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
|
||||
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
|
||||
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
|
||||
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
|
||||
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
|
||||
mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM=
|
||||
mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ=
|
||||
|
||||
378
isolate_test.go
Normal file
378
isolate_test.go
Normal file
@@ -0,0 +1,378 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func setWorkingDir(t *testing.T, dir string) {
|
||||
t.Helper()
|
||||
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, os.Chdir(dir))
|
||||
|
||||
t.Cleanup(func() {
|
||||
require.NoError(t, os.Chdir(origDir))
|
||||
})
|
||||
}
|
||||
|
||||
func writeTestFile(t *testing.T, path, content string) {
|
||||
t.Helper()
|
||||
require.NoError(t, os.WriteFile(path, []byte(content), 0644))
|
||||
}
|
||||
|
||||
func TestIsolateCommandsSequentialExecution(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create test file content
|
||||
testContent := `BEGIN
|
||||
block1 content with value 42
|
||||
END
|
||||
Some other content
|
||||
BEGIN
|
||||
block2 content with value 100
|
||||
END
|
||||
More content
|
||||
BEGIN
|
||||
block3 content with value 200
|
||||
END`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.txt")
|
||||
writeTestFile(t, testFile, testContent)
|
||||
setWorkingDir(t, tmpDir)
|
||||
|
||||
// Create isolate commands that work sequentially on the same block
|
||||
// First command: 42 -> 84
|
||||
// Second command: 84 -> 168 (works on result of first command)
|
||||
// Third command: 168 -> 336 (works on result of second command)
|
||||
commands := []utils.ModifyCommand{
|
||||
{
|
||||
Name: "MultiplyFirst",
|
||||
Regex: `BEGIN\n(?P<block>.*?value 42.*?)\nEND`,
|
||||
Lua: `replacement = "BEGIN\n" .. string.gsub(block, "42", "84") .. "\nEND"; return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "MultiplySecond",
|
||||
Regex: `BEGIN\nblock1 content with value (?P<value>!num)\nEND`,
|
||||
Lua: `value = "168"; return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "MultiplyThird",
|
||||
Regex: `BEGIN\nblock1 content with value (?P<value>!num)\nEND`,
|
||||
Lua: `value = "336"; return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
}
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"test.txt"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify that all three isolate commands are associated
|
||||
association := associations["test.txt"]
|
||||
assert.Len(t, association.IsolateCommands, 3, "Expected 3 isolate commands to be associated")
|
||||
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
||||
|
||||
// Run the isolate commands
|
||||
result, err := RunIsolateCommands(association, "test.txt", testContent)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify that all isolate commands were applied sequentially
|
||||
// First command: 42 -> 84
|
||||
// Second command: 84 -> 168 (works on result of first)
|
||||
// Third command: 168 -> 336 (works on result of second)
|
||||
assert.Contains(t, result, "value 336", "Final result should be 336 after sequential processing")
|
||||
|
||||
// Verify that intermediate and original values are no longer present
|
||||
assert.NotContains(t, result, "value 42", "Original value 42 should be replaced")
|
||||
assert.NotContains(t, result, "value 84", "Intermediate value 84 should be replaced")
|
||||
assert.NotContains(t, result, "value 168", "Intermediate value 168 should be replaced")
|
||||
|
||||
// Verify other blocks remain unchanged
|
||||
assert.Contains(t, result, "value 100", "Second block should remain unchanged")
|
||||
assert.Contains(t, result, "value 200", "Third block should remain unchanged")
|
||||
|
||||
}
|
||||
|
||||
func TestIsolateCommandsWithDifferentPatterns(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create test file content with distinct patterns
|
||||
testContent := `SECTION1
|
||||
value = 10
|
||||
END_SECTION1
|
||||
|
||||
SECTION2
|
||||
value = 20
|
||||
END_SECTION2`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.txt")
|
||||
writeTestFile(t, testFile, testContent)
|
||||
setWorkingDir(t, tmpDir)
|
||||
|
||||
// Create isolate commands with different patterns on the same content
|
||||
commands := []utils.ModifyCommand{
|
||||
{
|
||||
Name: "UpdateSection1",
|
||||
Regex: `SECTION1.*?value = (?P<value>!num).*?END_SECTION1`,
|
||||
Lua: `value = "100"; return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "UpdateSection2",
|
||||
Regex: `SECTION2.*?value = (?P<value>!num).*?END_SECTION2`,
|
||||
Lua: `value = "200"; return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
}
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"test.txt"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Run the isolate commands
|
||||
result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify that both isolate commands were applied
|
||||
assert.Contains(t, result, "value = 100", "Section1 should be updated")
|
||||
assert.Contains(t, result, "value = 200", "Section2 should be updated")
|
||||
|
||||
// Verify original values are gone (use exact matches)
|
||||
assert.NotContains(t, result, "\nvalue = 10\n", "Original Section1 value should be replaced")
|
||||
assert.NotContains(t, result, "\nvalue = 20\n", "Original Section2 value should be replaced")
|
||||
|
||||
}
|
||||
|
||||
func TestIsolateCommandsWithJSONMode(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create test JSON content
|
||||
testContent := `{
|
||||
"section1": {
|
||||
"value": 42
|
||||
},
|
||||
"section2": {
|
||||
"value": 100
|
||||
}
|
||||
}`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.json")
|
||||
writeTestFile(t, testFile, testContent)
|
||||
setWorkingDir(t, tmpDir)
|
||||
|
||||
// Create isolate commands with JSON mode
|
||||
commands := []utils.ModifyCommand{
|
||||
{
|
||||
Name: "UpdateJSONFirst",
|
||||
JSON: true,
|
||||
Lua: `data.section1.value = data.section1.value * 2; return true`,
|
||||
Files: []string{"test.json"},
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "UpdateJSONSecond",
|
||||
JSON: true,
|
||||
Lua: `data.section2.value = data.section2.value * 3; return true`,
|
||||
Files: []string{"test.json"},
|
||||
Isolate: true,
|
||||
},
|
||||
}
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"test.json"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Run the isolate commands
|
||||
result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify that both JSON isolate commands were applied
|
||||
assert.Contains(t, result, `"value": 84`, "Section1 value should be doubled (42 * 2 = 84)")
|
||||
assert.Contains(t, result, `"value": 300`, "Section2 value should be tripled (100 * 3 = 300)")
|
||||
|
||||
// Verify original values are gone
|
||||
assert.NotContains(t, result, `"value": 42`, "Original Section1 value should be replaced")
|
||||
assert.NotContains(t, result, `"value": 100`, "Original Section2 value should be replaced")
|
||||
|
||||
}
|
||||
|
||||
func TestRunOtherCommandsWithXMLMode(t *testing.T) {
|
||||
testContent := `<root version="1"><item price="10" /></root>`
|
||||
association := utils.FileCommandAssociation{
|
||||
File: "test.xml",
|
||||
Commands: []utils.ModifyCommand{
|
||||
{
|
||||
Name: "UpdateXML",
|
||||
XML: true,
|
||||
Lua: `data.attr.version = "2"; data.children[1].attr.price = tostring(tonumber(data.children[1].attr.price.val) * 3); return true`,
|
||||
Files: []string{"test.xml"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
result, err := RunOtherCommands("test.xml", testContent, association, map[string]*logger.Logger{})
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to run XML commands: %v", err)
|
||||
}
|
||||
|
||||
assert.Equal(t, `<root version="2"><item price="30" /></root>`, result)
|
||||
}
|
||||
|
||||
func TestRunOtherCommandsRegexPredSkipsStructuredParsing(t *testing.T) {
|
||||
testContent := `<root><item value="1"/></root>`
|
||||
association := utils.FileCommandAssociation{
|
||||
File: "test.xml",
|
||||
Commands: []utils.ModifyCommand{
|
||||
{
|
||||
Name: "SkipByPred",
|
||||
XML: true,
|
||||
RegexPred: `DefinitelyNotInFile`,
|
||||
Lua: `error("should not execute")`,
|
||||
Files: []string{"test.xml"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := RunOtherCommands("test.xml", testContent, association, map[string]*logger.Logger{})
|
||||
assert.Equal(t, ErrNothingToDo, err)
|
||||
}
|
||||
|
||||
func TestIsolateVsRegularCommands(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create test file with distinct sections
|
||||
testContent := `ISOLATE_SECTION
|
||||
value = 5
|
||||
END_ISOLATE
|
||||
|
||||
REGULAR_SECTION
|
||||
value = 10
|
||||
END_REGULAR`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.txt")
|
||||
writeTestFile(t, testFile, testContent)
|
||||
setWorkingDir(t, tmpDir)
|
||||
|
||||
// Create both isolate and regular commands
|
||||
commands := []utils.ModifyCommand{
|
||||
{
|
||||
Name: "IsolateMultiply",
|
||||
Regex: `ISOLATE_SECTION.*?value = (?P<value>!num).*?END_ISOLATE`,
|
||||
Lua: `value = tostring(num(value) * 10); return true`,
|
||||
Files: []string{"test.txt"},
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "RegularMultiply",
|
||||
Regex: `value = (?P<value>!num)`,
|
||||
Lua: `value = tostring(num(value) + 100); return true`,
|
||||
Files: []string{"test.txt"},
|
||||
},
|
||||
}
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"test.txt"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the association
|
||||
association := associations["test.txt"]
|
||||
assert.Len(t, association.IsolateCommands, 1, "Expected 1 isolate command")
|
||||
assert.Len(t, association.Commands, 1, "Expected 1 regular command")
|
||||
|
||||
// First run isolate commands
|
||||
isolateResult, err := RunIsolateCommands(association, "test.txt", testContent)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify isolate command result
|
||||
assert.Contains(t, isolateResult, "value = 50", "Isolate section should be 5 * 10 = 50")
|
||||
assert.Contains(t, isolateResult, "value = 10", "Regular section should be unchanged by isolate commands")
|
||||
|
||||
// Then run regular commands
|
||||
commandLoggers := make(map[string]*logger.Logger)
|
||||
finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify final results - regular commands should affect ALL values
|
||||
assert.Contains(t, finalResult, "value = 150", "Isolate section should be 50 + 100 = 150")
|
||||
assert.Contains(t, finalResult, "value = 110", "Regular section should be 10 + 100 = 110")
|
||||
|
||||
}
|
||||
|
||||
func TestMultipleIsolateModifiersOnSameValue(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create test file content that matches the scenario in the issue
|
||||
testContent := `irons_spellbooks:chain_creeper
|
||||
SpellPowerMultiplier = 1
|
||||
irons_spellbooks:chain_lightning
|
||||
SpellPowerMultiplier = 1`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "irons_spellbooks-server.toml")
|
||||
writeTestFile(t, testFile, testContent)
|
||||
setWorkingDir(t, tmpDir)
|
||||
|
||||
// Create isolate commands that match the issue scenario
|
||||
// First command: targets chain_creeper and chain_lightning with multiplier *4
|
||||
// Second command: targets all SpellPowerMultiplier with multiplier *4
|
||||
commands := []utils.ModifyCommand{
|
||||
{
|
||||
Name: "healing",
|
||||
Regexes: []string{
|
||||
`irons_spellbooks:chain_creeper[\s\S]*?SpellPowerMultiplier = !num`,
|
||||
`irons_spellbooks:chain_lightning[\s\S]*?SpellPowerMultiplier = !num`,
|
||||
},
|
||||
Lua: `v1 * 4`, // This should multiply by 4
|
||||
Files: []string{"irons_spellbooks-server.toml"},
|
||||
NoReset: false,
|
||||
Isolate: true,
|
||||
},
|
||||
{
|
||||
Name: "spellpower",
|
||||
Regex: `SpellPowerMultiplier = !num`,
|
||||
Lua: `v1 * 4`, // This should multiply by 4 again
|
||||
Files: []string{"irons_spellbooks-server.toml"},
|
||||
NoReset: false,
|
||||
Isolate: true,
|
||||
},
|
||||
}
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"irons_spellbooks-server.toml"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify that both isolate commands are associated
|
||||
association := associations["irons_spellbooks-server.toml"]
|
||||
assert.Len(t, association.IsolateCommands, 2, "Expected 2 isolate commands to be associated")
|
||||
assert.Len(t, association.Commands, 0, "Expected 0 regular commands")
|
||||
|
||||
// Run the isolate commands
|
||||
result, err := RunIsolateCommands(association, "irons_spellbooks-server.toml", testContent)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify that both isolate commands were applied sequentially
|
||||
// Expected: 1 -> 4 (first command) -> 16 (second command)
|
||||
assert.Contains(t, result, "SpellPowerMultiplier = 16", "Final result should be 16 after sequential processing (1 * 4 * 4)")
|
||||
|
||||
assert.Contains(t, result, "SpellPowerMultiplier = 16", "The system correctly applies both isolate modifiers sequentially")
|
||||
}
|
||||
465
logger/logger.go
465
logger/logger.go
@@ -1,465 +0,0 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// LogLevel defines the severity of log messages.
type LogLevel int

const (
	// LevelError is for critical errors that should always be displayed
	LevelError LogLevel = iota
	// LevelWarning is for important warnings
	LevelWarning
	// LevelInfo is for informational messages
	LevelInfo
	// LevelDebug is for detailed debugging information
	LevelDebug
	// LevelTrace is for very detailed tracing information
	LevelTrace
	// LevelLua is specifically for output from Lua scripts
	LevelLua
)

// levelNames maps each LogLevel to the display name used in log output
// and by LogLevel.String.
var levelNames = map[LogLevel]string{
	LevelError:   "ERROR",
	LevelWarning: "WARNING",
	LevelInfo:    "INFO",
	LevelDebug:   "DEBUG",
	LevelTrace:   "TRACE",
	LevelLua:     "LUA",
}

// levelColors maps each LogLevel to the ANSI escape sequence used to
// colorize its level tag when colors are enabled.
var levelColors = map[LogLevel]string{
	LevelError:   "\033[1;31m", // Bold Red
	LevelWarning: "\033[1;33m", // Bold Yellow
	LevelInfo:    "\033[1;32m", // Bold Green
	LevelDebug:   "\033[1;36m", // Bold Cyan
	LevelTrace:   "\033[1;35m", // Bold Magenta
	LevelLua:     "\033[1;34m", // Bold Blue
}

// ResetColor is the ANSI code to reset text color.
const ResetColor = "\033[0m"

// Logger is our custom logger with level support. The zero value is not
// usable; construct instances with New (or Init for the global logger).
type Logger struct {
	mu            sync.Mutex             // guards the mutable fields below
	out           io.Writer              // destination for formatted log output
	currentLevel  LogLevel               // active level, read/written via GetLevel/SetLevel
	prefix        string                 // prefix supplied to New
	flag          int                    // standard-library log flags (date/time/file), see formatMessage
	useColors     bool                   // whether ANSI color codes are emitted
	callerOffset  int                    // caller-depth adjustment for file:line reporting (see SetCallerOffset)
	defaultFields map[string]interface{} // key=value context appended to every message (WithField/WithFields)
	showGoroutine bool                   // whether the goroutine ID is included in output
}

var (
	// DefaultLogger is the global logger instance
	DefaultLogger *Logger
	// defaultLogLevel is the default log level if not specified
	defaultLogLevel = LevelInfo
	// Global mutex for DefaultLogger initialization
	initMutex sync.Mutex
)
|
||||
|
||||
// ParseLevel converts a string log level to LogLevel
|
||||
func ParseLevel(levelStr string) LogLevel {
|
||||
switch strings.ToUpper(levelStr) {
|
||||
case "ERROR":
|
||||
return LevelError
|
||||
case "WARNING", "WARN":
|
||||
return LevelWarning
|
||||
case "INFO":
|
||||
return LevelInfo
|
||||
case "DEBUG":
|
||||
return LevelDebug
|
||||
case "TRACE":
|
||||
return LevelTrace
|
||||
case "LUA":
|
||||
return LevelLua
|
||||
default:
|
||||
return defaultLogLevel
|
||||
}
|
||||
}
|
||||
|
||||
// String returns the string representation of the log level
|
||||
func (l LogLevel) String() string {
|
||||
if name, ok := levelNames[l]; ok {
|
||||
return name
|
||||
}
|
||||
return fmt.Sprintf("Level(%d)", l)
|
||||
}
|
||||
|
||||
// New creates a new Logger instance
|
||||
func New(out io.Writer, prefix string, flag int) *Logger {
|
||||
return &Logger{
|
||||
out: out,
|
||||
currentLevel: defaultLogLevel,
|
||||
prefix: prefix,
|
||||
flag: flag,
|
||||
useColors: true,
|
||||
callerOffset: 0,
|
||||
defaultFields: make(map[string]interface{}),
|
||||
showGoroutine: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Init initializes the DefaultLogger
|
||||
func Init(level LogLevel) {
|
||||
initMutex.Lock()
|
||||
defer initMutex.Unlock()
|
||||
|
||||
if DefaultLogger == nil {
|
||||
DefaultLogger = New(os.Stdout, "", log.Lmicroseconds|log.Lshortfile)
|
||||
}
|
||||
DefaultLogger.SetLevel(level)
|
||||
}
|
||||
|
||||
// SetLevel sets the current log level
|
||||
func (l *Logger) SetLevel(level LogLevel) {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
l.currentLevel = level
|
||||
}
|
||||
|
||||
// GetLevel returns the current log level
|
||||
func (l *Logger) GetLevel() LogLevel {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
return l.currentLevel
|
||||
}
|
||||
|
||||
// SetCallerOffset sets the caller offset for correct file and line reporting
|
||||
func (l *Logger) SetCallerOffset(offset int) {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
l.callerOffset = offset
|
||||
}
|
||||
|
||||
// SetShowGoroutine sets whether to include goroutine ID in log messages
|
||||
func (l *Logger) SetShowGoroutine(show bool) {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
l.showGoroutine = show
|
||||
}
|
||||
|
||||
// ShowGoroutine returns whether goroutine ID is included in log messages
|
||||
func (l *Logger) ShowGoroutine() bool {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
return l.showGoroutine
|
||||
}
|
||||
|
||||
// WithField adds a field to the logger's context
|
||||
func (l *Logger) WithField(key string, value interface{}) *Logger {
|
||||
newLogger := &Logger{
|
||||
out: l.out,
|
||||
currentLevel: l.currentLevel,
|
||||
prefix: l.prefix,
|
||||
flag: l.flag,
|
||||
useColors: l.useColors,
|
||||
callerOffset: l.callerOffset,
|
||||
defaultFields: make(map[string]interface{}),
|
||||
showGoroutine: l.showGoroutine,
|
||||
}
|
||||
|
||||
// Copy existing fields
|
||||
for k, v := range l.defaultFields {
|
||||
newLogger.defaultFields[k] = v
|
||||
}
|
||||
|
||||
// Add new field
|
||||
newLogger.defaultFields[key] = value
|
||||
return newLogger
|
||||
}
|
||||
|
||||
// WithFields adds multiple fields to the logger's context
|
||||
func (l *Logger) WithFields(fields map[string]interface{}) *Logger {
|
||||
newLogger := &Logger{
|
||||
out: l.out,
|
||||
currentLevel: l.currentLevel,
|
||||
prefix: l.prefix,
|
||||
flag: l.flag,
|
||||
useColors: l.useColors,
|
||||
callerOffset: l.callerOffset,
|
||||
defaultFields: make(map[string]interface{}),
|
||||
showGoroutine: l.showGoroutine,
|
||||
}
|
||||
|
||||
// Copy existing fields
|
||||
for k, v := range l.defaultFields {
|
||||
newLogger.defaultFields[k] = v
|
||||
}
|
||||
|
||||
// Add new fields
|
||||
for k, v := range fields {
|
||||
newLogger.defaultFields[k] = v
|
||||
}
|
||||
return newLogger
|
||||
}
|
||||
|
||||
// GetGoroutineID returns the numeric ID of the calling goroutine, parsed
// from the first line of the runtime stack trace, which has the form
// "goroutine N [state]:".
func GetGoroutineID() string {
	stack := make([]byte, 64)
	stack = stack[:runtime.Stack(stack, false)]
	// Everything before the first ':' is "goroutine N [state]"; the
	// second whitespace-separated token is N.
	header := bytes.Split(stack, []byte{':'})[0]
	return string(bytes.Fields(header)[1])
}
|
||||
|
||||
// formatMessage formats a log message with level, time, file, and line
// information into a single newline-terminated line:
//
//	<prefix><time><caller><goroutine>[LEVEL] <message><fields>
//
// It renders the printf-style format/args, appends the logger's default
// fields as "k=v" pairs, and honors the standard-library log flags stored
// in l.flag for the timestamp and caller columns.
func (l *Logger) formatMessage(level LogLevel, format string, args ...interface{}) string {
	// Only run Sprintf when args are present, so a bare message that
	// happens to contain '%' characters is not mangled.
	var msg string
	if len(args) > 0 {
		msg = fmt.Sprintf(format, args...)
	} else {
		msg = format
	}

	// Format default fields if any. Map iteration order is random, so the
	// order of the "k=v" pairs varies between calls.
	var fields string
	if len(l.defaultFields) > 0 {
		var pairs []string
		for k, v := range l.defaultFields {
			pairs = append(pairs, fmt.Sprintf("%s=%v", k, v))
		}
		fields = " " + strings.Join(pairs, " ")
	}

	// Colors stay empty strings when disabled, so the Sprintf calls below
	// need no branching.
	var levelColor, resetColor string
	if l.useColors {
		levelColor = levelColors[level]
		resetColor = ResetColor
	}

	var caller string
	if l.flag&log.Lshortfile != 0 || l.flag&log.Llongfile != 0 {
		// Find the actual caller by scanning up the stack
		// until we find a function outside the logger package
		var file string
		var line int
		var ok bool

		// Start at a reasonable depth and scan up to 10 frames.
		// NOTE(review): the loop actually covers depths 4..14, and
		// l.callerOffset is not consulted here — confirm whether it should be.
		for depth := 4; depth < 15; depth++ {
			_, file, line, ok = runtime.Caller(depth)
			if !ok {
				break
			}

			// If the caller is not in the logger package, we found our caller
			if !strings.Contains(file, "logger/logger.go") {
				break
			}
		}

		if !ok {
			file = "???"
			line = 0
		}

		if l.flag&log.Lshortfile != 0 {
			file = filepath.Base(file)
		}
		// Left-pad to a fixed 25-char column so adjacent messages line up.
		caller = fmt.Sprintf("%-25s ", file+":"+strconv.Itoa(line))
	}

	// Format the timestamp with fixed width
	var timeStr string
	if l.flag&(log.Ldate|log.Ltime|log.Lmicroseconds) != 0 {
		t := time.Now()
		if l.flag&log.Ldate != 0 {
			timeStr += fmt.Sprintf("%04d/%02d/%02d ", t.Year(), t.Month(), t.Day())
		}
		if l.flag&(log.Ltime|log.Lmicroseconds) != 0 {
			timeStr += fmt.Sprintf("%02d:%02d:%02d", t.Hour(), t.Minute(), t.Second())
			if l.flag&log.Lmicroseconds != 0 {
				timeStr += fmt.Sprintf(".%06d", t.Nanosecond()/1000)
			}
		}
		// Pad to a 15-char column; date+time together can exceed this, in
		// which case the column simply widens.
		timeStr = fmt.Sprintf("%-15s ", timeStr)
	}

	// Add goroutine ID if enabled, with fixed width
	var goroutineStr string
	if l.showGoroutine {
		goroutineID := GetGoroutineID()
		goroutineStr = fmt.Sprintf("[g:%-4s] ", goroutineID)
	}

	// Create a colored level indicator with both brackets colored.
	// The color code is re-applied (not reset) after the closing bracket...
	levelStr := fmt.Sprintf("%s[%s]%s", levelColor, levelNames[level], levelColor)
	// ...and a space is inserted before the reset, so the space after the
	// level is still rendered in the level's color.
	levelColumn := fmt.Sprintf("%s %s", levelStr, resetColor)

	return fmt.Sprintf("%s%s%s%s%s%s%s\n",
		l.prefix, timeStr, caller, goroutineStr, levelColumn, msg, fields)
}
|
||||
|
||||
// log logs a message at the specified level
|
||||
func (l *Logger) log(level LogLevel, format string, args ...interface{}) {
|
||||
// Always show LUA level logs regardless of the current log level
|
||||
if level != LevelLua && level > l.currentLevel {
|
||||
return
|
||||
}
|
||||
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
|
||||
msg := l.formatMessage(level, format, args...)
|
||||
fmt.Fprint(l.out, msg)
|
||||
}
|
||||
|
||||
// Error logs an error message at LevelError.
// The format string follows fmt.Printf conventions.
func (l *Logger) Error(format string, args ...interface{}) {
	l.log(LevelError, format, args...)
}

// Warning logs a warning message at LevelWarning.
func (l *Logger) Warning(format string, args ...interface{}) {
	l.log(LevelWarning, format, args...)
}

// Info logs an informational message at LevelInfo.
func (l *Logger) Info(format string, args ...interface{}) {
	l.log(LevelInfo, format, args...)
}

// Debug logs a debug message at LevelDebug.
func (l *Logger) Debug(format string, args ...interface{}) {
	l.log(LevelDebug, format, args...)
}

// Trace logs a trace message at LevelTrace.
func (l *Logger) Trace(format string, args ...interface{}) {
	l.log(LevelTrace, format, args...)
}

// Lua logs a Lua message. Note that l.log never filters LevelLua, so these
// messages are emitted regardless of the logger's current level.
func (l *Logger) Lua(format string, args ...interface{}) {
	l.log(LevelLua, format, args...)
}
|
||||
|
||||
// Global log functions that use DefaultLogger.
//
// Each function lazily initializes DefaultLogger with defaultLogLevel on
// first use, so callers never have to call Init explicitly before logging.
// NOTE(review): the nil check is not synchronized — concurrent first use
// could call Init more than once; confirm callers Init early in main.

// Error logs an error message using the default logger
func Error(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Error(format, args...)
}

// Warning logs a warning message using the default logger
func Warning(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Warning(format, args...)
}

// Info logs an informational message using the default logger
func Info(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Info(format, args...)
}

// Debug logs a debug message using the default logger
func Debug(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Debug(format, args...)
}

// Trace logs a trace message using the default logger
func Trace(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Trace(format, args...)
}

// Lua logs a Lua message using the default logger
func Lua(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Lua(format, args...)
}
|
||||
|
||||
// LogPanic logs a panic error and its stack trace
|
||||
func LogPanic(r interface{}) {
|
||||
if DefaultLogger == nil {
|
||||
Init(defaultLogLevel)
|
||||
}
|
||||
stack := make([]byte, 4096)
|
||||
n := runtime.Stack(stack, false)
|
||||
DefaultLogger.Error("PANIC: %v\n%s", r, stack[:n])
|
||||
}
|
||||
|
||||
// SetLevel sets the log level for the default logger, creating the logger
// at that level if it does not exist yet.
func SetLevel(level LogLevel) {
	if DefaultLogger == nil {
		Init(level)
		return
	}
	DefaultLogger.SetLevel(level)
}

// GetLevel gets the log level for the default logger, lazily initializing
// it with defaultLogLevel if needed.
func GetLevel() LogLevel {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.GetLevel()
}

// WithField returns a new logger with the field added to the default logger's context.
func WithField(key string, value interface{}) *Logger {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.WithField(key, value)
}

// WithFields returns a new logger with the fields added to the default logger's context.
func WithFields(fields map[string]interface{}) *Logger {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.WithFields(fields)
}

// SetShowGoroutine enables or disables goroutine ID display in the default logger.
func SetShowGoroutine(show bool) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.SetShowGoroutine(show)
}

// ShowGoroutine returns whether goroutine ID is included in default logger's messages.
func ShowGoroutine() bool {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.ShowGoroutine()
}
|
||||
@@ -1,49 +0,0 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime/debug"
|
||||
)
|
||||
|
||||
// PanicHandler handles a panic and logs it with the goroutine ID and a full
// stack trace.
//
// It must be invoked via defer ("defer PanicHandler()") so that recover()
// executes directly inside the deferred call; called any other way it would
// not intercept the panic. If no panic is in flight it does nothing.
func PanicHandler() {
	if r := recover(); r != nil {
		goroutineID := GetGoroutineID()
		stackTrace := debug.Stack()
		Error("PANIC in goroutine %s: %v\n%s", goroutineID, r, stackTrace)
	}
}
|
||||
|
||||
// SafeGo launches a goroutine with panic recovery.
// Usage: logger.SafeGo(func() { ... your code ... })
//
// Any panic raised inside f is caught and logged by PanicHandler instead of
// crashing the process. SafeGo returns immediately; it does not wait for f.
func SafeGo(f func()) {
	go func() {
		defer PanicHandler()
		f()
	}()
}
|
||||
|
||||
// SafeGoWithArgs launches a goroutine with panic recovery and passes arguments.
// Usage: logger.SafeGoWithArgs(func(arg1, arg2 interface{}) { ... }, "value1", 42)
//
// args are captured by the closure at call time and forwarded to f as a
// variadic slice inside the new goroutine; panics in f are logged by
// PanicHandler rather than crashing the process.
func SafeGoWithArgs(f func(...interface{}), args ...interface{}) {
	go func() {
		defer PanicHandler()
		f(args...)
	}()
}
|
||||
|
||||
// SafeExec executes a function with panic recovery.
// Useful for code that should not panic.
//
// Returns nil when f completes normally. If f panics, the panic is logged
// with goroutine ID and stack trace and converted to an error: the deferred
// recover assigns the named result err AFTER the final "return nil" has set
// it, so the panic error is what the caller actually receives.
func SafeExec(f func()) (err error) {
	defer func() {
		if r := recover(); r != nil {
			goroutineID := GetGoroutineID()
			stackTrace := debug.Stack()
			Error("PANIC in goroutine %s: %v\n%s", goroutineID, r, stackTrace)
			err = fmt.Errorf("panic recovered: %v", r)
		}
	}()

	f()
	return nil
}
|
||||
268
main.go
268
main.go
@@ -1,274 +1,30 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"os"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"modify/processor"
|
||||
"modify/utils"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
|
||||
"modify/logger"
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
type GlobalStats struct {
|
||||
TotalMatches int
|
||||
TotalModifications int
|
||||
ProcessedFiles int
|
||||
FailedFiles int
|
||||
ModificationsPerCommand map[string]int
|
||||
TotalMatches int64
|
||||
TotalModifications int64
|
||||
ProcessedFiles int64
|
||||
FailedFiles int64
|
||||
ModificationsPerCommand sync.Map
|
||||
CommandStats sync.Map
|
||||
}
|
||||
|
||||
var (
|
||||
repo *git.Repository
|
||||
worktree *git.Worktree
|
||||
stats GlobalStats = GlobalStats{
|
||||
ModificationsPerCommand: make(map[string]int),
|
||||
stats GlobalStats = GlobalStats{
|
||||
ModificationsPerCommand: sync.Map{},
|
||||
}
|
||||
)
|
||||
|
||||
func main() {
|
||||
flag.Usage = func() {
|
||||
fmt.Fprintf(os.Stderr, "Usage: %s [options] <pattern> <lua_expression> <...files_or_globs>\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\nOptions:\n")
|
||||
fmt.Fprintf(os.Stderr, " -git\n")
|
||||
fmt.Fprintf(os.Stderr, " Use git to manage files\n")
|
||||
fmt.Fprintf(os.Stderr, " -reset\n")
|
||||
fmt.Fprintf(os.Stderr, " Reset files to their original state\n")
|
||||
fmt.Fprintf(os.Stderr, " -loglevel string\n")
|
||||
fmt.Fprintf(os.Stderr, " Set logging level: ERROR, WARNING, INFO, DEBUG, TRACE (default \"INFO\")\n")
|
||||
fmt.Fprintf(os.Stderr, "\nExamples:\n")
|
||||
fmt.Fprintf(os.Stderr, " Regex mode (default):\n")
|
||||
fmt.Fprintf(os.Stderr, " %s \"<value>(\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\nNote: v1, v2, etc. are used to refer to capture groups as numbers.\n")
|
||||
fmt.Fprintf(os.Stderr, " s1, s2, etc. are used to refer to capture groups as strings.\n")
|
||||
fmt.Fprintf(os.Stderr, " Helper functions: num(str) converts string to number, str(num) converts number to string\n")
|
||||
fmt.Fprintf(os.Stderr, " is_number(str) checks if a string is numeric\n")
|
||||
fmt.Fprintf(os.Stderr, " If expression starts with an operator like *, /, +, -, =, etc., v1 is automatically prepended\n")
|
||||
fmt.Fprintf(os.Stderr, " You can use any valid Lua code, including if statements, loops, etc.\n")
|
||||
fmt.Fprintf(os.Stderr, " Glob patterns are supported for file selection (*.xml, data/**.xml, etc.)\n")
|
||||
}
|
||||
// TODO: Implement -f flag for filtering recipes by name
|
||||
// TODO: Fix bed shitting when doing *.yml in barotrauma directory
|
||||
// TODO: Fix disaster:
|
||||
// fatal error: concurrent map writes
|
||||
//
|
||||
// goroutine 243 [running]:
|
||||
// internal/runtime/maps.fatal({0x8b9c77?, 0xc003399ce8?})
|
||||
// C:/Users/Administrator/scoop/apps/go/current/src/runtime/panic.go:1058 +0x18
|
||||
// main.main.func2({0x0?, 0x0?, 0x0?})
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/main.go:137 +0x31d
|
||||
// modify/logger.SafeGoWithArgs.func1()
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/panic_handler.go:31 +0x43
|
||||
// created by modify/logger.SafeGoWithArgs in goroutine 1
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/panic_handler.go:29 +0x86
|
||||
//
|
||||
// goroutine 1 [chan send]:
|
||||
// main.main()
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/main.go:107 +0x5aa
|
||||
//
|
||||
// goroutine 547 [sync.Mutex.Lock]:
|
||||
// internal/sync.runtime_SemacquireMutex(0x0?, 0x5b?, 0x0?)
|
||||
// C:/Users/Administrator/scoop/apps/go/current/src/runtime/sema.go:95 +0x25
|
||||
// internal/sync.(*Mutex).lockSlow(0xc0000a2240)
|
||||
// C:/Users/Administrator/scoop/apps/go/current/src/internal/sync/mutex.go:149 +0x15d
|
||||
// internal/sync.(*Mutex).Lock(...)
|
||||
// C:/Users/Administrator/scoop/apps/go/current/src/internal/sync/mutex.go:70
|
||||
// sync.(*Mutex).Lock(...)
|
||||
// C:/Users/Administrator/scoop/apps/go/current/src/sync/mutex.go:46
|
||||
// modify/logger.(*Logger).log(0xc0000a2240, 0x1, {0x8bf8ab, 0x1e}, {0xc007049d08, 0x1, 0x1})
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/logger.go:321 +0x9d
|
||||
// modify/logger.(*Logger).Warning(...)
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/logger.go:335
|
||||
// modify/logger.Warning({0x8bf8ab?, 0x5c?}, {0xc004d87d08?, 0xc006b85ae0?, 0x53f3b9?})
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/logger.go:373 +0x87
|
||||
// modify/processor.ProcessRegex({0xc004025000, 0x1f0}, {{0xc00026c450, 0xa}, {0xc000015bd0, 0x4b}, {0xc000261c00, 0xf4}, {0xc000282110, 0x1, ...}, ...})
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/processor/regex.go:64 +0x5c5
|
||||
// main.main.func2({0x0?, 0x0?, 0x0?})
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/main.go:128 +0x471
|
||||
// modify/logger.SafeGoWithArgs.func1()
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/panic_handler.go:31 +0x43
|
||||
// created by modify/logger.SafeGoWithArgs in goroutine 1
|
||||
// C:/Users/Administrator/Seafile/Projects-Go/GoProjects/modifier/logger/panic_handler.go:29 +0x86
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
|
||||
level := logger.ParseLevel(*utils.LogLevel)
|
||||
logger.Init(level)
|
||||
logger.Info("Initializing with log level: %s", level.String())
|
||||
|
||||
// The plan is:
|
||||
// Load all commands
|
||||
commands, err := utils.LoadCommands(args)
|
||||
if err != nil {
|
||||
logger.Error("Failed to load commands: %v", err)
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
|
||||
// Then aggregate all the globs and deduplicate them
|
||||
globs := utils.AggregateGlobs(commands)
|
||||
logger.Debug("Aggregated %d globs before deduplication", utils.CountGlobsBeforeDedup(commands))
|
||||
|
||||
// Resolve all the files for all the globs
|
||||
logger.Info("Found %d unique file patterns", len(globs))
|
||||
files, err := utils.ExpandGLobs(globs)
|
||||
if err != nil {
|
||||
logger.Error("Failed to expand file patterns: %v", err)
|
||||
return
|
||||
}
|
||||
logger.Info("Found %d files to process", len(files))
|
||||
|
||||
// Somehow connect files to commands via globs..
|
||||
// For each file check every glob of every command
|
||||
// Maybe memoize this part
|
||||
// That way we know what commands affect what files
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
if err != nil {
|
||||
logger.Error("Failed to associate files with commands: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Then for each file run all commands associated with the file
|
||||
workers := make(chan struct{}, *utils.ParallelFiles)
|
||||
wg := sync.WaitGroup{}
|
||||
|
||||
// Add performance tracking
|
||||
startTime := time.Now()
|
||||
var fileMutex sync.Mutex
|
||||
|
||||
for file, commands := range associations {
|
||||
workers <- struct{}{}
|
||||
wg.Add(1)
|
||||
logger.SafeGoWithArgs(func(args ...interface{}) {
|
||||
defer func() { <-workers }()
|
||||
defer wg.Done()
|
||||
|
||||
// Track per-file processing time
|
||||
fileStartTime := time.Now()
|
||||
|
||||
fileData, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
logger.Error("Failed to read file %q: %v", file, err)
|
||||
return
|
||||
}
|
||||
logger.Trace("Loaded %d bytes of data for file %q", len(fileData), file)
|
||||
fileDataStr := string(fileData)
|
||||
|
||||
// Aggregate all the modifications and execute them
|
||||
modifications := []utils.ReplaceCommand{}
|
||||
for _, command := range commands {
|
||||
logger.Info("Processing file %q with command %q", file, command.Regex)
|
||||
commands, err := processor.ProcessRegex(fileDataStr, command)
|
||||
if err != nil {
|
||||
logger.Error("Failed to process file %q with command %q: %v", file, command.Regex, err)
|
||||
return
|
||||
}
|
||||
modifications = append(modifications, commands...)
|
||||
// It is not guranteed that all the commands will be executed...
|
||||
// TODO: Make this better
|
||||
// We'd have to pass the map to executemodifications or something...
|
||||
stats.ModificationsPerCommand[command.Name] += len(commands)
|
||||
}
|
||||
|
||||
if len(modifications) == 0 {
|
||||
logger.Info("No modifications found for file %q", file)
|
||||
return
|
||||
}
|
||||
|
||||
// Sort commands in reverse order for safe replacements
|
||||
fileDataStr, count := utils.ExecuteModifications(modifications, fileDataStr)
|
||||
|
||||
fileMutex.Lock()
|
||||
stats.ProcessedFiles++
|
||||
stats.TotalModifications += count
|
||||
fileMutex.Unlock()
|
||||
|
||||
logger.Info("Executed %d modifications for file %q", count, file)
|
||||
|
||||
err = os.WriteFile(file, []byte(fileDataStr), 0644)
|
||||
if err != nil {
|
||||
logger.Error("Failed to write file %q: %v", file, err)
|
||||
return
|
||||
}
|
||||
|
||||
logger.Debug("File %q processed in %v", file, time.Since(fileStartTime))
|
||||
}, file, commands)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
processingTime := time.Since(startTime)
|
||||
logger.Info("Processing completed in %v", processingTime)
|
||||
if stats.ProcessedFiles > 0 {
|
||||
logger.Info("Average time per file: %v", processingTime/time.Duration(stats.ProcessedFiles))
|
||||
}
|
||||
|
||||
// TODO: Also give each command its own logger, maybe prefix it with something... Maybe give commands a name?
|
||||
// Do that with logger.WithField("loglevel", level.String())
|
||||
// Since each command also has its own log level
|
||||
// TODO: Maybe even figure out how to run individual commands...?
|
||||
// TODO: What to do with git? Figure it out ....
|
||||
|
||||
// if *gitFlag {
|
||||
// logger.Info("Git integration enabled, setting up git repository")
|
||||
// err := setupGit()
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to setup git: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error setting up git: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
|
||||
// logger.Debug("Expanding file patterns")
|
||||
// files, err := expandFilePatterns(filePatterns)
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to expand file patterns: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// if *gitFlag {
|
||||
// logger.Info("Cleaning up git files before processing")
|
||||
// err := cleanupGitFiles(files)
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to cleanup git files: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error cleaning up git files: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
// if *resetFlag {
|
||||
// logger.Info("Files reset to their original state, nothing more to do")
|
||||
// log.Printf("Files reset to their original state, nothing more to do")
|
||||
// return
|
||||
// }
|
||||
|
||||
// Print summary
|
||||
if stats.TotalModifications == 0 {
|
||||
logger.Warning("No modifications were made in any files")
|
||||
fmt.Fprintf(os.Stderr, "No modifications were made in any files\n")
|
||||
} else {
|
||||
logger.Info("Operation complete! Modified %d values in %d/%d files",
|
||||
stats.TotalModifications, stats.ProcessedFiles, stats.ProcessedFiles+stats.FailedFiles)
|
||||
fmt.Printf("Operation complete! Modified %d values in %d/%d files\n",
|
||||
stats.TotalModifications, stats.ProcessedFiles, stats.ProcessedFiles+stats.FailedFiles)
|
||||
sortedCommands := make([]string, 0, len(stats.ModificationsPerCommand))
|
||||
for command := range stats.ModificationsPerCommand {
|
||||
sortedCommands = append(sortedCommands, command)
|
||||
}
|
||||
sort.Strings(sortedCommands)
|
||||
|
||||
for _, command := range sortedCommands {
|
||||
count := stats.ModificationsPerCommand[command]
|
||||
if count > 0 {
|
||||
logger.Info("\tCommand %q made %d modifications", command, count)
|
||||
} else {
|
||||
logger.Warning("\tCommand %q made no modifications", command)
|
||||
}
|
||||
}
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
logger.Error("Command execution failed: %v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
24
main_createexample_test.go
Normal file
24
main_createexample_test.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestCreateExampleConfigWritesFile verifies that CreateExampleConfig writes
// "example_cook.toml" into the current working directory with exactly the
// exampleTOMLContent payload. The test chdirs into a temp dir for isolation
// and restores the original cwd on exit.
func TestCreateExampleConfigWritesFile(t *testing.T) {
	tmp := t.TempDir()
	origDir, err := os.Getwd()
	require.NoError(t, err)
	defer os.Chdir(origDir) // best-effort cwd restore; error deliberately ignored
	require.NoError(t, os.Chdir(tmp))

	CreateExampleConfig()

	b, err := os.ReadFile(filepath.Join(tmp, "example_cook.toml"))
	require.NoError(t, err)
	assert.Equal(t, exampleTOMLContent, string(b))
}
|
||||
43
main_runchef_test.go
Normal file
43
main_runchef_test.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// resetGlobalStatsForTest replaces the package-level stats with a zero-value
// GlobalStats (fresh sync.Maps) so each test starts from a clean slate.
func resetGlobalStatsForTest() {
	stats = GlobalStats{ModificationsPerCommand: sync.Map{}, CommandStats: sync.Map{}}
}
|
||||
|
||||
func TestRunChefEndToEnd(t *testing.T) {
|
||||
tmp, err := os.MkdirTemp("", "runchef-e2e-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmp))
|
||||
|
||||
resetGlobalStatsForTest()
|
||||
|
||||
contentPath := filepath.Join(tmp, "sample.txt")
|
||||
require.NoError(t, os.WriteFile(contentPath, []byte("value=1\n"), 0644))
|
||||
|
||||
cookPath := filepath.Join(tmp, "cook.toml")
|
||||
cook := `[[commands]]
|
||||
name = "bump"
|
||||
regex = "(1)"
|
||||
lua = "v1 = v1 + 1"
|
||||
files = ["sample.txt"]
|
||||
`
|
||||
require.NoError(t, os.WriteFile(cookPath, []byte(cook), 0644))
|
||||
|
||||
runChef([]string{"cook.toml"}, rootCmd)
|
||||
|
||||
out, err := os.ReadFile(contentPath)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "value=2\n", string(out))
|
||||
}
|
||||
147
processor/evalregex_test.go
Normal file
147
processor/evalregex_test.go
Normal file
@@ -0,0 +1,147 @@
|
||||
package processor_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
|
||||
"cook/processor"
|
||||
)
|
||||
|
||||
// Happy Path: Function correctly returns all regex capture groups as Lua table when given valid pattern and input.
|
||||
func TestEvalRegexReturnsCaptureGroups(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
pattern := `(\w+)-(\d+)`
|
||||
input := "test-42"
|
||||
L.Push(lua.LString(pattern))
|
||||
L.Push(lua.LString(input))
|
||||
|
||||
result := processor.EvalRegex(L)
|
||||
|
||||
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
|
||||
|
||||
out := L.Get(-1)
|
||||
tbl, ok := out.(*lua.LTable)
|
||||
if !ok {
|
||||
t.Fatalf("Expected Lua table, got %T", out)
|
||||
}
|
||||
expected := []string{"test-42", "test", "42"}
|
||||
for i, v := range expected {
|
||||
val := tbl.RawGetInt(i + 1)
|
||||
assert.Equal(t, lua.LString(v), val, "Expected index %d to be %q", i+1, v)
|
||||
}
|
||||
}
|
||||
|
||||
// Happy Path: Function returns nil when regex pattern does not match input string.
|
||||
func TestEvalRegexReturnsNilWhenNoMatch(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
L.Push(lua.LString(`(foo)(bar)`))
|
||||
L.Push(lua.LString("no-match-here"))
|
||||
|
||||
result := processor.EvalRegex(L)
|
||||
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
|
||||
|
||||
out := L.Get(-1)
|
||||
// Should be nil when no matches found
|
||||
assert.Equal(t, lua.LNil, out, "Expected nil when no matches found")
|
||||
}
|
||||
|
||||
// Happy Path: Function handles patterns with no capture groups by returning the full match in the Lua table.
|
||||
func TestEvalRegexReturnsFullMatchWithoutCaptureGroups(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
pattern := `foo\d+`
|
||||
input := "foo123"
|
||||
L.Push(lua.LString(pattern))
|
||||
L.Push(lua.LString(input))
|
||||
|
||||
result := processor.EvalRegex(L)
|
||||
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
|
||||
|
||||
out := L.Get(-1)
|
||||
tbl, ok := out.(*lua.LTable)
|
||||
if !ok {
|
||||
t.Fatalf("Expected Lua table, got %T", out)
|
||||
}
|
||||
fullMatch := tbl.RawGetInt(1)
|
||||
assert.Equal(t, lua.LString("foo123"), fullMatch)
|
||||
// There should be only the full match (index 1)
|
||||
count := 0
|
||||
tbl.ForEach(func(k, v lua.LValue) {
|
||||
count++
|
||||
})
|
||||
assert.Equal(t, 1, count)
|
||||
}
|
||||
|
||||
// Edge Case: Function handles invalid regex pattern by letting regexp.MustCompile panic (which is expected behavior)
|
||||
func TestEvalRegexPanicsOnInvalidPattern(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
pattern := `([a-z` // invalid regex
|
||||
L.Push(lua.LString(pattern))
|
||||
L.Push(lua.LString("someinput"))
|
||||
|
||||
// This should panic due to invalid regex pattern
|
||||
assert.Panics(t, func() {
|
||||
processor.EvalRegex(L)
|
||||
}, "Expected panic for invalid regex pattern")
|
||||
}
|
||||
|
||||
// Edge Case: Function returns nil when input string is empty and pattern doesn't match.
|
||||
func TestEvalRegexReturnsNilForEmptyInput(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
L.Push(lua.LString(`(foo)`))
|
||||
L.Push(lua.LString(""))
|
||||
|
||||
result := processor.EvalRegex(L)
|
||||
assert.Equal(t, 1, result, "Expected return value to be 1 (one value pushed to Lua stack)")
|
||||
|
||||
out := L.Get(-1)
|
||||
// Should be nil when no matches found
|
||||
assert.Equal(t, lua.LNil, out, "Expected nil when input is empty and pattern doesn't match")
|
||||
}
|
||||
|
||||
// Edge Case: Function handles nil or missing arguments gracefully without causing a runtime panic.
|
||||
func TestEvalRegexHandlesMissingArguments(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("Did not expect panic when arguments are missing, got: %v", r)
|
||||
}
|
||||
}()
|
||||
// No arguments pushed at all
|
||||
processor.EvalRegex(L)
|
||||
// Should just not match anything or produce empty table, but must not panic
|
||||
}
|
||||
|
||||
func TestEvalRegexHandlesComplexPattern(t *testing.T) {
|
||||
// Test complex regex pattern with multiple capture groups
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
pattern := `^((Bulk_)?(Pistol|Rifle).*?Round.*?)$`
|
||||
input := "Pistol_Round"
|
||||
L.Push(lua.LString(pattern))
|
||||
L.Push(lua.LString(input))
|
||||
|
||||
processor.EvalRegex(L)
|
||||
|
||||
out := L.Get(-1)
|
||||
tbl, ok := out.(*lua.LTable)
|
||||
if !ok {
|
||||
t.Fatalf("Expected Lua table, got %T", out)
|
||||
}
|
||||
|
||||
// Pattern should match: ["Pistol_Round", "Pistol_Round", "", "Pistol"]
|
||||
// This creates 4 elements in the matches array, not 1
|
||||
expectedCount := 4
|
||||
actualCount := 0
|
||||
tbl.ForEach(func(k, v lua.LValue) {
|
||||
actualCount++
|
||||
})
|
||||
assert.Equal(t, expectedCount, actualCount, "Expected %d matches for pattern %q with input %q", expectedCount, pattern, input)
|
||||
}
|
||||
1257
processor/json.go
Normal file
1257
processor/json.go
Normal file
File diff suppressed because it is too large
Load Diff
153
processor/json_deepequal_test.go
Normal file
153
processor/json_deepequal_test.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestDeepEqual(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
a interface{}
|
||||
b interface{}
|
||||
expected bool
|
||||
}{
|
||||
{
|
||||
name: "both nil",
|
||||
a: nil,
|
||||
b: nil,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "first nil",
|
||||
a: nil,
|
||||
b: "something",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "second nil",
|
||||
a: "something",
|
||||
b: nil,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "equal primitives",
|
||||
a: 42,
|
||||
b: 42,
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "different primitives",
|
||||
a: 42,
|
||||
b: 43,
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "equal strings",
|
||||
a: "hello",
|
||||
b: "hello",
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "equal maps",
|
||||
a: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
"key2": 42,
|
||||
},
|
||||
b: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
"key2": 42,
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "maps different lengths",
|
||||
a: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
},
|
||||
b: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
"key2": 42,
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "maps different values",
|
||||
a: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
},
|
||||
b: map[string]interface{}{
|
||||
"key1": "value2",
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "map vs non-map",
|
||||
a: map[string]interface{}{
|
||||
"key1": "value1",
|
||||
},
|
||||
b: "not a map",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "equal arrays",
|
||||
a: []interface{}{1, 2, 3},
|
||||
b: []interface{}{1, 2, 3},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "arrays different lengths",
|
||||
a: []interface{}{1, 2},
|
||||
b: []interface{}{1, 2, 3},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "arrays different values",
|
||||
a: []interface{}{1, 2, 3},
|
||||
b: []interface{}{1, 2, 4},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "array vs non-array",
|
||||
a: []interface{}{1, 2, 3},
|
||||
b: "not an array",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "nested equal structures",
|
||||
a: map[string]interface{}{
|
||||
"outer": map[string]interface{}{
|
||||
"inner": []interface{}{1, 2, 3},
|
||||
},
|
||||
},
|
||||
b: map[string]interface{}{
|
||||
"outer": map[string]interface{}{
|
||||
"inner": []interface{}{1, 2, 3},
|
||||
},
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "nested different structures",
|
||||
a: map[string]interface{}{
|
||||
"outer": map[string]interface{}{
|
||||
"inner": []interface{}{1, 2, 3},
|
||||
},
|
||||
},
|
||||
b: map[string]interface{}{
|
||||
"outer": map[string]interface{}{
|
||||
"inner": []interface{}{1, 2, 4},
|
||||
},
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := deepEqual(tt.a, tt.b)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
273
processor/json_edge_cases_test.go
Normal file
273
processor/json_edge_cases_test.go
Normal file
@@ -0,0 +1,273 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestJSONFloatFormatting(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"value": 10.5,
|
||||
"another": 3.14159
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_float",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
data.value = data.value * 2
|
||||
data.another = data.another * 10
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, "21") // 10.5 * 2
|
||||
assert.Contains(t, result, "31.4159") // 3.14159 * 10
|
||||
}
|
||||
|
||||
func TestJSONNestedObjectAddition(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"items": {}
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_nested",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
data.items.newObject = {
|
||||
name = "test",
|
||||
value = 42,
|
||||
enabled = true
|
||||
}
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, `"newObject"`)
|
||||
assert.Contains(t, result, `"name"`)
|
||||
assert.Contains(t, result, `"test"`)
|
||||
assert.Contains(t, result, `"value"`)
|
||||
assert.Contains(t, result, "42")
|
||||
}
|
||||
|
||||
func TestJSONKeyWithQuotes(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"data": {}
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_key_quotes",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
data.data["key-with-dash"] = "value1"
|
||||
data.data.normalKey = "value2"
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, `"key-with-dash"`)
|
||||
assert.Contains(t, result, `"normalKey"`)
|
||||
}
|
||||
|
||||
func TestJSONArrayInValue(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"data": {}
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_array_value",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
data.data.items = {1, 2, 3, 4, 5}
|
||||
data.data.strings = {"a", "b", "c"}
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, `"items"`)
|
||||
assert.Contains(t, result, `[1,2,3,4,5]`)
|
||||
assert.Contains(t, result, `"strings"`)
|
||||
assert.Contains(t, result, `["a","b","c"]`)
|
||||
}
|
||||
|
||||
func TestJSONRootArrayElementRemoval(t *testing.T) {
|
||||
jsonContent := `[
|
||||
{"id": 1, "name": "first"},
|
||||
{"id": 2, "name": "second"},
|
||||
{"id": 3, "name": "third"}
|
||||
]`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_root_array_removal",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
-- Remove the second element
|
||||
table.remove(data, 2)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, `"first"`)
|
||||
assert.Contains(t, result, `"third"`)
|
||||
assert.NotContains(t, result, `"second"`)
|
||||
}
|
||||
|
||||
func TestJSONRootArrayElementChange(t *testing.T) {
|
||||
jsonContent := `[10, 20, 30, 40, 50]`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_root_array_change",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
-- Double all values
|
||||
for i = 1, #data do
|
||||
data[i] = data[i] * 2
|
||||
end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, "20")
|
||||
assert.Contains(t, result, "40")
|
||||
assert.Contains(t, result, "60")
|
||||
assert.Contains(t, result, "80")
|
||||
assert.Contains(t, result, "100")
|
||||
assert.NotContains(t, result, "10,")
|
||||
}
|
||||
|
||||
func TestJSONRootArrayStringElements(t *testing.T) {
|
||||
jsonContent := `["apple", "banana", "cherry"]`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_root_array_strings",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
data[2] = "orange"
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, `"apple"`)
|
||||
assert.Contains(t, result, `"orange"`)
|
||||
assert.Contains(t, result, `"cherry"`)
|
||||
assert.NotContains(t, result, `"banana"`)
|
||||
}
|
||||
|
||||
func TestJSONComplexNestedStructure(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"config": {
|
||||
"multiplier": 2.5
|
||||
}
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_complex",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
-- Add nested object with array
|
||||
data.config.settings = {
|
||||
enabled = true,
|
||||
values = {1.5, 2.5, 3.5},
|
||||
names = {"alpha", "beta"}
|
||||
}
|
||||
-- Change float
|
||||
data.config.multiplier = 7.777
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, "7.777")
|
||||
assert.Contains(t, result, `"settings"`)
|
||||
assert.Contains(t, result, `"values"`)
|
||||
assert.Contains(t, result, `[1.5,2.5,3.5]`)
|
||||
}
|
||||
|
||||
func TestJSONRemoveFirstArrayElement(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"items": [1, 2, 3, 4, 5]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_remove_first",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
table.remove(data.items, 1)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.NotContains(t, result, "[1,")
|
||||
assert.Contains(t, result, "2")
|
||||
assert.Contains(t, result, "5")
|
||||
}
|
||||
|
||||
func TestJSONRemoveLastArrayElement(t *testing.T) {
|
||||
jsonContent := `{
|
||||
"items": [1, 2, 3, 4, 5]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test_remove_last",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
table.remove(data.items, 5)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(jsonContent, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, jsonContent)
|
||||
assert.Contains(t, result, "1")
|
||||
assert.Contains(t, result, "4")
|
||||
assert.NotContains(t, result, ", 5")
|
||||
}
|
||||
106
processor/json_internal_test.go
Normal file
106
processor/json_internal_test.go
Normal file
@@ -0,0 +1,106 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestConsumeJSONCompositeBranches(t *testing.T) {
|
||||
obj := `{"a":{"b":[1,"x\\\"y"]}} trailing`
|
||||
assert.Equal(t, 24, consumeJSONComposite(obj, 0))
|
||||
|
||||
arr := `[1,{"k":"v"},3]x`
|
||||
assert.Equal(t, 15, consumeJSONComposite(arr, 0))
|
||||
|
||||
unclosed := `{"a":1`
|
||||
assert.Equal(t, -1, consumeJSONComposite(unclosed, 0))
|
||||
}
|
||||
|
||||
func TestConsumeJSONValueBranches(t *testing.T) {
|
||||
assert.Equal(t, -1, consumeJSONValue("", 0))
|
||||
assert.Equal(t, -1, consumeJSONValue("x", 2))
|
||||
|
||||
assert.Equal(t, 5, consumeJSONValue(`"abc",`, 0))
|
||||
assert.Equal(t, -1, consumeJSONValue(`"abc`, 0))
|
||||
|
||||
assert.Equal(t, 7, consumeJSONValue(`[1,2,3]`, 0))
|
||||
assert.Equal(t, -1, consumeJSONValue(`[1,2,3`, 0))
|
||||
|
||||
assert.Equal(t, 2, consumeJSONValue(`42,`, 0))
|
||||
assert.Equal(t, 4, consumeJSONValue(`true}`, 0))
|
||||
}
|
||||
|
||||
func TestParseJSONObjectMembersSuccessAndBreakPaths(t *testing.T) {
|
||||
success := parseJSONObjectMembers(`{"a":1,"b":{"x":2},"c":"v"}`)
|
||||
if assert.Len(t, success, 3) {
|
||||
assert.Equal(t, "a", success[0].key)
|
||||
assert.Equal(t, "b", success[1].key)
|
||||
assert.Equal(t, "c", success[2].key)
|
||||
}
|
||||
|
||||
breakCases := []string{
|
||||
`{a:1}`, // not quoted key
|
||||
`{"a:1}`, // unterminated key string
|
||||
`{"\uZZZZ":1}`, // invalid key escape for strconv.Unquote
|
||||
`{"a" 1}`, // missing colon
|
||||
`{"a": }`, // missing value
|
||||
`{"a": {`, // invalid composite value
|
||||
}
|
||||
for _, raw := range breakCases {
|
||||
members := parseJSONObjectMembers(raw)
|
||||
assert.Len(t, members, 0, "raw=%s", raw)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindObjectFieldRemovalRangeErrorPaths(t *testing.T) {
|
||||
start, end := findObjectFieldRemovalRange(`[]`, "", "a")
|
||||
assert.Equal(t, -1, start)
|
||||
assert.Equal(t, -1, end)
|
||||
|
||||
start, end = findObjectFieldRemovalRange(`{"obj":123}`, "obj", "a")
|
||||
assert.Equal(t, -1, start)
|
||||
assert.Equal(t, -1, end)
|
||||
|
||||
start, end = findObjectFieldRemovalRange(`{"obj":{"a":1}}`, "missing", "a")
|
||||
assert.Equal(t, -1, start)
|
||||
assert.Equal(t, -1, end)
|
||||
|
||||
start, end = findObjectFieldRemovalRange(`{"obj":{"a":1}}`, "obj", "missing")
|
||||
assert.Equal(t, -1, start)
|
||||
assert.Equal(t, -1, end)
|
||||
}
|
||||
|
||||
func TestFindObjectFieldRemovalRangeHappyPaths(t *testing.T) {
|
||||
content := `{"a":1,"b":2,"c":3}`
|
||||
|
||||
start, end := findObjectFieldRemovalRange(content, "", "a")
|
||||
assert.Equal(t, `{"b":2,"c":3}`, content[:start]+content[end:])
|
||||
|
||||
start, end = findObjectFieldRemovalRange(content, "", "b")
|
||||
assert.Equal(t, `{"a":1,"c":3}`, content[:start]+content[end:])
|
||||
|
||||
start, end = findObjectFieldRemovalRange(content, "", "c")
|
||||
assert.Equal(t, `{"a":1,"b":2}`, content[:start]+content[end:])
|
||||
|
||||
one := `{"only":1}`
|
||||
start, end = findObjectFieldRemovalRange(one, "", "only")
|
||||
assert.Equal(t, `{}`, one[:start]+one[end:])
|
||||
}
|
||||
|
||||
func TestProcessJSONErrorPaths(t *testing.T) {
|
||||
_, err := ProcessJSON(`{"a":`, utils.ModifyCommand{Name: "bad_json", JSON: true, Lua: `modified=false`}, "test.json")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "failed to parse JSON")
|
||||
|
||||
_, err = ProcessJSON(`{"a":1}`, utils.ModifyCommand{Name: "bad_lua", JSON: true, Lua: `this is not lua`}, "test.json")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "lua script execution failed")
|
||||
|
||||
_, err = ProcessJSON(`{"a":1}`, utils.ModifyCommand{Name: "bad_jpath", JSON: true, Lua: `jpath(data, "$") ; jpath(data, "$") ; jpath(data, "$[")`}, "test.json")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "invalid jsonpath expression")
|
||||
}
|
||||
360
processor/json_test.go
Normal file
360
processor/json_test.go
Normal file
@@ -0,0 +1,360 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestProcessJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
luaExpression string
|
||||
expectedOutput string
|
||||
expectedMods int
|
||||
}{
|
||||
{
|
||||
name: "Basic JSON object modification",
|
||||
input: `{"name": "test", "value": 42}`,
|
||||
luaExpression: `data.value = data.value * 2; return true`,
|
||||
expectedOutput: `{"name": "test", "value": 84}`,
|
||||
expectedMods: 1,
|
||||
},
|
||||
{
|
||||
name: "JSON array modification",
|
||||
input: `{"items": [{"name": "item1", "value": 10}, {"name": "item2", "value": 20}]}`,
|
||||
luaExpression: `for i, item in ipairs(data.items) do item.value = item.value * 2 end modified = true`,
|
||||
expectedOutput: `{"items": [{"name": "item1", "value": 20}, {"name": "item2", "value": 40}]}`,
|
||||
expectedMods: 2,
|
||||
},
|
||||
{
|
||||
name: "JSON nested object modification",
|
||||
input: `{"config": {"setting1": {"enabled": true, "value": 5}, "setting2": {"enabled": false, "value": 10}}}`,
|
||||
luaExpression: `data.config.setting1.enabled = false data.config.setting2.value = 15 modified = true`,
|
||||
expectedOutput: `{"config": {"setting1": {"enabled": false, "value": 5}, "setting2": {"enabled": false, "value": 15}}}`,
|
||||
expectedMods: 2,
|
||||
},
|
||||
{
|
||||
name: "JSON no modification",
|
||||
input: `{"name": "test", "value": 42}`,
|
||||
luaExpression: `return false`,
|
||||
expectedOutput: `{"name": "test", "value": 42}`,
|
||||
expectedMods: 0,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
command := utils.ModifyCommand{
|
||||
Name: tt.name,
|
||||
JSON: true,
|
||||
Lua: tt.luaExpression,
|
||||
}
|
||||
|
||||
modifications, err := ProcessJSON(tt.input, command, "test.json")
|
||||
assert.NoError(t, err, "ProcessJSON failed: %v", err)
|
||||
|
||||
if len(modifications) > 0 {
|
||||
// Execute modifications
|
||||
result, count := utils.ExecuteModifications(modifications, tt.input)
|
||||
assert.Equal(t, tt.expectedMods, count, "Expected %d modifications, got %d", tt.expectedMods, count)
|
||||
assert.Equal(t, tt.expectedOutput, result, "Expected output: %s, got: %s", tt.expectedOutput, result)
|
||||
} else {
|
||||
assert.Equal(t, 0, tt.expectedMods, "Expected no modifications but got some")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestToLuaValue(t *testing.T) {
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create Lua state: %v", err)
|
||||
}
|
||||
defer L.Close()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input interface{}
|
||||
expected string
|
||||
}{
|
||||
{"string", "hello", "hello"},
|
||||
{"number", 42.0, "42"},
|
||||
{"boolean", true, "true"},
|
||||
{"nil", nil, "nil"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := ToLuaValue(L, tt.input)
|
||||
assert.Equal(t, tt.expected, result.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathObjectFilterMutation(t *testing.T) {
|
||||
input := `{"items":[{"enabled":true,"value":10},{"enabled":false,"value":20},{"enabled":true,"value":7}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_filter",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
for _, item in ipairs(jpath(data, "$.items[?@.enabled == true]")) do
|
||||
item.value = item.value * 3
|
||||
end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"items":[{"enabled":true,"value":30},{"enabled":false,"value":20},{"enabled":true,"value":21}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathPrimitiveMatchesAreReturned(t *testing.T) {
|
||||
input := `{"items":[{"price":10},{"price":20}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_primitives",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local prices = jpath(data, "$.items[*].price")
|
||||
if #prices ~= 2 or prices[1].val ~= 10 or prices[2].val ~= 20 then
|
||||
error("unexpected primitive jpath values")
|
||||
end
|
||||
data.items[2].price = prices[1] + prices[2]
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"items":[{"price":10},{"price":30}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathNodeListContext(t *testing.T) {
|
||||
input := `{"groups":[{"items":[{"id":1}]},{"items":[{"id":2},{"id":3}]}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_nodelist_context",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local groups = jpath(data, "$.groups[*]")
|
||||
for _, item in ipairs(jpath(groups, "$.items[*]")) do
|
||||
item.seen = true
|
||||
end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"groups":[{"items":[{"id":1,"seen": true}]},{"items":[{"id":2,"seen": true},{"id":3,"seen": true}]}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathPrimitiveContext(t *testing.T) {
|
||||
input := `{"items":[{"price":10},{"price":20}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_primitive_context",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local selected = jpath(data.items[1].price, "$")
|
||||
if #selected < 1 then
|
||||
error("expected scalar context to yield at least one result")
|
||||
end
|
||||
data.items[1].price = selected[1].val + 5
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"items":[{"price":15},{"price":20}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathMixedContextList(t *testing.T) {
|
||||
input := `{"groups":[{"items":[{"id":1}]},{"items":[{"id":2}]}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_mixed_list",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local groups = jpath(data, "$.groups[*]")
|
||||
table.insert(groups, 123)
|
||||
for _, item in ipairs(jpath(groups, "$.items[*]")) do
|
||||
item.tagged = true
|
||||
end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"groups":[{"items":[{"id":1,"tagged": true}]},{"items":[{"id":2,"tagged": true}]}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathNoMatchReturnsEmptyArray(t *testing.T) {
|
||||
input := `{"items":[{"id":1}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_empty",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local matches = jpath(data, "$.items[?@.id == 999]")
|
||||
if type(matches) ~= "table" then error("expected table") end
|
||||
if #matches ~= 0 then error("expected empty array") end
|
||||
modified = false
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, mods)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathNullResultIsFilteredFromArray(t *testing.T) {
|
||||
input := `{"items":[{"value":null},{"value":5}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_null_filter",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local vals = jpath(data, "$.items[*].value")
|
||||
if type(vals) ~= "table" then error("expected table") end
|
||||
if #vals ~= 1 or vals[1].val ~= 5 then
|
||||
error("expected null to be skipped and only numeric value returned")
|
||||
end
|
||||
modified = false
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, mods)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathCoercesNumericLikeStrings(t *testing.T) {
|
||||
input := `{"code":"001","items":[{"value":"1"},{"value":2}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_lossless_strings",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local root = jpath(data, "$.code")
|
||||
if #root ~= 1 or type(root[1]) ~= "table" or root[1].val ~= 1 or root[1].raw ~= "001" then
|
||||
error("expected $.code to be coerced to number")
|
||||
end
|
||||
local vals = jpath(data, "$.items[*].value")
|
||||
if #vals ~= 2 then error("expected two values") end
|
||||
if type(vals[1]) ~= "table" or vals[1].val ~= 1 or vals[1].raw ~= "1" then
|
||||
error("expected first value to be coerced to number")
|
||||
end
|
||||
if type(vals[2]) ~= "table" or vals[2].val ~= 2 or vals[2].raw ~= 2 then
|
||||
error("expected second value to remain number")
|
||||
end
|
||||
modified = false
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, mods)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathRawKeepsNumericLikeStrings(t *testing.T) {
|
||||
input := `{"code":"001","items":[{"value":"1"},{"value":2}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpath_raw_mode_lossless_strings",
|
||||
JSON: true,
|
||||
Raw: true,
|
||||
Lua: `
|
||||
local root = jpath(data, "$.code")
|
||||
if #root ~= 1 or type(root[1]) ~= "string" or root[1] ~= "001" then
|
||||
error("expected $.code to remain raw string")
|
||||
end
|
||||
local vals = jpath(data, "$.items[*].value")
|
||||
if #vals ~= 2 then error("expected two values") end
|
||||
if type(vals[1]) ~= "string" or vals[1] ~= "1" then
|
||||
error("expected first value to remain string")
|
||||
end
|
||||
if type(vals[2]) ~= "number" or vals[2] ~= 2 then
|
||||
error("expected second value to remain number")
|
||||
end
|
||||
modified = false
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, mods)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathRmRemovesArrayMatches(t *testing.T) {
|
||||
input := `{"items":[{"id":1},{"id":2},{"id":3}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpathrm_array",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local removed = jpathrm(data, "$.items[?@.id == 2]")
|
||||
if removed ~= 1 then error("expected one array element removal") end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"items":[{"id":1},{"id":3}]}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathRmRemovesObjectField(t *testing.T) {
|
||||
input := `{"keep":1,"drop":2}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpathrm_object",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local probe = jpath(data, "$.drop")
|
||||
if #probe ~= 1 or probe[1].val ~= 2 then error("expected probe to see drop") end
|
||||
local removed = jpathrm(data, "$['drop']")
|
||||
if removed ~= 1 then error("expected one object field removal") end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
mods, err := ProcessJSON(input, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, mods)
|
||||
|
||||
result, _ := utils.ExecuteModifications(mods, input)
|
||||
assert.Equal(t, `{"keep":1}`, result)
|
||||
}
|
||||
|
||||
func TestProcessJSONJPathRmRejectsUnmappedContext(t *testing.T) {
|
||||
input := `{"items":[{"id":1}]}`
|
||||
command := utils.ModifyCommand{
|
||||
Name: "json_jpathrm_unmapped",
|
||||
JSON: true,
|
||||
Lua: `
|
||||
local detached = { id = 1 }
|
||||
jpathrm(detached, "$")
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
_, err := ProcessJSON(input, command, "test.json")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "jpathrm context must come from data or jpath results")
|
||||
}
|
||||
191
processor/lua_external_integration_test.go
Normal file
191
processor/lua_external_integration_test.go
Normal file
@@ -0,0 +1,191 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestProcessRegexWithExternalLuaFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-integration-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file with replacement variable
|
||||
luaFile := filepath.Join(tmpDir, "multiply.lua")
|
||||
luaContent := `v1 = v1 * 2
|
||||
replacement = format("<value>%s</value>", v1)
|
||||
return true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test content
|
||||
content := `<value>10</value>`
|
||||
|
||||
// Create command with external Lua reference
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Regex: `<value>(\d+)</value>`,
|
||||
Lua: "@" + filepath.Base(luaFile),
|
||||
SourceDir: tmpDir,
|
||||
}
|
||||
|
||||
// Process
|
||||
modifications, err := ProcessRegex(content, command, "test.xml")
|
||||
assert.NoError(t, err)
|
||||
assert.Greater(t, len(modifications), 0)
|
||||
|
||||
// Apply modifications
|
||||
result := content
|
||||
for _, mod := range modifications {
|
||||
result = result[:mod.From] + mod.With + result[mod.To:]
|
||||
}
|
||||
assert.Contains(t, result, "<value>20</value>")
|
||||
}
|
||||
|
||||
func TestProcessJSONWithExternalLuaFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-json-integration-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file
|
||||
luaFile := filepath.Join(tmpDir, "json_modify.lua")
|
||||
luaContent := `data.value = 84
|
||||
modified = true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test JSON content
|
||||
content := `{"value": 42}`
|
||||
|
||||
// Create command with external Lua reference
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
JSON: true,
|
||||
Lua: "@" + filepath.Base(luaFile),
|
||||
SourceDir: tmpDir,
|
||||
}
|
||||
|
||||
// Process
|
||||
modifications, err := ProcessJSON(content, command, "test.json")
|
||||
assert.NoError(t, err)
|
||||
assert.Greater(t, len(modifications), 0)
|
||||
|
||||
// Apply modifications to verify
|
||||
result := content
|
||||
for _, mod := range modifications {
|
||||
result = result[:mod.From] + mod.With + result[mod.To:]
|
||||
}
|
||||
// Check that value was changed to 84 (formatting may vary)
|
||||
assert.Contains(t, result, `"value"`)
|
||||
assert.Contains(t, result, `84`)
|
||||
assert.NotContains(t, result, `"value": 42`)
|
||||
assert.NotContains(t, result, `"value":42`)
|
||||
}
|
||||
|
||||
func TestProcessXMLWithExternalLuaFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-xml-integration-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file for XML mode
|
||||
luaFile := filepath.Join(tmpDir, "xml_modify.lua")
|
||||
luaContent := `for _, child in ipairs(data.children) do
|
||||
if child.tag.val == "Item" then
|
||||
child.attr.Weight = tostring(tonumber(child.attr.Weight.val) * 2)
|
||||
end
|
||||
end
|
||||
modified = true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test XML content
|
||||
content := `<Items><Item Weight="10" /></Items>`
|
||||
|
||||
// Create command with external Lua reference
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: "@" + filepath.Base(luaFile),
|
||||
SourceDir: tmpDir,
|
||||
}
|
||||
|
||||
// Process
|
||||
modifications, err := ProcessXML(content, command, "test.xml")
|
||||
assert.NoError(t, err)
|
||||
assert.Greater(t, len(modifications), 0)
|
||||
|
||||
// Apply modifications to verify
|
||||
result := content
|
||||
for _, mod := range modifications {
|
||||
result = result[:mod.From] + mod.With + result[mod.To:]
|
||||
}
|
||||
assert.Contains(t, result, `Weight="20"`)
|
||||
}
|
||||
|
||||
func TestExternalLuaFileWithVariables(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-vars-integration-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file with variable reference
|
||||
luaFile := filepath.Join(tmpDir, "with_vars.lua")
|
||||
luaContent := `v1 = v1 * $multiply
|
||||
replacement = format("<value>%s</value>", v1)
|
||||
return true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Set global variable
|
||||
SetVariables(map[string]interface{}{"multiply": 1.5})
|
||||
defer SetVariables(map[string]interface{}{})
|
||||
|
||||
// Create test content
|
||||
content := `<value>10</value>`
|
||||
|
||||
// Create command with external Lua reference
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Regex: `<value>(\d+)</value>`,
|
||||
Lua: "@" + filepath.Base(luaFile),
|
||||
SourceDir: tmpDir,
|
||||
}
|
||||
|
||||
// Process
|
||||
modifications, err := ProcessRegex(content, command, "test.xml")
|
||||
assert.NoError(t, err)
|
||||
assert.Greater(t, len(modifications), 0)
|
||||
|
||||
// Apply modifications
|
||||
result := content
|
||||
for _, mod := range modifications {
|
||||
result = result[:mod.From] + mod.With + result[mod.To:]
|
||||
}
|
||||
assert.Contains(t, result, "<value>15</value>")
|
||||
}
|
||||
|
||||
func TestExternalLuaFileErrorHandling(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-error-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create command with non-existent external Lua file
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Regex: `<value>(\d+)</value>`,
|
||||
Lua: "@nonexistent.lua",
|
||||
SourceDir: tmpDir,
|
||||
}
|
||||
|
||||
// Process - the error script will be generated but execution will fail
|
||||
// ProcessRegex continues on Lua errors, so no modifications will be made
|
||||
content := `<value>10</value>`
|
||||
modifications, err := ProcessRegex(content, command, "test.xml")
|
||||
|
||||
// No error returned (ProcessRegex continues on Lua errors), but no modifications made
|
||||
assert.NoError(t, err)
|
||||
assert.Empty(t, modifications)
|
||||
}
|
||||
224
processor/lua_external_test.go
Normal file
224
processor/lua_external_test.go
Normal file
@@ -0,0 +1,224 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestLoadExternalLuaFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file
|
||||
luaFile := filepath.Join(tmpDir, "test.lua")
|
||||
luaContent := `data.value = 42
|
||||
modified = true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
luaPath string
|
||||
sourceDir string
|
||||
expected string
|
||||
wantError bool
|
||||
}{
|
||||
{
|
||||
name: "Relative path with sourceDir",
|
||||
luaPath: "test.lua",
|
||||
sourceDir: tmpDir,
|
||||
expected: luaContent,
|
||||
wantError: false,
|
||||
},
|
||||
{
|
||||
name: "Absolute path",
|
||||
luaPath: luaFile,
|
||||
sourceDir: "",
|
||||
expected: luaContent,
|
||||
wantError: false,
|
||||
},
|
||||
{
|
||||
name: "Relative path without sourceDir (uses CWD)",
|
||||
luaPath: filepath.Base(luaFile),
|
||||
sourceDir: "",
|
||||
expected: luaContent,
|
||||
wantError: false,
|
||||
},
|
||||
{
|
||||
name: "Nested relative path",
|
||||
luaPath: "scripts/test.lua",
|
||||
sourceDir: tmpDir,
|
||||
expected: "",
|
||||
wantError: true,
|
||||
},
|
||||
{
|
||||
name: "Non-existent file",
|
||||
luaPath: "nonexistent.lua",
|
||||
sourceDir: tmpDir,
|
||||
expected: "",
|
||||
wantError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Change to tmpDir for CWD-based tests
|
||||
if tt.sourceDir == "" && !filepath.IsAbs(tt.luaPath) {
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
}
|
||||
|
||||
result, err := LoadExternalLuaFile(tt.luaPath, tt.sourceDir)
|
||||
if tt.wantError {
|
||||
assert.Error(t, err)
|
||||
assert.Empty(t, result)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptWithExternalFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-build-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file
|
||||
luaFile := filepath.Join(tmpDir, "multiply.lua")
|
||||
luaContent := `v1 = v1 * 2`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test with relative path
|
||||
relativePath := filepath.Base(luaFile)
|
||||
result := BuildLuaScript("@"+relativePath, tmpDir)
|
||||
assert.Contains(t, result, "v1 = v1 * 2")
|
||||
assert.Contains(t, result, "function run()")
|
||||
|
||||
// Test with absolute path
|
||||
result = BuildLuaScript("@"+luaFile, "")
|
||||
assert.Contains(t, result, "v1 = v1 * 2")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptWithExternalFileError(t *testing.T) {
|
||||
// Test that missing file returns error script
|
||||
result := BuildLuaScript("@nonexistent.lua", "/tmp")
|
||||
assert.Contains(t, result, "error(")
|
||||
assert.Contains(t, result, "Failed to load external Lua file")
|
||||
}
|
||||
|
||||
func TestBuildJSONLuaScriptWithExternalFile(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-json-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file
|
||||
luaFile := filepath.Join(tmpDir, "json_modify.lua")
|
||||
luaContent := `data.value = 84
|
||||
modified = true`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test with relative path
|
||||
relativePath := filepath.Base(luaFile)
|
||||
result := BuildJSONLuaScript("@"+relativePath, tmpDir)
|
||||
assert.Contains(t, result, "data.value = 84")
|
||||
assert.Contains(t, result, "modified = true")
|
||||
assert.Contains(t, result, "function run()")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptWithInlineCode(t *testing.T) {
|
||||
// Test that inline code (without @) still works
|
||||
result := BuildLuaScript("v1 = v1 * 2", "")
|
||||
assert.Contains(t, result, "v1 = v1 * 2")
|
||||
assert.Contains(t, result, "function run()")
|
||||
assert.NotContains(t, result, "@")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptExternalFileWithVariables(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-vars-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file with variable reference
|
||||
luaFile := filepath.Join(tmpDir, "with_vars.lua")
|
||||
luaContent := `v1 = v1 * $multiply`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Set a global variable
|
||||
SetVariables(map[string]interface{}{"multiply": 1.5})
|
||||
defer SetVariables(map[string]interface{}{})
|
||||
|
||||
// Test that variables are substituted in external files
|
||||
result := BuildLuaScript("@"+filepath.Base(luaFile), tmpDir)
|
||||
assert.Contains(t, result, "v1 = v1 * 1.5")
|
||||
assert.NotContains(t, result, "$multiply")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptExternalFileNestedPath(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-nested-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create nested directory structure
|
||||
scriptsDir := filepath.Join(tmpDir, "scripts")
|
||||
err = os.MkdirAll(scriptsDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
|
||||
luaFile := filepath.Join(scriptsDir, "test.lua")
|
||||
luaContent := `data.value = 100`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test with nested relative path
|
||||
result := BuildLuaScript("@scripts/test.lua", tmpDir)
|
||||
assert.Contains(t, result, "data.value = 100")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptExternalFileWithPrependLuaAssignment(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-prepend-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file with operator prefix (should trigger prepend)
|
||||
luaFile := filepath.Join(tmpDir, "multiply.lua")
|
||||
luaContent := `* 2`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test that prepend still works with external files
|
||||
result := BuildLuaScript("@"+filepath.Base(luaFile), tmpDir)
|
||||
// PrependLuaAssignment adds "v1 = v1" + "* 2" = "v1 = v1* 2" (no space between v1 and *)
|
||||
assert.Contains(t, result, "v1 = v1* 2")
|
||||
}
|
||||
|
||||
func TestBuildLuaScriptExternalFilePreservesWhitespace(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "lua-external-whitespace-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test Lua file with multiline content
|
||||
luaFile := filepath.Join(tmpDir, "multiline.lua")
|
||||
luaContent := `if data.items then
|
||||
for i, item in ipairs(data.items) do
|
||||
item.value = item.value * 2
|
||||
end
|
||||
modified = true
|
||||
end`
|
||||
err = os.WriteFile(luaFile, []byte(luaContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test that whitespace and formatting is preserved
|
||||
result := BuildLuaScript("@"+filepath.Base(luaFile), tmpDir)
|
||||
assert.Contains(t, result, "if data.items then")
|
||||
assert.Contains(t, result, " for i, item in ipairs(data.items) do")
|
||||
assert.Contains(t, result, " item.value = item.value * 2")
|
||||
}
|
||||
218
processor/lua_runtime_test.go
Normal file
218
processor/lua_runtime_test.go
Normal file
@@ -0,0 +1,218 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// Test replaceVariables function
|
||||
func TestReplaceVariables(t *testing.T) {
|
||||
// Setup global variables
|
||||
globalVariables = map[string]interface{}{
|
||||
"multiplier": 2.5,
|
||||
"prefix": "TEST_",
|
||||
"enabled": true,
|
||||
"disabled": false,
|
||||
"count": 42,
|
||||
}
|
||||
defer func() {
|
||||
globalVariables = make(map[string]interface{})
|
||||
}()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Replace numeric variable",
|
||||
input: "v1 * $multiplier",
|
||||
expected: "v1 * 2.5",
|
||||
},
|
||||
{
|
||||
name: "Replace string variable",
|
||||
input: `s1 = $prefix .. "value"`,
|
||||
expected: `s1 = "TEST_" .. "value"`,
|
||||
},
|
||||
{
|
||||
name: "Replace boolean true",
|
||||
input: "enabled = $enabled",
|
||||
expected: "enabled = true",
|
||||
},
|
||||
{
|
||||
name: "Replace boolean false",
|
||||
input: "disabled = $disabled",
|
||||
expected: "disabled = false",
|
||||
},
|
||||
{
|
||||
name: "Replace integer",
|
||||
input: "count = $count",
|
||||
expected: "count = 42",
|
||||
},
|
||||
{
|
||||
name: "Multiple replacements",
|
||||
input: "$count * $multiplier",
|
||||
expected: "42 * 2.5",
|
||||
},
|
||||
{
|
||||
name: "No variables",
|
||||
input: "v1 * 2",
|
||||
expected: "v1 * 2",
|
||||
},
|
||||
{
|
||||
name: "Undefined variable",
|
||||
input: "v1 * $undefined",
|
||||
expected: "v1 * $undefined",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := replaceVariables(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Test SetVariables with all type cases
|
||||
func TestSetVariablesAllTypes(t *testing.T) {
|
||||
vars := map[string]interface{}{
|
||||
"int_val": 42,
|
||||
"int64_val": int64(100),
|
||||
"float32_val": float32(3.14),
|
||||
"float64_val": 2.718,
|
||||
"bool_true": true,
|
||||
"bool_false": false,
|
||||
"string_val": "hello",
|
||||
}
|
||||
|
||||
SetVariables(vars)
|
||||
|
||||
// Create Lua state to verify
|
||||
L, err := NewLuaState()
|
||||
assert.NoError(t, err)
|
||||
defer L.Close()
|
||||
|
||||
// Verify int64
|
||||
int64Val := L.GetGlobal("int64_val")
|
||||
assert.Equal(t, lua.LTNumber, int64Val.Type())
|
||||
assert.Equal(t, 100.0, float64(int64Val.(lua.LNumber)))
|
||||
|
||||
// Verify float32
|
||||
float32Val := L.GetGlobal("float32_val")
|
||||
assert.Equal(t, lua.LTNumber, float32Val.Type())
|
||||
assert.InDelta(t, 3.14, float64(float32Val.(lua.LNumber)), 0.01)
|
||||
|
||||
// Verify bool true
|
||||
boolTrue := L.GetGlobal("bool_true")
|
||||
assert.Equal(t, lua.LTBool, boolTrue.Type())
|
||||
assert.True(t, bool(boolTrue.(lua.LBool)))
|
||||
|
||||
// Verify bool false
|
||||
boolFalse := L.GetGlobal("bool_false")
|
||||
assert.Equal(t, lua.LTBool, boolFalse.Type())
|
||||
assert.False(t, bool(boolFalse.(lua.LBool)))
|
||||
|
||||
// Verify string
|
||||
stringVal := L.GetGlobal("string_val")
|
||||
assert.Equal(t, lua.LTString, stringVal.Type())
|
||||
assert.Equal(t, "hello", string(stringVal.(lua.LString)))
|
||||
}
|
||||
|
||||
// Test HTTP fetch with test server
|
||||
func TestFetchWithTestServerGetRequest(t *testing.T) {
|
||||
// Create test HTTP server
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Verify request
|
||||
assert.Equal(t, "GET", r.Method)
|
||||
|
||||
// Send response
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte(`{"status": "success"}`))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
// Test fetch
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
|
||||
script := `
|
||||
response = fetch("` + server.URL + `")
|
||||
assert(response ~= nil, "Expected response")
|
||||
assert(response.ok == true, "Expected ok to be true")
|
||||
assert(response.status == 200, "Expected status 200")
|
||||
assert(response.body == '{"status": "success"}', "Expected correct body")
|
||||
`
|
||||
|
||||
err := L.DoString(script)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestFetchWithTestServerPostRequest(t *testing.T) {
|
||||
// Create test HTTP server
|
||||
receivedBody := ""
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
assert.Equal(t, "POST", r.Method)
|
||||
assert.Equal(t, "application/json", r.Header.Get("Content-Type"))
|
||||
|
||||
// Read body
|
||||
buf := make([]byte, 1024)
|
||||
n, _ := r.Body.Read(buf)
|
||||
receivedBody = string(buf[:n])
|
||||
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
w.Write([]byte(`{"created": true}`))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
|
||||
script := `
|
||||
local opts = {
|
||||
method = "POST",
|
||||
headers = {["Content-Type"] = "application/json"},
|
||||
body = '{"test": "data"}'
|
||||
}
|
||||
response = fetch("` + server.URL + `", opts)
|
||||
assert(response ~= nil, "Expected response")
|
||||
assert(response.ok == true, "Expected ok to be true")
|
||||
assert(response.status == 201, "Expected status 201")
|
||||
`
|
||||
|
||||
err := L.DoString(script)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, `{"test": "data"}`, receivedBody)
|
||||
}
|
||||
|
||||
func TestFetchWithTestServerReturnsNotFound(t *testing.T) {
|
||||
// Create test HTTP server that returns 404
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
w.Write([]byte(`{"error": "not found"}`))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
|
||||
script := `
|
||||
response = fetch("` + server.URL + `")
|
||||
assert(response ~= nil, "Expected response")
|
||||
assert(response.ok == false, "Expected ok to be false for 404")
|
||||
assert(response.status == 404, "Expected status 404")
|
||||
`
|
||||
|
||||
err := L.DoString(script)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
43
processor/luahelper-test-regress.lua
Normal file
43
processor/luahelper-test-regress.lua
Normal file
@@ -0,0 +1,43 @@
|
||||
-- Load the helper script
|
||||
dofile("luahelper.lua")
|
||||
|
||||
-- Test helper function
|
||||
local function assert(condition, message)
|
||||
if not condition then error("ASSERTION FAILED: " .. (message or "unknown error")) end
|
||||
end
|
||||
|
||||
local function test(name, fn)
|
||||
local ok, err = pcall(fn)
|
||||
if ok then
|
||||
print("PASS: " .. name)
|
||||
else
|
||||
print("FAIL: " .. name .. " - " .. tostring(err))
|
||||
end
|
||||
end
|
||||
|
||||
test("regression test 001", function()
|
||||
local csv =
|
||||
[[Id Enabled ModuleId DepartmentId IsDepartment PositionInGraph Parents Modifiers UpgradePrice
|
||||
news_department TRUE navigation TRUE 2 0 NewsAnalyticsDepartment + 1 communication_relay communication_relay
|
||||
nd_charge_bonus TRUE navigation news_department FALSE 1 0 news_department NDSkillChargeBonus + 1 expert_disk expert_disk
|
||||
nd_cooldown_time_reduce TRUE navigation news_department FALSE 3 0 news_department NDCooldownTimeReduce - 2 communication_relay communication_relay]]
|
||||
local rows, err = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = true })
|
||||
if err then error("fromCSV error: " .. err) end
|
||||
assert(#rows == 3, "Should have 3 rows")
|
||||
assert(rows[1].Id == "news_department", "First row Id should be 'news_department'")
|
||||
assert(rows[1].Enabled == "TRUE", "First row Enabled should be 'TRUE'")
|
||||
assert(rows[1].ModuleId == "navigation", "First row ModuleId should be 'navigation'")
|
||||
assert(rows[1].DepartmentId == "", "First row DepartmentId should be ''")
|
||||
assert(rows[1].IsDepartment == "TRUE", "First row IsDepartment should be 'TRUE'")
|
||||
assert(rows.Headers[1] == "Id", "First row Headers should be 'Id'")
|
||||
assert(rows.Headers[2] == "Enabled", "First row Headers should be 'Enabled'")
|
||||
assert(rows.Headers[3] == "ModuleId", "First row Headers should be 'ModuleId'")
|
||||
assert(rows.Headers[4] == "DepartmentId", "First row Headers should be 'DepartmentId'")
|
||||
assert(rows.Headers[5] == "IsDepartment", "First row Headers should be 'IsDepartment'")
|
||||
assert(rows.Headers[6] == "PositionInGraph", "First row Headers should be 'PositionInGraph'")
|
||||
assert(rows.Headers[7] == "Parents", "First row Headers should be 'Parents'")
|
||||
assert(rows.Headers[8] == "Modifiers", "First row Headers should be 'Modifiers'")
|
||||
assert(rows.Headers[9] == "UpgradePrice", "First row Headers should be 'UpgradePrice'")
|
||||
end)
|
||||
|
||||
print("\nAll tests completed!")
|
||||
224
processor/luahelper-test-xml.lua
Normal file
224
processor/luahelper-test-xml.lua
Normal file
@@ -0,0 +1,224 @@
|
||||
-- Load the helper script
|
||||
dofile("luahelper.lua")
|
||||
|
||||
-- Test helper function
|
||||
local function assert(condition, message)
|
||||
if not condition then error("ASSERTION FAILED: " .. (message or "unknown error")) end
|
||||
end
|
||||
|
||||
local function test(name, fn)
|
||||
local ok, err = pcall(fn)
|
||||
if ok then
|
||||
print("PASS: " .. name)
|
||||
else
|
||||
print("FAIL: " .. name .. " - " .. tostring(err))
|
||||
end
|
||||
end
|
||||
|
||||
-- Test findElements
|
||||
test("findElements finds all matching elements recursively", function()
|
||||
local testXML = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item", _attr = { name = "sword" } },
|
||||
{ _tag = "item", _attr = { name = "shield" } },
|
||||
{
|
||||
_tag = "container",
|
||||
_children = {
|
||||
{ _tag = "item", _attr = { name = "potion" } },
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
local items = findElements(testXML, "item")
|
||||
assert(#items == 3, "Should find 3 items total (recursive)")
|
||||
assert(items[1]._attr.name == "sword", "First item should be sword")
|
||||
assert(items[3]._attr.name == "potion", "Third item should be potion (from nested)")
|
||||
end)
|
||||
|
||||
-- Test getNumAttr and setNumAttr
|
||||
test("getNumAttr gets numeric attribute", function()
|
||||
local elem = { _tag = "item", _attr = { damage = "10" } }
|
||||
local damage = getNumAttr(elem, "damage")
|
||||
assert(damage == 10, "Should get damage as number")
|
||||
end)
|
||||
|
||||
test("getNumAttr returns nil for missing attribute", function()
|
||||
local elem = { _tag = "item", _attr = {} }
|
||||
local damage = getNumAttr(elem, "damage")
|
||||
assert(damage == nil, "Should return nil for missing attribute")
|
||||
end)
|
||||
|
||||
test("setNumAttr sets numeric attribute", function()
|
||||
local elem = { _tag = "item", _attr = {} }
|
||||
setNumAttr(elem, "damage", 20)
|
||||
assert(elem._attr.damage == "20", "Should set damage as string")
|
||||
end)
|
||||
|
||||
-- Test modifyNumAttr
|
||||
test("modifyNumAttr modifies numeric attribute", function()
|
||||
local elem = { _tag = "item", _attr = { weight = "5.5" } }
|
||||
local modified = modifyNumAttr(elem, "weight", function(val) return val * 2 end)
|
||||
assert(modified == true, "Should return true when modified")
|
||||
assert(elem._attr.weight == "11.0", "Should double weight")
|
||||
end)
|
||||
|
||||
test("modifyNumAttr returns false for missing attribute", function()
|
||||
local elem = { _tag = "item", _attr = {} }
|
||||
local modified = modifyNumAttr(elem, "weight", function(val) return val * 2 end)
|
||||
assert(modified == false, "Should return false when attribute missing")
|
||||
end)
|
||||
|
||||
-- Test filterElements
|
||||
test("filterElements filters by predicate", function()
|
||||
local testXML = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item", _attr = { healing = "20" } },
|
||||
{ _tag = "item", _attr = { damage = "10" } },
|
||||
{ _tag = "item", _attr = { healing = "50" } },
|
||||
},
|
||||
}
|
||||
local healingItems = filterElements(testXML, function(elem) return hasAttr(elem, "healing") end)
|
||||
assert(#healingItems == 2, "Should find 2 healing items")
|
||||
end)
|
||||
|
||||
-- Test visitElements
|
||||
test("visitElements visits all elements", function()
|
||||
local testXML = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item" },
|
||||
{ _tag = "container", _children = {
|
||||
{ _tag = "item" },
|
||||
} },
|
||||
},
|
||||
}
|
||||
local count = 0
|
||||
visitElements(testXML, function(elem) count = count + 1 end)
|
||||
assert(count == 4, "Should visit 4 elements (root + 2 items + container)")
|
||||
end)
|
||||
|
||||
-- Test getText and setText
|
||||
test("getText gets text content", function()
|
||||
local elem = { _tag = "item", _text = "Iron Sword" }
|
||||
local text = getText(elem)
|
||||
assert(text == "Iron Sword", "Should get text content")
|
||||
end)
|
||||
|
||||
test("setText sets text content", function()
|
||||
local elem = { _tag = "item" }
|
||||
setText(elem, "New Text")
|
||||
assert(elem._text == "New Text", "Should set text content")
|
||||
end)
|
||||
|
||||
-- Test hasAttr and getAttr
|
||||
test("hasAttr checks attribute existence", function()
|
||||
local elem = { _tag = "item", _attr = { damage = "10" } }
|
||||
assert(hasAttr(elem, "damage") == true, "Should have damage")
|
||||
assert(hasAttr(elem, "magic") == false, "Should not have magic")
|
||||
end)
|
||||
|
||||
test("getAttr gets attribute value", function()
|
||||
local elem = { _tag = "item", _attr = { name = "sword" } }
|
||||
assert(getAttr(elem, "name") == "sword", "Should get name attribute")
|
||||
assert(getAttr(elem, "missing") == nil, "Should return nil for missing")
|
||||
end)
|
||||
|
||||
test("setAttr sets attribute value", function()
|
||||
local elem = { _tag = "item" }
|
||||
setAttr(elem, "name", "sword")
|
||||
assert(elem._attr.name == "sword", "Should set attribute")
|
||||
end)
|
||||
|
||||
-- Test findFirstElement
|
||||
test("findFirstElement finds first direct child", function()
|
||||
local parent = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item", _attr = { id = "1" } },
|
||||
{ _tag = "item", _attr = { id = "2" } },
|
||||
},
|
||||
}
|
||||
local first = findFirstElement(parent, "item")
|
||||
assert(first._attr.id == "1", "Should find first item")
|
||||
end)
|
||||
|
||||
test("findFirstElement returns nil when not found", function()
|
||||
local parent = { _tag = "root", _children = {} }
|
||||
local result = findFirstElement(parent, "item")
|
||||
assert(result == nil, "Should return nil when not found")
|
||||
end)
|
||||
|
||||
-- Test getChildren
|
||||
test("getChildren gets all direct children with tag", function()
|
||||
local parent = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item", _attr = { id = "1" } },
|
||||
{ _tag = "config" },
|
||||
{ _tag = "item", _attr = { id = "2" } },
|
||||
},
|
||||
}
|
||||
local items = getChildren(parent, "item")
|
||||
assert(#items == 2, "Should get 2 items")
|
||||
assert(items[1]._attr.id == "1", "First should have id=1")
|
||||
assert(items[2]._attr.id == "2", "Second should have id=2")
|
||||
end)
|
||||
|
||||
-- Test countChildren
|
||||
test("countChildren counts direct children with tag", function()
|
||||
local parent = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item" },
|
||||
{ _tag = "config" },
|
||||
{ _tag = "item" },
|
||||
},
|
||||
}
|
||||
assert(countChildren(parent, "item") == 2, "Should count 2 items")
|
||||
assert(countChildren(parent, "config") == 1, "Should count 1 config")
|
||||
end)
|
||||
|
||||
-- Test addChild
|
||||
test("addChild adds child element", function()
|
||||
local parent = { _tag = "root", _children = {} }
|
||||
addChild(parent, { _tag = "item" })
|
||||
assert(#parent._children == 1, "Should have 1 child")
|
||||
assert(parent._children[1]._tag == "item", "Child should be item")
|
||||
end)
|
||||
|
||||
test("addChild creates children array if needed", function()
|
||||
local parent = { _tag = "root" }
|
||||
addChild(parent, { _tag = "item" })
|
||||
assert(parent._children ~= nil, "Should create _children")
|
||||
assert(#parent._children == 1, "Should have 1 child")
|
||||
end)
|
||||
|
||||
-- Test removeChildren
|
||||
test("removeChildren removes all matching children", function()
|
||||
local parent = {
|
||||
_tag = "root",
|
||||
_children = {
|
||||
{ _tag = "item" },
|
||||
{ _tag = "config" },
|
||||
{ _tag = "item" },
|
||||
},
|
||||
}
|
||||
local removed = removeChildren(parent, "item")
|
||||
assert(removed == 2, "Should remove 2 items")
|
||||
assert(#parent._children == 1, "Should have 1 child left")
|
||||
assert(parent._children[1]._tag == "config", "Remaining should be config")
|
||||
end)
|
||||
|
||||
test("removeChildren returns 0 when none found", function()
|
||||
local parent = {
|
||||
_tag = "root",
|
||||
_children = { { _tag = "item" } },
|
||||
}
|
||||
local removed = removeChildren(parent, "config")
|
||||
assert(removed == 0, "Should remove 0")
|
||||
assert(#parent._children == 1, "Should still have 1 child")
|
||||
end)
|
||||
|
||||
print("\nAll tests completed!")
|
||||
534
processor/luahelper-test.lua
Normal file
534
processor/luahelper-test.lua
Normal file
@@ -0,0 +1,534 @@
|
||||
-- Load the helper script
dofile("luahelper.lua")

-- Test helper function: raises with a prefixed message when the condition fails.
-- Deliberately shadows the global assert so failures carry a uniform marker.
local function assert(condition, message)
    if not condition then
        error("ASSERTION FAILED: " .. (message or "unknown error"))
    end
end

-- Runs one named test case; reports PASS/FAIL without aborting the whole suite.
local function test(name, fn)
    local ok, err = pcall(fn)
    if not ok then
        print("FAIL: " .. name .. " - " .. tostring(err))
    else
        print("PASS: " .. name)
    end
end
|
||||
|
||||
-- Test fromCSV option validation
test("fromCSV invalid option", function()
    local raw = "a,b,c\n1,2,3"
    local ok, errMsg = pcall(function() fromCSV(raw, { invalidOption = true }) end)
    assert(ok == false, "Should raise error")
    assert(string.find(errMsg, "unknown option"), "Error should mention unknown option")
end)

-- Test toCSV invalid delimiter
test("toCSV invalid delimiter", function()
    -- toCSV converts delimiter to string, so 123 becomes "123"
    local out = toCSV({ { "a", "b", "c" } }, { delimiter = 123 })
    assert(out == "a123b123c", "Should convert delimiter to string")
end)

-- Test fromCSV basic parsing
test("fromCSV basic", function()
    local parsed = fromCSV("a,b,c\n1,2,3\n4,5,6")
    assert(#parsed == 3, "Should have 3 rows")
    assert(parsed[1][1] == "a", "First row first field should be 'a'")
    assert(parsed[2][2] == "2", "Second row second field should be '2'")
end)

-- Test fromCSV with headers
test("fromCSV with headers", function()
    local parsed = fromCSV("foo,bar,baz\n1,2,3\n4,5,6", { hasheader = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1][1] == "1", "First row first field should be '1'")
    assert(parsed[1].foo == "1", "First row foo should be '1'")
    assert(parsed[1].bar == "2", "First row bar should be '2'")
    assert(parsed[1].baz == "3", "First row baz should be '3'")
end)

-- Test fromCSV with custom delimiter
test("fromCSV with tab delimiter", function()
    local parsed = fromCSV("a\tb\tc\n1\t2\t3", { delimiter = "\t" })
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "a", "First row first field should be 'a'")
    assert(parsed[2][2] == "2", "Second row second field should be '2'")
end)

-- Test fromCSV with quoted fields
test("fromCSV with quoted fields", function()
    local parsed = fromCSV('"hello,world","test"\n"foo","bar"')
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "hello,world", "Quoted field with comma should be preserved")
    assert(parsed[1][2] == "test", "Second field should be 'test'")
end)

-- Test toCSV basic
test("toCSV basic", function()
    local out = toCSV({ { "a", "b", "c" }, { "1", "2", "3" } })
    assert(out == "a,b,c\n1,2,3", "CSV output should match expected")
end)

-- Test toCSV with custom delimiter
test("toCSV with tab delimiter", function()
    local out = toCSV({ { "a", "b", "c" }, { "1", "2", "3" } }, { delimiter = "\t" })
    assert(out == "a\tb\tc\n1\t2\t3", "TSV output should match expected")
end)

-- Test toCSV with fields needing quoting
test("toCSV with quoted fields", function()
    local out = toCSV({ { "hello,world", "test" }, { "foo", "bar" } })
    assert(out == '"hello,world",test\nfoo,bar', "Fields with commas should be quoted")
end)

-- Test round trip
test("fromCSV toCSV round trip", function()
    local original = "a,b,c\n1,2,3\n4,5,6"
    local out = toCSV(fromCSV(original))
    assert(out == original, "Round trip should preserve original")
end)

-- Test round trip with headers
test("fromCSV toCSV round trip with headers", function()
    -- With hasheader the header row is consumed, so only data rows survive.
    local original = "foo,bar,baz\n1,2,3\n4,5,6"
    local out = toCSV(fromCSV(original, { hasheader = true }))
    assert(out == "1,2,3\n4,5,6", "Round trip with headers should preserve data rows")
end)
|
||||
|
||||
-- Test fromCSV with comments
test("fromCSV with comments", function()
    local raw = "# This is a comment\nfoo,bar,baz\n1,2,3\n# Another comment\n4,5,6"
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comments filtered, header + 2 data rows)")
    assert(parsed[1][1] == "foo", "First row should be header row")
    assert(parsed[2][1] == "1", "Second row first field should be '1'")
    assert(parsed[3][1] == "4", "Third row first field should be '4'")
end)

-- Test fromCSV with comments and headers
test("fromCSV with comments and headers", function()
    local raw = "#mercenary_profiles\nId,Name,Value\n1,Test,100\n# End of data\n2,Test2,200"
    local parsed = fromCSV(raw, { hasheader = true, hascomments = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1].Id == "1", "First row Id should be '1'")
    assert(parsed[1].Name == "Test", "First row Name should be 'Test'")
    assert(parsed[1].Value == "100", "First row Value should be '100'")
    assert(parsed[2].Id == "2", "Second row Id should be '2'")
end)

-- Test fromCSV with comments disabled
test("fromCSV without comments", function()
    local raw = "# This should not be filtered\nfoo,bar\n1,2"
    local parsed = fromCSV(raw, { hascomments = false })
    assert(#parsed == 3, "Should have 3 rows (including comment)")
    assert(parsed[1][1] == "# This should not be filtered", "Comment line should be preserved")
end)

-- Test fromCSV with comment at start
test("fromCSV comment at start", function()
    local parsed = fromCSV("# Header comment\nId,Name\n1,Test", { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Test fromCSV with comment with leading whitespace
test("fromCSV comment with whitespace", function()
    local parsed = fromCSV(" # Comment with spaces\nId,Name\n1,Test", { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment with spaces filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Test fromCSV with comment with tabs
test("fromCSV comment with tabs", function()
    local parsed = fromCSV("\t# Comment with tab\nId,Name\n1,Test", { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (comment with tab filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Test fromCSV with multiple consecutive comments
test("fromCSV multiple consecutive comments", function()
    local raw = "# First comment\n# Second comment\n# Third comment\nId,Name\n1,Test"
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (all comments filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Test fromCSV with comment in middle of data
test("fromCSV comment in middle", function()
    local raw = "Id,Name\n1,Test\n# Middle comment\n2,Test2"
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row should be first data")
    assert(parsed[3][1] == "2", "Third row should be second data")
end)

-- Test fromCSV with comment at end
test("fromCSV comment at end", function()
    local parsed = fromCSV("Id,Name\n1,Test\n# End comment", { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (end comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row should be data")
end)

-- Test fromCSV with empty comment line
test("fromCSV empty comment", function()
    local parsed = fromCSV("#\nId,Name\n1,Test", { hascomments = true })
    assert(#parsed == 2, "Should have 2 rows (empty comment filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
end)

-- Test fromCSV with comment and headers
test("fromCSV comment with headers enabled", function()
    local raw = "#mercenary_profiles\nId,Name,Value\n1,Test,100\n2,Test2,200"
    local parsed = fromCSV(raw, { hasheader = true, hascomments = true })
    assert(#parsed == 2, "Should have 2 data rows")
    assert(parsed[1].Id == "1", "First row Id should be '1'")
    assert(parsed[1].Name == "Test", "First row Name should be 'Test'")
    assert(parsed[2].Id == "2", "Second row Id should be '2'")
end)

-- Test fromCSV with comment and TSV delimiter
test("fromCSV comment with tab delimiter", function()
    local parsed = fromCSV("# Comment\nId\tName\n1\tTest", { delimiter = "\t", hascomments = true })
    assert(#parsed == 2, "Should have 2 rows")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][1] == "1", "Second row first field should be '1'")
end)

-- Test fromCSV with comment and headers and TSV
test("fromCSV comment with headers and TSV", function()
    local raw = "#mercenary_profiles\nId\tName\tValue\n1\tTest\t100"
    local parsed = fromCSV(raw, { delimiter = "\t", hasheader = true, hascomments = true })
    assert(#parsed == 1, "Should have 1 data row")
    assert(parsed[1].Id == "1", "Row Id should be '1'")
    assert(parsed[1].Name == "Test", "Row Name should be 'Test'")
    assert(parsed[1].Value == "100", "Row Value should be '100'")
end)

-- Test fromCSV with data field starting with # (not a comment)
test("fromCSV data field starting with hash", function()
    local raw = "Id,Name\n1,#NotAComment\n2,Test"
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (data with # not filtered)")
    assert(parsed[1][1] == "Id", "First row should be header")
    assert(parsed[2][2] == "#NotAComment", "Second row should have #NotAComment as data")
end)

-- Test fromCSV with quoted field starting with #
test("fromCSV quoted field with hash", function()
    local raw = 'Id,Name\n1,"#NotAComment"\n2,Test'
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (quoted # not filtered)")
    assert(parsed[2][2] == "#NotAComment", "Quoted field with # should be preserved")
end)

-- Test fromCSV with comment after quoted field
test("fromCSV comment after quoted field", function()
    local raw = 'Id,Name\n1,"Test"\n# This is a comment\n2,Test2'
    local parsed = fromCSV(raw, { hascomments = true })
    assert(#parsed == 3, "Should have 3 rows (comment filtered)")
    assert(parsed[2][2] == "Test", "Quoted field should be preserved")
    assert(parsed[3][1] == "2", "Third row should be second data row")
end)
|
||||
|
||||
-- Math function tests
test("min function", function()
    assert(min(5, 3) == 3, "min(5, 3) should be 3")
    assert(min(-1, 0) == -1, "min(-1, 0) should be -1")
    assert(min(10, 10) == 10, "min(10, 10) should be 10")
end)

test("max function", function()
    assert(max(5, 3) == 5, "max(5, 3) should be 5")
    assert(max(-1, 0) == 0, "max(-1, 0) should be 0")
    assert(max(10, 10) == 10, "max(10, 10) should be 10")
end)

test("round function", function()
    assert(round(3.14159) == 3, "round(3.14159) should be 3")
    assert(round(3.14159, 2) == 3.14, "round(3.14159, 2) should be 3.14")
    assert(round(3.5) == 4, "round(3.5) should be 4")
    assert(round(3.4) == 3, "round(3.4) should be 3")
    assert(round(123.456, 1) == 123.5, "round(123.456, 1) should be 123.5")
end)

test("floor function", function()
    assert(floor(3.7) == 3, "floor(3.7) should be 3")
    assert(floor(-3.7) == -4, "floor(-3.7) should be -4")
    assert(floor(5) == 5, "floor(5) should be 5")
end)

test("ceil function", function()
    assert(ceil(3.2) == 4, "ceil(3.2) should be 4")
    assert(ceil(-3.2) == -3, "ceil(-3.2) should be -3")
    assert(ceil(5) == 5, "ceil(5) should be 5")
end)

-- String function tests
test("upper function", function()
    assert(upper("hello") == "HELLO", "upper('hello') should be 'HELLO'")
    assert(upper("Hello World") == "HELLO WORLD", "upper('Hello World') should be 'HELLO WORLD'")
    assert(upper("123abc") == "123ABC", "upper('123abc') should be '123ABC'")
end)

test("lower function", function()
    assert(lower("HELLO") == "hello", "lower('HELLO') should be 'hello'")
    assert(lower("Hello World") == "hello world", "lower('Hello World') should be 'hello world'")
    assert(lower("123ABC") == "123abc", "lower('123ABC') should be '123abc'")
end)

test("format function", function()
    assert(format("Hello %s", "World") == "Hello World", "format should work")
    assert(format("Number: %d", 42) == "Number: 42", "format with number should work")
    assert(format("%.2f", 3.14159) == "3.14", "format with float should work")
end)

test("trim function", function()
    assert(trim(" hello ") == "hello", "trim should remove leading and trailing spaces")
    assert(trim(" hello world ") == "hello world", "trim should preserve internal spaces")
    assert(trim("hello") == "hello", "trim should not affect strings without spaces")
    assert(trim(" ") == "", "trim should handle all spaces")
end)

test("strsplit function", function()
    local parts = strsplit("a,b,c", ",")
    assert(#parts == 3, "strsplit should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)

test("strsplit with default separator", function()
    -- Omitting the separator falls back to whitespace splitting.
    local parts = strsplit("a b c")
    assert(#parts == 3, "strsplit with default should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)

test("strsplit with custom separator", function()
    local parts = strsplit("a|b|c", "|")
    assert(#parts == 3, "strsplit with pipe should return 3 elements")
    assert(parts[1] == "a", "First element should be 'a'")
    assert(parts[2] == "b", "Second element should be 'b'")
    assert(parts[3] == "c", "Third element should be 'c'")
end)
|
||||
|
||||
-- Conversion function tests
test("num function", function()
    assert(num("123") == 123, "num('123') should be 123")
    assert(num("45.67") == 45.67, "num('45.67') should be 45.67")
    assert(num("invalid") == 0, "num('invalid') should be 0")
    assert(num("") == 0, "num('') should be 0")
end)

test("str function", function()
    assert(str(123) == "123", "str(123) should be '123'")
    assert(str(45.67) == "45.67", "str(45.67) should be '45.67'")
    assert(str(0) == "0", "str(0) should be '0'")
end)

test("is_number function", function()
    assert(is_number("123") == true, "is_number('123') should be true")
    assert(is_number("45.67") == true, "is_number('45.67') should be true")
    assert(is_number("invalid") == false, "is_number('invalid') should be false")
    assert(is_number("") == false, "is_number('') should be false")
    assert(is_number("123abc") == false, "is_number('123abc') should be false")
end)

-- Table function tests
test("isArray function", function()
    assert(isArray({ 1, 2, 3 }) == true, "isArray should return true for sequential array")
    assert(isArray({ "a", "b", "c" }) == true, "isArray should return true for string array")
    assert(isArray({}) == true, "isArray should return true for empty array")
    assert(isArray({ a = 1, b = 2 }) == false, "isArray should return false for map")
    assert(isArray({ 1, 2, [4] = 4 }) == false, "isArray should return false for sparse array")
    assert(isArray({ [1] = 1, [2] = 2, [3] = 3 }) == true,
        "isArray should return true for 1-indexed array")
    assert(isArray({ [0] = 1, [1] = 2 }) == false,
        "isArray should return false for 0-indexed array")
    assert(isArray({ [1] = 1, [2] = 2, [4] = 4 }) == false,
        "isArray should return false for non-sequential array")
    assert(isArray("not a table") == false, "isArray should return false for non-table")
    assert(isArray(123) == false, "isArray should return false for number")
end)
|
||||
|
||||
-- Verifies header-keyed access on a realistic tab-delimited profile table.
-- NOTE(review): fields in the [[ ]] literal appear space-separated here, but the
-- parse uses delimiter = "\t" — presumably the real file contains literal tab
-- characters between fields; verify whitespace is intact before editing this block.
test("fromCSV assigns header keys correctly", function()
local teststr = [[
#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
]]
local rows = fromCSV(teststr, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows == 2, "Should have 2 data rows")

-- Test first row
assert(rows[1].Id == "john_hawkwood_boss", "First row Id should be 'john_hawkwood_boss'")
assert(rows[1].ModifyStartCost == "20", "First row ModifyStartCost should be '20'")
assert(rows[1].ModifyStep == "0.1", "First row ModifyStep should be '0.1'")
assert(rows[1].Health == "140", "First row Health should be '140'")
assert(rows[1].ActorId == "human_male", "First row ActorId should be 'human_male'")
assert(rows[1].HairColorHex == "#633D08", "First row HairColorHex should be '#633D08'")

-- Test second row
assert(rows[2].Id == "francis_reid_daly", "Second row Id should be 'francis_reid_daly'")
assert(rows[2].ModifyStartCost == "20", "Second row ModifyStartCost should be '20'")
assert(rows[2].ModifyStep == "0.1", "Second row ModifyStep should be '0.1'")
assert(rows[2].Health == "130", "Second row Health should be '130'")
assert(rows[2].ActorId == "human_male", "Second row ActorId should be 'human_male'")

-- Test that numeric indices still work
assert(rows[1][1] == "john_hawkwood_boss", "First row first field by index should work")
assert(rows[1][2] == "20", "First row second field by index should work")
end)
|
||||
|
||||
-- Sanity check: header names map onto fields for a minimal tab-separated input.
test("fromCSV debug header assignment", function()
    local raw = "Id\tName\tValue\n1\tTest\t100\n2\tTest2\t200"
    local parsed = fromCSV(raw, { delimiter = "\t", hasheader = true })
    assert(parsed[1].Id == "1", "Id should be '1'")
    assert(parsed[1].Name == "Test", "Name should be 'Test'")
    assert(parsed[1].Value == "100", "Value should be '100'")
end)
|
||||
|
||||
-- Parses a snippet in the production mercenary-profile file format (comment line,
-- tab-delimited header row, tab-delimited data rows).
-- NOTE(review): delimiter is "\t" but the literal displays space-separated fields —
-- presumably real tabs in the source file; confirm before reformatting.
test("fromCSV real world mercenary file format", function()
local csv = [[#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
]]
local rows = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows == 2, "Should have 2 data rows")

assert(rows[1].Id == "john_hawkwood_boss", "First row Id should be 'john_hawkwood_boss'")
assert(rows[1].ModifyStartCost == "20", "First row ModifyStartCost should be '20'")
assert(rows[2].Id == "francis_reid_daly", "Second row Id should be 'francis_reid_daly'")
end)
|
||||
|
||||
-- End-to-end round trip on a multi-section production-style file:
-- parse (headers + comments) -> serialize -> re-parse, then compare row data
-- and the Headers metadata between the two parses.
-- NOTE(review): delimiter is "\t"; the [[ ]] literal must keep its original
-- whitespace (presumably tabs) or every assertion below changes meaning.
test("full CSV parser complex", function()
local original = [[
#mercenary_profiles
Id ModifyStartCost ModifyStep ModifyLevelLimit Health ResistSheet WoundSlots MeleeDamage MeleeAccuracy RangeAccuracy ReceiveAmputationChance ReceiveWoundChanceMult AttackWoundChanceMult Dodge Los StarvationLimit PainThresholdLimit PainThresholdRegen TalentPerkId ActorId SkinIndex HairType HairColorHex VoiceBank Immunity CreatureClass
john_hawkwood_boss 20 0.1 140 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.60 critchance 0.05 0.5 0.5 0.03 0.5 1.2 0.3 8 2200 16 2 talent_the_man_who_sold_the_world human_male 0 hair1 #633D08 player Human
francis_reid_daly 20 0.1 130 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.5 0.4 0.04 0.9 1 0.3 8 2000 10 1 talent_weapon_durability human_male 0 player Human
victoria_boudicca 20 0.1 90 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.1 0.4 0.45 0.05 1 1.2 0.3 8 1800 8 1 talent_weapon_distance human_female 0 hair1 #633D08 player Human
persival_fawcett 20 0.1 150 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 6 12 crit 1.70 critchance 0.05 0.5 0.35 0.05 0.6 1 0.25 8 2100 16 1 talent_all_resists human_male 1 hair1 #633D08 player Human
Isabella_capet 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.15 0.55 0.3 0.03 0.8 1.4 0.35 7 1700 14 2 talent_ignore_infection human_female 1 hair3 #FF3100 player Human
maximilian_rohr 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 8 16 crit 1.75 critchance 0.05 0.45 0.45 0.06 0.9 1 0.2 8 2000 14 1 talent_ignore_pain human_male 0 hair2 #FFC400 player Human
priya_marlon 20 0.1 110 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.15 0.45 0.35 0.05 1 1.1 0.3 7 2200 12 1 talent_all_consumables_stack human_female 0 hair2 #FFC400 player Human
jacques_kennet 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.05 0.45 0.35 0.04 0.9 1.2 0.3 8 2300 10 1 talent_reload_time human_male 0 hair1 #908E87 player Human
mirza_aishatu 20 0.1 110 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 7 14 crit 1.70 critchance 0.05 0.55 0.45 0.03 1 1.1 0.25 9 2000 10 1 talent_starving_slower human_female 1 hair2 #633D08 player Human
kenzie_yukio 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.1 0.6 0.4 0.04 1 1 0.4 7 1600 12 1 talent_weight_dodge_affect human_male 0 hair2 #633D08 player Human
marika_wulfnod 20 0.1 100 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 6 12 crit 1.60 critchance 0.05 0.5 0.5 0.04 1 1 0.3 9 1900 12 1 talent_belt_slots human_female 0 hair1 #FFC400 player Human
auberon_lukas 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 4 8 crit 1.60 critchance 0.15 0.45 0.45 0.05 0.8 1 0.2 9 1900 8 2 talent_weapon_slot human_male 0 hair2 #633D08 player Human
niko_medich 20 0.1 120 blunt 0 pierce 0 lacer 0 fire 0 cold 0 poison 0 shock 0 beam 0 HumanHead HumanShoulder HumanArm HumanThigh HumanFeet HumanChest HumanBody HumanStomach HumanKnee blunt 5 10 crit 1.70 critchance 0.05 0.4 0.45 0.04 1 1.3 0.25 8 2000 10 1 talent_pistol_acc human_male 0 hair1 #908E87 player Human
#end

#mercenary_classes
Id ModifyStartCost ModifyStep PerkIds
scouts_of_hades 30 0.1 cqc_specialist_basic military_training_basic gear_maintenance_basic blind_fury_basic fire_transfer_basic assault_reflex_basic
ecclipse_blades 30 0.1 berserkgang_basic athletics_basic reaction_training_basic cold_weapon_wielding_basic cannibalism_basic carnage_basic
tifton_elite 30 0.1 heavy_weaponary_basic grenadier_basic selfhealing_basic stationary_defense_basic spray_and_pray_basic shock_awe_basic
tunnel_rats 30 0.1 cautious_basic handmade_shotgun_ammo_basic marauder_basic dirty_shot_basic vicious_symbiosis_basic covermaster_basic
phoenix_brigade 30 0.1 shielding_basic battle_physicist_basic reinforced_battery_basic revealing_flame_basic cauterize_basic scholar_basic
]]

-- Parse with headers and comments
local rows = fromCSV(original, { delimiter = "\t", hasheader = true, hascomments = true })
assert(#rows > 0, "Should have parsed rows")

-- Convert back to CSV with headers
local csv = toCSV(rows, { delimiter = "\t", hasheader = true })

-- Parse again
local rows2 = fromCSV(csv, { delimiter = "\t", hasheader = true, hascomments = false })

-- Verify identical - same number of rows
assert(#rows2 == #rows, "Round trip should have same number of rows")

-- Verify first row data is identical
assert(rows2[1].Id == rows[1].Id, "Round trip first row Id should match")
assert(
rows2[1].ModifyStartCost == rows[1].ModifyStartCost,
"Round trip first row ModifyStartCost should match"
)
assert(rows2[1].Health == rows[1].Health, "Round trip first row Health should match")

-- Verify headers are preserved
assert(rows2.Headers ~= nil, "Round trip rows should have Headers field")
assert(#rows2.Headers == #rows.Headers, "Headers should have same number of elements")
assert(rows2.Headers[1] == rows.Headers[1], "First header should match")
end)
|
||||
|
||||
-- Test metatable: row[1] and row.foobar return same value
test("metatable row[1] equals row.header", function()
    local parsed = fromCSV("Id\tName\tValue\n1\tTest\t100", { delimiter = "\t", hasheader = true })
    local row = parsed[1]
    assert(row[1] == row.Id, "row[1] should equal row.Id")
    assert(row[2] == row.Name, "row[2] should equal row.Name")
    assert(row[3] == row.Value, "row[3] should equal row.Value")
    assert(row.Id == "1", "row.Id should be '1'")
    assert(row[1] == "1", "row[1] should be '1'")
end)

-- Test metatable: setting via header name updates numeric index
test("metatable set via header name", function()
    local parsed = fromCSV("Id\tName\tValue\n1\tTest\t100", { delimiter = "\t", hasheader = true })
    parsed[1].Id = "999"
    assert(parsed[1][1] == "999", "Setting row.Id should update row[1]")
    assert(parsed[1].Id == "999", "row.Id should be '999'")
end)

-- Test metatable: error on unknown header assignment
test("metatable error on unknown header", function()
    local parsed = fromCSV("Id\tName\tValue\n1\tTest\t100", { delimiter = "\t", hasheader = true })
    local ok, errMsg = pcall(function() parsed[1].UnknownHeader = "test" end)
    assert(ok == false, "Should error on unknown header")
    assert(string.find(errMsg, "unknown header"), "Error should mention unknown header")
end)

-- Test metatable: numeric indices still work
test("metatable numeric indices work", function()
    local parsed = fromCSV("Id\tName\tValue\n1\tTest\t100", { delimiter = "\t", hasheader = true })
    parsed[1][1] = "999"
    assert(parsed[1].Id == "999", "Setting row[1] should update row.Id")
    assert(parsed[1][1] == "999", "row[1] should be '999'")
end)

-- Test metatable: numeric keys work normally
test("metatable numeric keys work", function()
    local parsed = fromCSV("Id\tName\tValue\n1\tTest\t100", { delimiter = "\t", hasheader = true })
    parsed[1][100] = "hundred"
    assert(parsed[1][100] == "hundred", "Numeric keys should work")
end)

print("\nAll tests completed!")
|
||||
756
processor/luahelper.lua
Normal file
756
processor/luahelper.lua
Normal file
@@ -0,0 +1,756 @@
|
||||
-- Custom Lua helpers for math operations
|
||||
|
||||
--- Returns the minimum of two numbers.
--- Thin wrapper around math.min exposed as a global convenience helper.
--- @param a number First number
--- @param b number Second number
--- @return number Minimum value
function min(a, b)
    return math.min(a, b)
end
|
||||
|
||||
--- Returns the maximum of two numbers.
--- Thin wrapper around math.max exposed as a global convenience helper.
--- @param a number First number
--- @param b number Second number
--- @return number Maximum value
function max(a, b)
    return math.max(a, b)
end
|
||||
|
||||
--- Rounds a number to n decimal places
|
||||
--- @param x number Number to round
|
||||
--- @param n number? Number of decimal places (default: 0)
|
||||
--- @return number Rounded number
|
||||
function round(x, n)
|
||||
if n == nil then n = 0 end
|
||||
return math.floor(x * 10 ^ n + 0.5) / 10 ^ n
|
||||
end
|
||||
|
||||
--- Returns the floor of a number
|
||||
--- @param x number Number to floor
|
||||
--- @return number Floored number
|
||||
function floor(x) return math.floor(x) end
|
||||
|
||||
--- Returns the ceiling of a number
|
||||
--- @param x number Number to ceil
|
||||
--- @return number Ceiled number
|
||||
function ceil(x) return math.ceil(x) end
|
||||
|
||||
--- Converts string to uppercase
|
||||
--- @param s string String to convert
|
||||
--- @return string Uppercase string
|
||||
function upper(s) return string.upper(s) end
|
||||
|
||||
--- Converts string to lowercase
|
||||
--- @param s string String to convert
|
||||
--- @return string Lowercase string
|
||||
function lower(s) return string.lower(s) end
|
||||
|
||||
--- Formats a string using Lua string.format
|
||||
--- @param s string Format string
|
||||
--- @param ... any Values to format
|
||||
--- @return string Formatted string
|
||||
function format(s, ...) return string.format(s, ...) end
|
||||
|
||||
--- Prints a formatted string using print(string.format(...))
|
||||
--- @param fmt string Format string
|
||||
--- @param ... any Values to format
|
||||
function printf(fmt, ...) print(string.format(fmt, ...)) end
|
||||
|
||||
--- Removes leading and trailing whitespace from string.
--- The gsub call is parenthesized so only the trimmed string is
--- returned: string.gsub also returns the substitution count, and
--- without the parentheses that count leaks into multi-value contexts
--- such as table.insert(t, trim(s)).
--- @param s string String to trim
--- @return string Trimmed string
function trim(s) return (string.gsub(s, "^%s*(.-)%s*$", "%1")) end
|
||||
|
||||
--- Splits a string by separator
--- Consecutive separators are collapsed (captures are the runs of
--- non-separator characters), so no empty parts are produced.
--- @param inputstr string String to split
--- @param sep string? Separator pattern (default: whitespace)
--- @return table Array of string parts
function strsplit(inputstr, sep)
    local pattern = "([^" .. (sep or "%s") .. "]+)"
    local parts = {}
    for piece in string.gmatch(inputstr, pattern) do
        parts[#parts + 1] = piece
    end
    return parts
end
|
||||
|
||||
--- Recursively prints the key/value structure of a table to stdout,
--- indenting one space per nesting level.
---@param t table
---@param depth number?
function dump(t, depth)
    depth = depth or 0
    -- Guard against runaway recursion on cyclic or extremely deep tables.
    if depth > 200 then
        print("Error: Depth > 200 in dump()")
        return
    end
    local indent = string.rep(" ", depth)
    for key, value in pairs(t) do
        if type(value) == "table" then
            print(indent .. key .. ":")
            dump(value, depth + 1)
        else
            print(indent .. key .. ": ", value)
        end
    end
end
|
||||
|
||||
--- @class ParserOptions
--- @field delimiter string? The field delimiter (default: ",").
--- @field hasheader boolean? If true, first non-comment row is treated as headers (default: false).
--- @field hascomments boolean? If true, lines starting with '#' are skipped (default: false).

--- Default parser options. Doubles as the registry of recognized option
--- keys: areOptionsValid rejects any key not present in this table.
--- @type ParserOptions
parserDefaultOptions = { delimiter = ",", hasheader = false, hascomments = false }
|
||||
|
||||
--- Validates options against the set of valid option keys.
--- Valid keys are exactly those present in parserDefaultOptions.
--- Accepts nil (meaning "use defaults"); raises an error for a
--- non-table value or for any unrecognized key.
--- @param options ParserOptions? The options table to validate
function areOptionsValid(options)
    if options == nil then return end

    if type(options) ~= "table" then error("options must be a table") end

    for k, _ in pairs(options) do
        if parserDefaultOptions[k] == nil then
            -- Build the list of valid keys only on failure, sorted for a
            -- deterministic message, and joined without the trailing
            -- ", " the previous concatenation left in the error text.
            local validKeys = {}
            for name, _ in pairs(parserDefaultOptions) do
                table.insert(validKeys, name)
            end
            table.sort(validKeys)
            error(
                "unknown option: " .. tostring(k)
                    .. " (valid options: " .. table.concat(validKeys, ", ") .. ")"
            )
        end
    end
end
|
||||
|
||||
--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
---
--- Requirements/assumptions:
--- - Input is a single string containing the entire CSV content.
--- - Field separators are specified by delimiter option (default: comma).
---   (The character-by-character comparison assumes a single-character
---   delimiter — TODO confirm multi-character delimiters are unsupported.)
--- - Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.
--- - Fields may be quoted with double quotes (").
--- - Inside quoted fields, doubled quotes ("") represent a literal quote character.
--- - No backslash escaping is supported (not part of RFC 4180).
--- - Newlines inside quoted fields are preserved as part of the field.
--- - Leading/trailing spaces are preserved; no trimming is performed.
--- - Empty fields and empty rows are preserved.
--- - The final row is emitted even if the text does not end with a newline.
--- - Lines starting with '#' (after optional leading whitespace) are treated as comments and skipped if hascomments is true.
---
--- @param csv string The CSV text to parse.
--- @param options ParserOptions? Options for the parser
--- @return table #A table (array) of rows; each row is a table with numeric indices and optionally header-named keys.
function fromCSV(csv, options)
    if options == nil then options = {} end

    -- Validate options
    areOptionsValid(options)

    local delimiter = options.delimiter or parserDefaultOptions.delimiter
    local hasheader = options.hasheader or parserDefaultOptions.hasheader
    local hascomments = options.hascomments or parserDefaultOptions.hascomments

    -- allRows: completed rows; fields: cells of the row being built;
    -- field: characters of the cell being built (joined via table.concat).
    local allRows = {}
    local fields = {}
    local field = {}

    -- Three-state scanner: outside quotes, inside quotes, and "just saw
    -- a quote while inside quotes" (which disambiguates "" vs end-quote).
    local STATE_DEFAULT = 1
    local STATE_IN_QUOTES = 2
    local STATE_QUOTE_IN_QUOTES = 3
    local state = STATE_DEFAULT

    local i = 1
    local len = #csv

    while i <= len do
        local c = csv:sub(i, i)

        if state == STATE_DEFAULT then
            if c == '"' then
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                -- Cell boundary: flush the accumulated characters.
                table.insert(fields, table.concat(field))
                field = {}
                i = i + 1
            elseif c == "\r" or c == "\n" then
                -- Row boundary: flush the last cell, then decide whether
                -- the row is a '#' comment to drop.
                table.insert(fields, table.concat(field))
                field = {}
                local shouldAdd = true
                if hascomments and #fields > 0 then
                    -- Only the row's first cell is inspected for '#'.
                    local firstField = fields[1]
                    local trimmed = trim(firstField)
                    if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
                end
                if shouldAdd then table.insert(allRows, fields) end
                fields = {}
                -- Consume "\r\n" as a single line break.
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                table.insert(field, c)
                i = i + 1
            end
        elseif state == STATE_IN_QUOTES then
            if c == '"' then
                -- Could be the closing quote or the first half of "".
                state = STATE_QUOTE_IN_QUOTES
                i = i + 1
            else
                table.insert(field, c)
                i = i + 1
            end
        else -- STATE_QUOTE_IN_QUOTES
            if c == '"' then
                -- "" inside quotes: emit one literal quote, stay quoted.
                table.insert(field, '"')
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                table.insert(fields, table.concat(field))
                field = {}
                state = STATE_DEFAULT
                i = i + 1
            elseif c == "\r" or c == "\n" then
                table.insert(fields, table.concat(field))
                field = {}
                local shouldAdd = true
                if hascomments and #fields > 0 then
                    local firstField = fields[1]
                    local trimmed = string.gsub(firstField, "^%s*(.-)%s*$", "%1")
                    if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
                end
                if shouldAdd then table.insert(allRows, fields) end
                fields = {}
                state = STATE_DEFAULT
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                state = STATE_DEFAULT
                -- Don't increment i, reprocess character in DEFAULT state
            end
        end
    end

    -- Emit the final row when the input does not end with a newline.
    if #field > 0 or #fields > 0 then
        table.insert(fields, table.concat(field))
        local shouldAdd = true
        if hascomments and #fields > 0 then
            local firstField = fields[1]
            local trimmed = string.gsub(firstField, "^%s*(.-)%s*$", "%1")
            if string.sub(trimmed, 1, 1) == "#" then shouldAdd = false end
        end
        if shouldAdd then table.insert(allRows, fields) end
    end

    if hasheader and #allRows > 0 then
        -- Map trimmed header names to their column indices. Note the
        -- returned rows.Headers keeps the untrimmed names.
        local headers = allRows[1]
        local headerMap = {}
        for j = 1, #headers do
            if headers[j] ~= nil and headers[j] ~= "" then
                local headerName = trim(headers[j])
                headerMap[headerName] = j
            end
        end

        -- Shared metatable: string keys matching a header alias the
        -- corresponding numeric cell; assigning an unknown header errors.
        local header_mt = {
            headers = headerMap,
            __index = function(t, key)
                local mt = getmetatable(t)
                if type(key) == "string" and mt.headers and mt.headers[key] then
                    return rawget(t, mt.headers[key])
                end
                return rawget(t, key)
            end,
            __newindex = function(t, key, value)
                local mt = getmetatable(t)
                if type(key) == "string" and mt.headers then
                    if mt.headers[key] then
                        rawset(t, mt.headers[key], value)
                    else
                        error("unknown header: " .. tostring(key))
                    end
                else
                    rawset(t, key, value)
                end
            end,
        }

        -- Data rows start at index 2 (row 1 was the header row).
        local rows = {}
        for ii = 2, #allRows do
            local row = {}
            local dataRow = allRows[ii]
            for j = 1, #dataRow do
                row[j] = dataRow[j]
            end
            setmetatable(row, header_mt)
            table.insert(rows, row)
        end
        rows.Headers = headers
        return rows
    end

    return allRows
end
|
||||
|
||||
--- Converts a table of rows back to CSV text format (RFC 4180 compliant).
---
--- Requirements:
--- - Input is a table (array) of rows, where each row is a table (array) of field values.
--- - Field values are converted to strings using tostring().
--- - Fields are quoted if they contain the delimiter, newlines, or double quotes.
--- - Double quotes inside quoted fields are doubled ("").
--- - Fields are joined with the specified delimiter; rows are joined with newlines.
--- - If includeHeaders is true and rows have a Headers field, headers are included as the first row.
---
--- @param rows table Array of rows, where each row is an array of field values.
--- @param options ParserOptions? Options for the parser
--- @return string #CSV-formatted text
function toCSV(rows, options)
    if options == nil then options = {} end

    -- Validate options
    areOptionsValid(options)

    local delimiter = options.delimiter or parserDefaultOptions.delimiter
    local includeHeaders = options.hasheader or parserDefaultOptions.hasheader

    -- Quotes one cell per RFC 4180. The delimiter is matched with a
    -- plain (non-pattern) find: previously `find(delimiter)` treated the
    -- delimiter as a Lua pattern, so delimiters containing magic
    -- characters ("." quoted every field; "%" raised a pattern error)
    -- misbehaved.
    local function quoteField(value)
        local s = tostring(value)
        if
            s:find(delimiter, 1, true)
            or s:find("\n", 1, true)
            or s:find("\r", 1, true)
            or s:find('"', 1, true)
        then
            s = '"' .. s:gsub('"', '""') .. '"'
        end
        return s
    end

    local rowStrings = {}

    -- Include headers row if requested and available
    if includeHeaders and #rows > 0 and rows.Headers ~= nil then
        local headerStrings = {}
        for _, header in ipairs(rows.Headers) do
            table.insert(headerStrings, quoteField(header))
        end
        table.insert(rowStrings, table.concat(headerStrings, delimiter))
    end

    for _, row in ipairs(rows) do
        local fieldStrings = {}
        for _, field in ipairs(row) do
            table.insert(fieldStrings, quoteField(field))
        end
        table.insert(rowStrings, table.concat(fieldStrings, delimiter))
    end

    return table.concat(rowStrings, "\n")
end
|
||||
|
||||
--- Converts string to number, returns 0 if invalid
--- @param str string String to convert
--- @return number Numeric value or 0
function num(str)
    local n = tonumber(str)
    if n == nil then return 0 end
    return n
end

--- Converts number to string
--- @param num number Number to convert
--- @return string String representation
function str(num)
    local s = tostring(num)
    return s
end

--- Checks if string is numeric
--- @param str string String to check
--- @return boolean True if string is numeric
function is_number(str)
    if tonumber(str) == nil then return false end
    return true
end
|
||||
|
||||
--- Checks if table is a sequential array (1-indexed with no gaps)
--- An empty table counts as an array; any non-positive-integer key
--- disqualifies the table immediately.
--- @param t table Table to check
--- @return boolean True if table is an array
function isArray(t)
    if type(t) ~= "table" then return false end
    local highest, total = 0, 0
    for key in pairs(t) do
        local isPositiveInt = type(key) == "number"
            and key >= 1
            and math.floor(key) == key
        if not isPositiveInt then return false end
        if key > highest then highest = key end
        total = total + 1
    end
    -- Dense 1..n keys iff the largest key equals the key count.
    return highest == total
end
|
||||
|
||||
-- ============================================================================
-- PRIMITIVE WRAPPER HELPERS
--
-- Wraps primitive values (strings/numbers/booleans) in tables carrying a
-- metatable that forwards arithmetic/comparison/concat operators, so
-- scripts can treat string-typed numeric data as numbers. Tables are
-- instrumented recursively so values assigned later get wrapped too.
-- ============================================================================

-- When true, all wrapping is bypassed and values pass through untouched.
local __chef_raw_mode = false

-- True iff v is one of our wrapper tables (marker field __chef_wrapped).
local function __chef_is_wrapped(v)
    return type(v) == "table" and rawget(v, "__chef_wrapped") == true
end

-- Returns the coerced primitive inside a wrapper, or v unchanged.
local function __chef_unbox(v)
    if __chef_is_wrapped(v) then return rawget(v, "val") end
    return v
end

-- Numeric-looking strings become numbers; everything else is unchanged.
local function __chef_coerce_for_val(v)
    if type(v) == "string" then
        local n = tonumber(v)
        if n ~= nil then return n end
    end
    return v
end

-- Operator metatable for wrapped primitives: every metamethod unboxes
-- both operands first, so wrapped and plain values mix freely.
local __chef_value_mt = {
    __add = function(a, b) return __chef_unbox(a) + __chef_unbox(b) end,
    __sub = function(a, b) return __chef_unbox(a) - __chef_unbox(b) end,
    __mul = function(a, b) return __chef_unbox(a) * __chef_unbox(b) end,
    __div = function(a, b) return __chef_unbox(a) / __chef_unbox(b) end,
    __mod = function(a, b) return __chef_unbox(a) % __chef_unbox(b) end,
    __pow = function(a, b) return __chef_unbox(a) ^ __chef_unbox(b) end,
    __unm = function(a) return -__chef_unbox(a) end,
    __eq = function(a, b) return __chef_unbox(a) == __chef_unbox(b) end,
    __lt = function(a, b) return __chef_unbox(a) < __chef_unbox(b) end,
    __le = function(a, b) return __chef_unbox(a) <= __chef_unbox(b) end,
    __concat = function(a, b) return tostring(__chef_unbox(a)) .. tostring(__chef_unbox(b)) end,
    __len = function(a) return #__chef_unbox(a) end,
    __tostring = function(a) return tostring(__chef_unbox(a)) end,
}

-- Weak-keyed set of already-instrumented tables (prevents infinite
-- recursion on cycles and lets instrumented tables be collected).
local __chef_seen_tables = setmetatable({}, { __mode = "k" })
local __chef_table_mt = {
    -- NOTE(review): __newindex only fires for keys absent from the
    -- table, so overwriting an existing raw key bypasses wrapping —
    -- confirm that is acceptable.
    __newindex = function(t, k, v)
        rawset(t, k, __chef_wrap_value(v))
    end,
}

-- Boxes a single primitive into a wrapper table. Keeps both the coerced
-- value (val) and the original (raw), plus pristine copies for
-- modification detection.
function __chef_wrap_primitive(v)
    if __chef_raw_mode then return v end
    if v == nil then return nil end
    if type(v) == "table" then return v end

    local wrapped = {
        __chef_wrapped = true,
    }
    wrapped.val = __chef_coerce_for_val(v)
    wrapped.raw = v
    wrapped.__orig_val = wrapped.val
    wrapped.__orig_raw = wrapped.raw
    return setmetatable(wrapped, __chef_value_mt)
end

-- Wraps any value: tables are instrumented, primitives are boxed,
-- already-wrapped values and nil pass through.
function __chef_wrap_value(v)
    if __chef_raw_mode then return v end
    if v == nil then return nil end
    if __chef_is_wrapped(v) then return v end
    if type(v) == "table" then
        return __chef_instrument_table(v)
    end
    return __chef_wrap_primitive(v)
end

-- Recursively wraps every value in t (in place) and installs the
-- instrumenting metatable so future assignments are wrapped too.
function __chef_instrument_table(t)
    if __chef_raw_mode then return t end
    if type(t) ~= "table" then return t end
    if __chef_is_wrapped(t) then return t end
    if __chef_seen_tables[t] then return t end
    __chef_seen_tables[t] = true

    for k, v in pairs(t) do
        rawset(t, k, __chef_wrap_value(v))
    end
    setmetatable(t, __chef_table_mt)
    return t
end

-- Entry point: sets the global raw-mode flag and instruments the root
-- data table unless raw mode was requested.
function __chef_prepare_data(root, raw_mode)
    __chef_raw_mode = raw_mode == true
    if __chef_raw_mode then return root end
    if type(root) == "table" then return __chef_instrument_table(root) end
    return root
end

-- Global flag scripts may set to signal that data was changed;
-- presumably read by the host after script execution — TODO confirm.
modified = false
|
||||
|
||||
-- ============================================================================
|
||||
-- XML HELPER FUNCTIONS
|
||||
-- ============================================================================
|
||||
|
||||
--- Accessors that tolerate both naming conventions for XML element
--- fields: plain (tag/attr/text/children) and underscore-prefixed
--- (_tag/_attr/_text/_children).

--- Returns the element's tag name (tag or _tag).
local function xmlTag(element)
    return element.tag or element._tag
end

--- Returns the element's child list (children or _children).
local function xmlChildren(element)
    return element.children or element._children
end

--- Returns the element's attribute table, first mirroring whichever of
--- attr/_attr exists onto the other so both aliases refer to the same
--- table afterwards.
local function xmlAttr(element)
    if element.attr == nil and element._attr ~= nil then element.attr = element._attr end
    if element._attr == nil and element.attr ~= nil then element._attr = element.attr end
    return element.attr or element._attr
end
|
||||
|
||||
--- Find all elements with a specific tag name (recursive search).
--- Visits the root and all descendants depth-first, preorder, so
--- matches appear in document order.
--- @param root table The root XML element (with tag, attr, children fields)
--- @param tagName string The tag name to search for
--- @return table Array of matching elements
function findElements(root, tagName)
    local matches = {}

    local function walk(node)
        if xmlTag(node) == tagName then
            matches[#matches + 1] = node
        end
        local kids = xmlChildren(node)
        if kids then
            for _, kid in ipairs(kids) do
                walk(kid)
            end
        end
    end

    walk(root)
    return matches
end
|
||||
|
||||
--- Visit all elements recursively and call a function on each
--- Traversal is depth-first, preorder (parent before children). The
--- path string looks like "/root/child[1]/grandchild[2]", where the
--- bracketed number is the child's position among ALL siblings.
--- @param root table The root XML element
--- @param callback function Function to call with each element: callback(element, depth, path)
function visitElements(root, callback)
    local function visit(element, depth, path)
        callback(element, depth, path)
        local children = xmlChildren(element)
        if children then
            for i, child in ipairs(children) do
                local childPath = path .. "/" .. xmlTag(child) .. "[" .. i .. "]"
                visit(child, depth + 1, childPath)
            end
        end
    end
    -- Root is visited at depth 0 with path "/<root tag>".
    visit(root, 0, "/" .. xmlTag(root))
end
|
||||
|
||||
--- Get numeric value from XML element attribute
--- @param element table XML element with _attr field
--- @param attrName string Attribute name
--- @return number|nil The numeric value or nil if not found/not numeric
function getNumAttr(element, attrName)
    local attrs = xmlAttr(element)
    if not attrs then return nil end
    local raw = attrs[attrName]
    if not raw then return nil end
    return tonumber(raw)
end

--- Set numeric value to XML element attribute
--- The value is stored as a string; a missing attribute table is
--- created and registered under both naming conventions.
--- @param element table XML element with _attr field
--- @param attrName string Attribute name
--- @param value number Numeric value to set
function setNumAttr(element, attrName, value)
    local attrs = xmlAttr(element)
    if not attrs then
        attrs = {}
        element.attr = attrs
        element._attr = attrs
    end
    attrs[attrName] = tostring(value)
end

--- Modify numeric attribute by applying a function
--- No-op (returns false) when the attribute is missing or not numeric.
--- @param element table XML element
--- @param attrName string Attribute name
--- @param func function Function that takes current value and returns new value
--- @return boolean True if modification was made
function modifyNumAttr(element, attrName, func)
    local current = getNumAttr(element, attrName)
    if current == nil then return false end
    setNumAttr(element, attrName, func(current))
    return true
end
|
||||
|
||||
--- Find all elements matching a predicate function
--- Traverses via visitElements, so results are in document order.
--- @param root table The root XML element
--- @param predicate function Function that takes element and returns true/false
--- @return table Array of matching elements
function filterElements(root, predicate)
    local matched = {}
    visitElements(root, function(el)
        if predicate(el) then
            matched[#matched + 1] = el
        end
    end)
    return matched
end
|
||||
|
||||
--- Get text content of an element
--- Checks the plain field first, then the underscore-prefixed alias.
--- @param element table XML element
--- @return string|nil The text content or nil
function getText(element)
    local value = element.text
    if not value then value = element._text end
    return value
end

--- Set text content of an element
--- Writes both naming conventions so either accessor sees the update.
--- @param element table XML element
--- @param text string Text content to set
function setText(element, text)
    element.text = text
    element._text = text
end
|
||||
|
||||
--- Check if element has an attribute
--- Always returns a real boolean: the previous `attrs and ...` form
--- returned nil (not false) when the element had no attribute table,
--- contradicting the documented boolean return.
--- @param element table XML element
--- @param attrName string Attribute name
--- @return boolean True if attribute exists
function hasAttr(element, attrName)
    local attrs = xmlAttr(element)
    if not attrs then return false end
    return attrs[attrName] ~= nil
end
|
||||
|
||||
--- Get attribute value as string
--- @param element table XML element
--- @param attrName string Attribute name
--- @return string|nil The attribute value or nil
function getAttr(element, attrName)
    local attributeTable = xmlAttr(element)
    if not attributeTable then return nil end
    return attributeTable[attrName]
end

--- Set attribute value
--- Creates the attribute table on demand, registering it under both
--- naming conventions; the value is stored via tostring().
--- @param element table XML element
--- @param attrName string Attribute name
--- @param value any Value to set (will be converted to string)
function setAttr(element, attrName, value)
    local attributeTable = xmlAttr(element)
    if not attributeTable then
        attributeTable = {}
        element.attr = attributeTable
        element._attr = attributeTable
    end
    attributeTable[attrName] = tostring(value)
end
|
||||
|
||||
--- Find first element with a specific tag name (searches direct children only)
--- @param parent table The parent XML element
--- @param tagName string The tag name to search for
--- @return table|nil The first matching element or nil
function findFirstElement(parent, tagName)
    local kids = xmlChildren(parent)
    if not kids then return nil end
    for _, kid in ipairs(kids) do
        if xmlTag(kid) == tagName then
            return kid
        end
    end
    return nil
end
|
||||
|
||||
--- Add a child element to a parent
--- Creates the _children list on first use, then appends.
--- @param parent table The parent XML element
--- @param child table The child element to add
function addChild(parent, child)
    local list = parent._children
    if not list then
        list = {}
        parent._children = list
    end
    list[#list + 1] = child
end
|
||||
|
||||
--- Remove all children with a specific tag name
--- Compacts the _children list in place (single pass), preserving the
--- relative order of the surviving children.
--- @param parent table The parent XML element
--- @param tagName string The tag name to remove
--- @return number Count of removed children
function removeChildren(parent, tagName)
    local list = parent._children
    if not list then return 0 end
    local total = #list
    local write = 1
    for read = 1, total do
        local child = list[read]
        if child._tag ~= tagName then
            list[write] = child
            write = write + 1
        end
    end
    -- Clear the now-stale tail slots.
    for slot = write, total do
        list[slot] = nil
    end
    return total - (write - 1)
end
|
||||
|
||||
--- Get all direct children with a specific tag name
--- @param parent table The parent XML element
--- @param tagName string The tag name to search for
--- @return table Array of matching children
function getChildren(parent, tagName)
    local matches = {}
    local list = parent._children
    if not list then return matches end
    for _, child in ipairs(list) do
        if child._tag == tagName then
            matches[#matches + 1] = child
        end
    end
    return matches
end
|
||||
|
||||
--- Count children with a specific tag name
--- @param parent table The parent XML element
--- @param tagName string The tag name to count
--- @return number Count of matching children
function countChildren(parent, tagName)
    local list = parent._children
    if not list then return 0 end
    local total = 0
    for _, child in ipairs(list) do
        if child._tag == tagName then
            total = total + 1
        end
    end
    return total
end
|
||||
|
||||
-- ============================================================================
-- JSON HELPER FUNCTIONS
-- ============================================================================

--- Recursively visit all values in a JSON structure
--- Preorder walk: the callback sees a value before its contents. The
--- root is reported with key == nil and parent == nil.
--- @param data table JSON data (nested tables)
--- @param callback function Function called with (value, key, parent)
function visitJSON(data, callback)
    local function walk(value, key, parent)
        callback(value, key, parent)
        if type(value) ~= "table" then return end
        for k, v in pairs(value) do
            walk(v, k, value)
        end
    end
    walk(data, nil, nil)
end
|
||||
|
||||
--- Find all values in JSON matching a predicate
--- The root value itself is also tested.
--- @param data table JSON data
--- @param predicate function Function that takes (value, key, parent) and returns true/false
--- @return table Array of matching values
function findInJSON(data, predicate)
    local hits = {}
    visitJSON(data, function(value, key, parent)
        if predicate(value, key, parent) then
            hits[#hits + 1] = value
        end
    end)
    return hits
end
|
||||
|
||||
--- Modify all numeric values in JSON matching a condition
--- The root itself is never replaced (it has no parent/key), only
--- numeric values reachable through some container.
--- @param data table JSON data
--- @param predicate function Function that takes (value, key, parent) and returns true/false
--- @param modifier function Function that takes current value and returns new value
function modifyJSONNumbers(data, predicate, modifier)
    visitJSON(data, function(value, key, parent)
        if type(value) ~= "number" then return end
        if not predicate(value, key, parent) then return end
        if parent and key then
            parent[key] = modifier(value)
        end
    end)
end
|
||||
27
processor/meta.go
Normal file
27
processor/meta.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package processor

import (
	_ "embed"
	"fmt"
	"os"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

// metaFileContent holds the meta.lua LuaLS annotation stubs, baked into
// the binary at compile time from the sibling meta.lua file.
//
//go:embed meta.lua
var metaFileContent string

// GenerateMetaFile generates meta.lua with function signatures for LuaLS autocomplete.
// It writes the embedded stub file verbatim to outputPath with 0644
// permissions and returns a wrapped error on failure.
func GenerateMetaFile(outputPath string) error {
	logger.Info("Generating meta.lua file for LuaLS autocomplete")

	// Write the embedded meta file
	err := os.WriteFile(outputPath, []byte(metaFileContent), 0644)
	if err != nil {
		// NOTE(review): the error is both logged and returned wrapped;
		// consider handling it at exactly one layer.
		logger.Error("Failed to write meta.lua: %v", err)
		return fmt.Errorf("failed to write meta.lua: %w", err)
	}

	logger.Info("Successfully generated meta.lua at %q", outputPath)
	return nil
}
|
||||
286
processor/meta.lua
Normal file
286
processor/meta.lua
Normal file
@@ -0,0 +1,286 @@
|
||||
---@meta
|
||||
|
||||
---@class ParserOptions
|
||||
---@field delimiter string? The field delimiter (default: ",").
|
||||
---@field hasheader boolean? If true, first non-comment row is treated as headers (default: false).
|
||||
---@field hascomments boolean? If true, lines starting with '#' are skipped (default: false).
|
||||
|
||||
---@class XMLElement
|
||||
---@field _tag string The XML tag name
|
||||
---@field _attr {[string]: string}? XML attributes as key-value pairs
|
||||
---@field _text string? Text content of the element
|
||||
---@field _children XMLElement[]? Child elements
|
||||
---@class WrappedPrimitive
|
||||
---@field val string|number|boolean|nil Coerced primitive value
|
||||
---@field raw string|number|boolean|nil Raw primitive value
|
||||
---@operator add(WrappedPrimitive|string|number|boolean): string|number
|
||||
---@operator sub(WrappedPrimitive|string|number|boolean): number
|
||||
---@operator mul(WrappedPrimitive|string|number|boolean): number
|
||||
---@operator div(WrappedPrimitive|string|number|boolean): number
|
||||
---@operator mod(WrappedPrimitive|string|number|boolean): number
|
||||
---@operator pow(WrappedPrimitive|string|number|boolean): number
|
||||
---@field tag string? XML tag name
|
||||
---@field attr {[string]: WrappedPrimitive}? XML attributes as wrapped primitive values
|
||||
---@field text WrappedPrimitive? Wrapped text content value
|
||||
---@field children XMLElement[]? Child elements
|
||||
---@field local string? Element local name
|
||||
---@field uri string? Element namespace URI
|
||||
---@field prefix string? Element namespace prefix
|
||||
---@field name { local: string, uri: string, prefix: string }? Namespace-aware name info
|
||||
|
||||
---@class JSONNode
|
||||
---@field [string] WrappedPrimitive | JSONNode | JSONArray | string | number | boolean | nil JSON object fields
|
||||
---@alias JSONArray (WrappedPrimitive | JSONNode | string | number | boolean | nil)[]
|
||||
|
||||
---@class CSVRow
|
||||
---@field [integer] string Numeric indices for field access
|
||||
---@field Headers string[]? Header row if hasheader was true
|
||||
|
||||
--- Returns the minimum of two numbers
|
||||
---@param a number First number
|
||||
---@param b number Second number
|
||||
---@return number #Minimum value
|
||||
function min(a, b) end
|
||||
|
||||
--- Returns the maximum of two numbers
|
||||
---@param a number First number
|
||||
---@param b number Second number
|
||||
---@return number #Maximum value
|
||||
function max(a, b) end
|
||||
|
||||
--- Rounds a number to n decimal places
|
||||
---@param x number Number to round
|
||||
---@param n number? Number of decimal places (default: 0)
|
||||
---@return number #Rounded number
|
||||
function round(x, n) end
|
||||
|
||||
--- Returns the floor of a number
|
||||
---@param x number Number to floor
|
||||
---@return number #Floored number
|
||||
function floor(x) end
|
||||
|
||||
--- Returns the ceiling of a number
|
||||
---@param x number Number to ceil
|
||||
---@return number #Ceiled number
|
||||
function ceil(x) end
|
||||
|
||||
--- Converts string to uppercase
|
||||
---@param s string String to convert
|
||||
---@return string #Uppercase string
|
||||
function upper(s) end
|
||||
|
||||
--- Converts string to lowercase
|
||||
---@param s string String to convert
|
||||
---@return string #Lowercase string
|
||||
function lower(s) end
|
||||
|
||||
--- Formats a string using Lua string.format
|
||||
---@param s string Format string
|
||||
---@param ... any Values to format
|
||||
---@return string #Formatted string
|
||||
function format(s, ...) end
|
||||
|
||||
--- Removes leading and trailing whitespace from string
|
||||
---@param s string String to trim
|
||||
---@return string #Trimmed string
|
||||
function trim(s) end
|
||||
|
||||
--- Splits a string by separator
|
||||
---@param inputstr string String to split
|
||||
---@param sep string? Separator pattern (default: whitespace)
|
||||
---@return string[] #Array of string parts
|
||||
function strsplit(inputstr, sep) end
|
||||
|
||||
--- Prints table structure recursively
|
||||
---@param table {[any]: any} Table to dump
|
||||
---@param depth number? Current depth (default: 0)
|
||||
function dump(table, depth) end
|
||||
|
||||
--- Validates options against a set of valid option keys.
|
||||
---@param options ParserOptions? The options table to validate
|
||||
function areOptionsValid(options) end
|
||||
|
||||
--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
|
||||
--- Requirements/assumptions:<br>
|
||||
--- Input is a single string containing the entire CSV content.<br>
|
||||
--- Field separators are specified by delimiter option (default: comma).<br>
|
||||
--- Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.<br>
|
||||
--- Fields may be quoted with double quotes (").<br>
|
||||
--- Inside quoted fields, doubled quotes ("") represent a literal quote character.<br>
|
||||
--- No backslash escaping is supported (not part of RFC 4180).<br>
|
||||
--- Newlines inside quoted fields are preserved as part of the field.<br>
|
||||
--- Leading/trailing spaces are preserved; no trimming is performed.<br>
|
||||
--- Empty fields and empty rows are preserved.<br>
|
||||
--- The final row is emitted even if the text does not end with a newline.<br>
|
||||
--- Lines starting with '#' (after optional leading whitespace) are treated as comments and skipped if hascomments is true.<br>
|
||||
---@param csv string The CSV text to parse.
|
||||
---@param options ParserOptions? Options for the parser
|
||||
---@return CSVRow[] #A table (array) of rows; each row is a table with numeric indices and optionally header-named keys.
|
||||
function fromCSV(csv, options) end
|
||||
|
||||
--- Converts a table of rows back to CSV text format (RFC 4180 compliant).<br>
|
||||
--- Requirements:<br>
|
||||
--- Input is a table (array) of rows, where each row is a table (array) of field values.<br>
|
||||
--- Field values are converted to strings using tostring().<br>
|
||||
--- Fields are quoted if they contain the delimiter, newlines, or double quotes.<br>
|
||||
--- Double quotes inside quoted fields are doubled ("").<br>
|
||||
--- Fields are joined with the specified delimiter; rows are joined with newlines.<br>
|
||||
--- If includeHeaders is true and rows have a Headers field, headers are included as the first row.<br>
|
||||
---@param rows CSVRow[] Array of rows, where each row is an array of field values.
|
||||
---@param options ParserOptions? Options for the parser
|
||||
---@return string #CSV-formatted text
|
||||
function toCSV(rows, options) end
|
||||
|
||||
--- Converts string to number, returns 0 if invalid
|
||||
---@param str string String to convert
|
||||
---@return number #Numeric value or 0
|
||||
function num(str) end
|
||||
|
||||
--- Converts number to string
|
||||
---@param num number Number to convert
|
||||
---@return string #String representation
|
||||
function str(num) end
|
||||
|
||||
--- Checks if string is numeric
|
||||
---@param str string String to check
|
||||
---@return boolean #True if string is numeric
|
||||
function is_number(str) end
|
||||
|
||||
--- Checks if table is a sequential array (1-indexed with no gaps)
|
||||
---@param t {[integer]: any} Table to check
|
||||
---@return boolean #True if table is an array
|
||||
function isArray(t) end
|
||||
|
||||
--- Find all elements with a specific tag name (recursive search)
|
||||
---@param root XMLElement The root XML element (with _tag, _attr, _children fields)
|
||||
---@param tagName string The tag name to search for
|
||||
---@return XMLElement[] #Array of matching elements
|
||||
function findElements(root, tagName) end
|
||||
|
||||
--- Visit all elements recursively and call a function on each
|
||||
---@param root XMLElement The root XML element
|
||||
---@param callback fun(element: XMLElement, depth: number, path: string) Function to call with each element
|
||||
function visitElements(root, callback) end
|
||||
|
||||
--- Get numeric value from XML element attribute
|
||||
---@param element XMLElement XML element with _attr field
|
||||
---@param attrName string Attribute name
|
||||
---@return number? #The numeric value or nil if not found/not numeric
|
||||
function getNumAttr(element, attrName) end
|
||||
|
||||
--- Set numeric value to XML element attribute
|
||||
---@param element XMLElement XML element with _attr field
|
||||
---@param attrName string Attribute name
|
||||
---@param value number Numeric value to set
|
||||
function setNumAttr(element, attrName, value) end
|
||||
|
||||
--- Modify numeric attribute by applying a function
|
||||
---@param element XMLElement XML element
|
||||
---@param attrName string Attribute name
|
||||
---@param func fun(currentValue: number): number Function that takes current value and returns new value
|
||||
---@return boolean #True if modification was made
|
||||
function modifyNumAttr(element, attrName, func) end
|
||||
|
||||
--- Find all elements matching a predicate function
|
||||
---@param root XMLElement The root XML element
|
||||
---@param predicate fun(element: XMLElement): boolean Function that takes element and returns true/false
|
||||
---@return XMLElement[] #Array of matching elements
|
||||
function filterElements(root, predicate) end
|
||||
|
||||
--- Get text content of an element
|
||||
---@param element XMLElement XML element
|
||||
---@return string? #The text content or nil
|
||||
function getText(element) end
|
||||
|
||||
--- Set text content of an element
|
||||
---@param element XMLElement XML element
|
||||
---@param text string Text content to set
|
||||
function setText(element, text) end
|
||||
|
||||
--- Check if element has an attribute
|
||||
---@param element XMLElement XML element
|
||||
---@param attrName string Attribute name
|
||||
---@return boolean #True if attribute exists
|
||||
function hasAttr(element, attrName) end
|
||||
|
||||
--- Get attribute value as string
|
||||
---@param element XMLElement XML element
|
||||
---@param attrName string Attribute name
|
||||
---@return string? #The attribute value or nil
|
||||
function getAttr(element, attrName) end
|
||||
|
||||
--- Set attribute value
|
||||
---@param element XMLElement XML element
|
||||
---@param attrName string Attribute name
|
||||
---@param value string | number | boolean Value to set (will be converted to string)
|
||||
function setAttr(element, attrName, value) end
|
||||
|
||||
--- Find first element with a specific tag name (searches direct children only)
|
||||
---@param parent XMLElement The parent XML element
|
||||
---@param tagName string The tag name to search for
|
||||
---@return XMLElement? #The first matching element or nil
|
||||
function findFirstElement(parent, tagName) end
|
||||
|
||||
--- Add a child element to a parent
|
||||
---@param parent XMLElement The parent XML element
|
||||
---@param child XMLElement The child element to add
|
||||
function addChild(parent, child) end
|
||||
|
||||
--- Remove all children with a specific tag name
|
||||
---@param parent XMLElement The parent XML element
|
||||
---@param tagName string The tag name to remove
|
||||
---@return number #Count of removed children
|
||||
function removeChildren(parent, tagName) end
|
||||
|
||||
--- Get all direct children with a specific tag name
|
||||
---@param parent XMLElement The parent XML element
|
||||
---@param tagName string The tag name to search for
|
||||
---@return XMLElement[] #Array of matching children
|
||||
function getChildren(parent, tagName) end
|
||||
|
||||
--- Count children with a specific tag name
|
||||
---@param parent XMLElement The parent XML element
|
||||
---@param tagName string The tag name to count
|
||||
---@return number #Count of matching children
|
||||
function countChildren(parent, tagName) end
|
||||
|
||||
--- Run an XPath expression from a context node or node list
|
||||
---@param node XMLElement | XMLElement[] Context node (use data for root)
|
||||
---@param expr string XPath expression
|
||||
---@return XMLElement[] #Matched XML elements
|
||||
function xpath(node, expr) end
|
||||
|
||||
--- Remove XML nodes matched by an XPath expression
|
||||
---@param node XMLElement | XMLElement[] Context node (use data for root)
|
||||
---@param expr string XPath expression
|
||||
---@return number #Count of removed XML nodes
|
||||
function xpathrm(node, expr) end
|
||||
|
||||
--- Run a JSONPath expression from a context node or node list
|
||||
---@param node JSONNode | JSONArray | (JSONNode | JSONArray | string | number | boolean | nil)[] Context node (use data for root)
|
||||
---@param expr string JSONPath expression
|
||||
---@return (string | number | boolean | nil | JSONNode | JSONArray)[] #Matched JSON values
|
||||
function jpath(node, expr) end
|
||||
|
||||
--- Remove JSON nodes matched by a JSONPath expression
|
||||
---@param node JSONNode | JSONArray | (JSONNode | JSONArray | string | number | boolean | nil)[] Context node (use data for root)
|
||||
---@param expr string JSONPath expression
|
||||
---@return number #Count of removed JSON nodes
|
||||
function jpathrm(node, expr) end
|
||||
|
||||
--- Recursively visit all values in a JSON structure
|
||||
---@param data JSONNode | JSONArray JSON data (nested tables)
|
||||
---@param callback fun(value: string | number | boolean | nil | JSONNode | JSONArray, key: string?, parent: JSONNode?): nil Function called with (value, key, parent)
|
||||
function visitJSON(data, callback) end
|
||||
|
||||
--- Find all values in JSON matching a predicate
|
||||
---@param data JSONNode | JSONArray JSON data
|
||||
---@param predicate fun(value: string | number | boolean | nil | JSONNode | JSONArray, key: string?, parent: JSONNode?): boolean Function that takes (value, key, parent) and returns true/false
|
||||
---@return (string | number | boolean | nil | JSONNode | JSONArray)[] #Array of matching values
|
||||
function findInJSON(data, predicate) end
|
||||
|
||||
--- Modify all numeric values in JSON matching a condition
|
||||
---@param data JSONNode | JSONArray JSON data
|
||||
---@param predicate fun(value: string | number | boolean | nil | JSONNode | JSONArray, key: string?, parent: JSONNode?): boolean Function that takes (value, key, parent) and returns true/false
|
||||
---@param modifier fun(currentValue: number): number Function that takes current value and returns new value
|
||||
function modifyJSONNumbers(data, predicate, modifier) end
|
||||
28
processor/meta_test.go
Normal file
28
processor/meta_test.go
Normal file
@@ -0,0 +1,28 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGenerateMetaFile(t *testing.T) {
|
||||
tmp := t.TempDir()
|
||||
path := filepath.Join(tmp, "meta.lua")
|
||||
|
||||
err := GenerateMetaFile(path)
|
||||
require.NoError(t, err)
|
||||
|
||||
content, err := os.ReadFile(path)
|
||||
require.NoError(t, err)
|
||||
assert.Contains(t, string(content), "---@meta")
|
||||
assert.Contains(t, string(content), "function xpath")
|
||||
}
|
||||
|
||||
func TestGenerateMetaFileWriteError(t *testing.T) {
|
||||
err := GenerateMetaFile(filepath.Join("missing-dir", "meta.lua"))
|
||||
assert.Error(t, err)
|
||||
}
|
||||
@@ -1,155 +1,235 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
|
||||
"modify/logger"
|
||||
)
|
||||
|
||||
//go:embed luahelper.lua
|
||||
var helperScript string
|
||||
|
||||
// Maybe we make this an interface again for the shits and giggles
|
||||
// We will see, it could easily be...
|
||||
|
||||
var globalVariables = map[string]interface{}{}
|
||||
|
||||
func SetVariables(vars map[string]interface{}) {
|
||||
for k, v := range vars {
|
||||
globalVariables[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
func NewLuaState() (*lua.LState, error) {
|
||||
newLStateLogger := logger.WithPrefix("NewLuaState")
|
||||
newLStateLogger.Debug("Creating new Lua state")
|
||||
L := lua.NewState()
|
||||
// defer L.Close()
|
||||
|
||||
// Load math library
|
||||
logger.Debug("Loading Lua math library")
|
||||
newLStateLogger.Debug("Loading Lua math library")
|
||||
L.Push(L.GetGlobal("require"))
|
||||
L.Push(lua.LString("math"))
|
||||
if err := L.PCall(1, 1, nil); err != nil {
|
||||
logger.Error("Failed to load Lua math library: %v", err)
|
||||
newLStateLogger.Error("Failed to load Lua math library: %v", err)
|
||||
return nil, fmt.Errorf("error loading Lua math library: %v", err)
|
||||
}
|
||||
newLStateLogger.Debug("Lua math library loaded")
|
||||
|
||||
// Initialize helper functions
|
||||
logger.Debug("Initializing Lua helper functions")
|
||||
newLStateLogger.Debug("Initializing Lua helper functions")
|
||||
if err := InitLuaHelpers(L); err != nil {
|
||||
logger.Error("Failed to initialize Lua helper functions: %v", err)
|
||||
newLStateLogger.Error("Failed to initialize Lua helper functions: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
newLStateLogger.Debug("Lua helper functions initialized")
|
||||
|
||||
// Inject global variables
|
||||
if len(globalVariables) > 0 {
|
||||
newLStateLogger.Debug("Injecting %d global variables into Lua state", len(globalVariables))
|
||||
for k, v := range globalVariables {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case int64:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float32:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float64:
|
||||
L.SetGlobal(k, lua.LNumber(val))
|
||||
case string:
|
||||
L.SetGlobal(k, lua.LString(val))
|
||||
case bool:
|
||||
if val {
|
||||
L.SetGlobal(k, lua.LTrue)
|
||||
} else {
|
||||
L.SetGlobal(k, lua.LFalse)
|
||||
}
|
||||
default:
|
||||
// Fallback to string representation
|
||||
L.SetGlobal(k, lua.LString(fmt.Sprintf("%v", val)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newLStateLogger.Debug("New Lua state created successfully")
|
||||
return L, nil
|
||||
}
|
||||
|
||||
// func Process(filename string, pattern string, luaExpr string) (int, int, error) {
|
||||
// logger.Debug("Processing file %q with pattern %q", filename, pattern)
|
||||
//
|
||||
// // Read file content
|
||||
// cwd, err := os.Getwd()
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to get current working directory: %v", err)
|
||||
// return 0, 0, fmt.Errorf("error getting current working directory: %v", err)
|
||||
// }
|
||||
//
|
||||
// fullPath := filepath.Join(cwd, filename)
|
||||
// logger.Trace("Reading file from: %s", fullPath)
|
||||
//
|
||||
// stat, err := os.Stat(fullPath)
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to stat file %s: %v", fullPath, err)
|
||||
// return 0, 0, fmt.Errorf("error getting file info: %v", err)
|
||||
// }
|
||||
// logger.Debug("File size: %d bytes, modified: %s", stat.Size(), stat.ModTime().Format(time.RFC3339))
|
||||
//
|
||||
// content, err := os.ReadFile(fullPath)
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to read file %s: %v", fullPath, err)
|
||||
// return 0, 0, fmt.Errorf("error reading file: %v", err)
|
||||
// }
|
||||
//
|
||||
// fileContent := string(content)
|
||||
// logger.Trace("File read successfully: %d bytes, hash: %x", len(content), md5sum(content))
|
||||
//
|
||||
// // Detect and log file type
|
||||
// fileType := detectFileType(filename, fileContent)
|
||||
// if fileType != "" {
|
||||
// logger.Debug("Detected file type: %s", fileType)
|
||||
// }
|
||||
//
|
||||
// // Process the content
|
||||
// logger.Debug("Starting content processing")
|
||||
// modifiedContent, modCount, matchCount, err := ProcessContent(fileContent, pattern, luaExpr)
|
||||
// if err != nil {
|
||||
// logger.Error("Processing error: %v", err)
|
||||
// return 0, 0, err
|
||||
// }
|
||||
//
|
||||
// logger.Debug("Processing results: %d matches, %d modifications", matchCount, modCount)
|
||||
//
|
||||
// // If we made modifications, save the file
|
||||
// if modCount > 0 {
|
||||
// // Calculate changes summary
|
||||
// changePercent := float64(len(modifiedContent)) / float64(len(fileContent)) * 100
|
||||
// logger.Info("File size change: %d → %d bytes (%.1f%%)",
|
||||
// len(fileContent), len(modifiedContent), changePercent)
|
||||
//
|
||||
// logger.Debug("Writing modified content to %s", fullPath)
|
||||
// err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
|
||||
// if err != nil {
|
||||
// logger.Error("Failed to write to file %s: %v", fullPath, err)
|
||||
// return 0, 0, fmt.Errorf("error writing file: %v", err)
|
||||
// }
|
||||
// logger.Debug("File written successfully, new hash: %x", md5sum([]byte(modifiedContent)))
|
||||
// } else if matchCount > 0 {
|
||||
// logger.Debug("No content modifications needed for %d matches", matchCount)
|
||||
// } else {
|
||||
// logger.Debug("No matches found in file")
|
||||
// }
|
||||
//
|
||||
// return modCount, matchCount, nil
|
||||
// }
|
||||
|
||||
// FromLua converts a Lua table to a struct or map recursively
|
||||
func FromLua(L *lua.LState, luaValue lua.LValue) (interface{}, error) {
|
||||
fromLuaLogger := logger.WithPrefix("FromLua").WithField("luaType", luaValue.Type().String())
|
||||
fromLuaLogger.Debug("Converting Lua value to Go interface")
|
||||
switch v := luaValue.(type) {
|
||||
// Well shit...
|
||||
// Tables in lua are both maps and arrays
|
||||
// As arrays they are ordered and as maps, obviously, not
|
||||
// So when we parse them to a go map we fuck up the order for arrays
|
||||
// We have to find a better way....
|
||||
case *lua.LTable:
|
||||
if wrapped, ok := resolveWrappedLuaPrimitive(v); ok {
|
||||
return wrapped, nil
|
||||
}
|
||||
fromLuaLogger.Debug("Processing Lua table")
|
||||
isArray, err := IsLuaTableArray(L, v)
|
||||
if err != nil {
|
||||
fromLuaLogger.Error("Failed to determine if Lua table is array: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
fromLuaLogger.Debug("Lua table is array: %t", isArray)
|
||||
if isArray {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go array")
|
||||
result := make([]interface{}, 0)
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
|
||||
result = append(result, converted)
|
||||
})
|
||||
fromLuaLogger.Trace("Converted Go array: %v", result)
|
||||
return result, nil
|
||||
} else {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go map")
|
||||
result := make(map[string]interface{})
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
|
||||
result[key.String()] = converted
|
||||
})
|
||||
fromLuaLogger.Trace("Converted Go map: %v", result)
|
||||
return result, nil
|
||||
}
|
||||
case lua.LString:
|
||||
fromLuaLogger.Debug("Converting Lua string to Go string")
|
||||
fromLuaLogger.Trace("Lua string: %q", string(v))
|
||||
return string(v), nil
|
||||
case lua.LBool:
|
||||
fromLuaLogger.Debug("Converting Lua boolean to Go boolean")
|
||||
fromLuaLogger.Trace("Lua boolean: %t", bool(v))
|
||||
return bool(v), nil
|
||||
case lua.LNumber:
|
||||
fromLuaLogger.Debug("Converting Lua number to Go float64")
|
||||
fromLuaLogger.Trace("Lua number: %f", float64(v))
|
||||
return float64(v), nil
|
||||
default:
|
||||
fromLuaLogger.Debug("Unsupported Lua type, returning nil")
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
|
||||
func resolveWrappedLuaPrimitive(table *lua.LTable) (interface{}, bool) {
|
||||
if table == nil {
|
||||
return nil, false
|
||||
}
|
||||
marker := table.RawGetString("__chef_wrapped")
|
||||
if marker.Type() != lua.LTBool || !lua.LVAsBool(marker) {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
currentVal := table.RawGetString("val")
|
||||
currentRaw := table.RawGetString("raw")
|
||||
originalVal := table.RawGetString("__orig_val")
|
||||
originalRaw := table.RawGetString("__orig_raw")
|
||||
|
||||
valChanged := !luaScalarEqual(currentVal, originalVal)
|
||||
rawChanged := !luaScalarEqual(currentRaw, originalRaw)
|
||||
valDeleted := currentVal == nil || currentVal.Type() == lua.LTNil
|
||||
rawDeleted := currentRaw == nil || currentRaw.Type() == lua.LTNil
|
||||
|
||||
if valDeleted && !rawChanged {
|
||||
return nil, true
|
||||
}
|
||||
if rawDeleted && !valChanged {
|
||||
return nil, true
|
||||
}
|
||||
|
||||
if valChanged && !valDeleted {
|
||||
if rawChanged && !rawDeleted && !luaScalarEqual(currentVal, currentRaw) {
|
||||
logger.Warning("Primitive wrapper val and raw both modified; using val")
|
||||
}
|
||||
return luaPrimitiveToGo(currentVal), true
|
||||
}
|
||||
if rawChanged && !rawDeleted {
|
||||
return luaPrimitiveToGo(currentRaw), true
|
||||
}
|
||||
if !rawDeleted {
|
||||
return luaPrimitiveToGo(currentRaw), true
|
||||
}
|
||||
if !valDeleted {
|
||||
return luaPrimitiveToGo(currentVal), true
|
||||
}
|
||||
return nil, true
|
||||
}
|
||||
|
||||
func luaPrimitiveToGo(value lua.LValue) interface{} {
|
||||
if value == nil || value.Type() == lua.LTNil {
|
||||
return nil
|
||||
}
|
||||
switch v := value.(type) {
|
||||
case lua.LString:
|
||||
return string(v)
|
||||
case lua.LBool:
|
||||
return bool(v)
|
||||
case lua.LNumber:
|
||||
return float64(v)
|
||||
default:
|
||||
return value.String()
|
||||
}
|
||||
}
|
||||
|
||||
func PrepareLuaDataForCommand(L *lua.LState, rawMode bool) error {
|
||||
prep := L.GetGlobal("__chef_prepare_data")
|
||||
fn, ok := prep.(*lua.LFunction)
|
||||
if !ok {
|
||||
return fmt.Errorf("internal error: __chef_prepare_data is not available")
|
||||
}
|
||||
data := L.GetGlobal("data")
|
||||
if err := L.CallByParam(lua.P{
|
||||
Fn: fn,
|
||||
NRet: 1,
|
||||
Protect: true,
|
||||
}, data, lua.LBool(rawMode)); err != nil {
|
||||
return fmt.Errorf("failed to prepare Lua data: %w", err)
|
||||
}
|
||||
prepared := L.Get(-1)
|
||||
L.Pop(1)
|
||||
L.SetGlobal("data", prepared)
|
||||
return nil
|
||||
}
|
||||
|
||||
func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
|
||||
logger.Trace("Checking if Lua table is an array")
|
||||
isLuaTableArrayLogger := logger.WithPrefix("IsLuaTableArray")
|
||||
isLuaTableArrayLogger.Debug("Checking if Lua table is an array")
|
||||
isLuaTableArrayLogger.Trace("Lua table input: %v", v)
|
||||
L.SetGlobal("table_to_check", v)
|
||||
|
||||
// Use our predefined helper function from InitLuaHelpers
|
||||
err := L.DoString(`is_array = isArray(table_to_check)`)
|
||||
if err != nil {
|
||||
logger.Error("Error determining if table is an array: %v", err)
|
||||
isLuaTableArrayLogger.Error("Error determining if table is an array: %v", err)
|
||||
return false, fmt.Errorf("error determining if table is array: %w", err)
|
||||
}
|
||||
|
||||
@@ -157,111 +237,33 @@ func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
|
||||
isArray := L.GetGlobal("is_array")
|
||||
// LVIsFalse returns true if a given LValue is a nil or false otherwise false.
|
||||
result := !lua.LVIsFalse(isArray)
|
||||
logger.Trace("Lua table is array: %v", result)
|
||||
isLuaTableArrayLogger.Debug("Lua table is array: %t", result)
|
||||
isLuaTableArrayLogger.Trace("isArray result Lua value: %v", isArray)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// InitLuaHelpers initializes common Lua helper functions
|
||||
func InitLuaHelpers(L *lua.LState) error {
|
||||
logger.Debug("Loading Lua helper functions")
|
||||
initLuaHelpersLogger := logger.WithPrefix("InitLuaHelpers")
|
||||
initLuaHelpersLogger.Debug("Loading Lua helper functions")
|
||||
|
||||
helperScript := `
|
||||
-- Custom Lua helpers for math operations
|
||||
function min(a, b) return math.min(a, b) end
|
||||
function max(a, b) return math.max(a, b) end
|
||||
function round(x, n)
|
||||
if n == nil then n = 0 end
|
||||
return math.floor(x * 10^n + 0.5) / 10^n
|
||||
end
|
||||
function floor(x) return math.floor(x) end
|
||||
function ceil(x) return math.ceil(x) end
|
||||
function upper(s) return string.upper(s) end
|
||||
function lower(s) return string.lower(s) end
|
||||
function format(s, ...) return string.format(s, ...) end
|
||||
|
||||
-- String split helper
|
||||
function strsplit(inputstr, sep)
|
||||
if sep == nil then
|
||||
sep = "%s"
|
||||
end
|
||||
local t = {}
|
||||
for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
|
||||
table.insert(t, str)
|
||||
end
|
||||
return t
|
||||
end
|
||||
|
||||
---@param table table
|
||||
---@param depth number?
|
||||
function DumpTable(table, depth)
|
||||
if depth == nil then
|
||||
depth = 0
|
||||
end
|
||||
if (depth > 200) then
|
||||
print("Error: Depth > 200 in dumpTable()")
|
||||
return
|
||||
end
|
||||
for k, v in pairs(table) do
|
||||
if (type(v) == "table") then
|
||||
print(string.rep(" ", depth) .. k .. ":")
|
||||
DumpTable(v, depth + 1)
|
||||
else
|
||||
print(string.rep(" ", depth) .. k .. ": ", v)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- String to number conversion helper
|
||||
function num(str)
|
||||
return tonumber(str) or 0
|
||||
end
|
||||
|
||||
-- Number to string conversion
|
||||
function str(num)
|
||||
return tostring(num)
|
||||
end
|
||||
|
||||
-- Check if string is numeric
|
||||
function is_number(str)
|
||||
return tonumber(str) ~= nil
|
||||
end
|
||||
|
||||
function isArray(t)
|
||||
if type(t) ~= "table" then return false end
|
||||
local max = 0
|
||||
local count = 0
|
||||
for k, _ in pairs(t) do
|
||||
if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then
|
||||
return false
|
||||
end
|
||||
max = math.max(max, k)
|
||||
count = count + 1
|
||||
end
|
||||
return max == count
|
||||
end
|
||||
|
||||
modified = false
|
||||
`
|
||||
if err := L.DoString(helperScript); err != nil {
|
||||
logger.Error("Failed to load Lua helper functions: %v", err)
|
||||
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
|
||||
return fmt.Errorf("error loading helper functions: %v", err)
|
||||
}
|
||||
initLuaHelpersLogger.Debug("Lua helper functions loaded")
|
||||
|
||||
logger.Debug("Setting up Lua print function to Go")
|
||||
initLuaHelpersLogger.Debug("Setting up Lua print function to Go")
|
||||
L.SetGlobal("print", L.NewFunction(printToGo))
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
L.SetGlobal("re", L.NewFunction(EvalRegex))
|
||||
initLuaHelpersLogger.Debug("Lua print/fetch/regex functions bound to Go")
|
||||
return nil
|
||||
}
|
||||
|
||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||
func LimitString(s string, maxLen int) string {
|
||||
s = strings.ReplaceAll(s, "\n", "\\n")
|
||||
if len(s) <= maxLen {
|
||||
return s
|
||||
}
|
||||
return s[:maxLen-3] + "..."
|
||||
}
|
||||
|
||||
func PrependLuaAssignment(luaExpr string) string {
|
||||
prependLuaAssignmentLogger := logger.WithPrefix("PrependLuaAssignment").WithField("originalLuaExpr", luaExpr)
|
||||
prependLuaAssignmentLogger.Debug("Prepending Lua assignment if necessary")
|
||||
// Auto-prepend v1 for expressions starting with operators
|
||||
if strings.HasPrefix(luaExpr, "*") ||
|
||||
strings.HasPrefix(luaExpr, "/") ||
|
||||
@@ -270,30 +272,83 @@ func PrependLuaAssignment(luaExpr string) string {
|
||||
strings.HasPrefix(luaExpr, "^") ||
|
||||
strings.HasPrefix(luaExpr, "%") {
|
||||
luaExpr = "v1 = v1" + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 = v1' due to operator prefix")
|
||||
} else if strings.HasPrefix(luaExpr, "=") {
|
||||
// Handle direct assignment with = operator
|
||||
luaExpr = "v1 " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1' due to direct assignment operator")
|
||||
}
|
||||
|
||||
// Add assignment if needed
|
||||
if !strings.Contains(luaExpr, "=") {
|
||||
luaExpr = "v1 = " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 =' as no assignment was found")
|
||||
}
|
||||
prependLuaAssignmentLogger.Trace("Final Lua expression after prepending: %q", luaExpr)
|
||||
return luaExpr
|
||||
}
|
||||
|
||||
// LoadExternalLuaFile loads Lua code from an external file
|
||||
func LoadExternalLuaFile(luaPath string, sourceDir string) (string, error) {
|
||||
loadLuaLogger := logger.WithPrefix("LoadExternalLuaFile").WithField("luaPath", luaPath).WithField("sourceDir", sourceDir)
|
||||
loadLuaLogger.Debug("Loading external Lua file")
|
||||
|
||||
// Resolve path: if relative, resolve relative to sourceDir; if absolute, use as-is
|
||||
var resolvedPath string
|
||||
if filepath.IsAbs(luaPath) {
|
||||
resolvedPath = luaPath
|
||||
} else {
|
||||
if sourceDir == "" {
|
||||
// No source directory, use current working directory
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
loadLuaLogger.Error("Failed to get current working directory: %v", err)
|
||||
return "", fmt.Errorf("failed to get current working directory: %w", err)
|
||||
}
|
||||
resolvedPath = filepath.Join(cwd, luaPath)
|
||||
} else {
|
||||
resolvedPath = filepath.Join(sourceDir, luaPath)
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize path
|
||||
resolvedPath = filepath.Clean(resolvedPath)
|
||||
loadLuaLogger.Debug("Resolved Lua file path: %q", resolvedPath)
|
||||
|
||||
// Read the file
|
||||
content, err := os.ReadFile(resolvedPath)
|
||||
if err != nil {
|
||||
loadLuaLogger.Error("Failed to read Lua file %q: %v", resolvedPath, err)
|
||||
return "", fmt.Errorf("failed to read Lua file %q: %w", luaPath, err)
|
||||
}
|
||||
|
||||
loadLuaLogger.Debug("Successfully loaded %d bytes from Lua file %q", len(content), resolvedPath)
|
||||
return string(content), nil
|
||||
}
|
||||
|
||||
// BuildLuaScript prepares a Lua expression from shorthand notation
|
||||
func BuildLuaScript(luaExpr string) string {
|
||||
logger.Debug("Building Lua script from expression: %s", luaExpr)
|
||||
func BuildLuaScript(luaExpr string, sourceDir string) string {
|
||||
buildLuaScriptLogger := logger.WithPrefix("BuildLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildLuaScriptLogger.Debug("Building full Lua script from expression")
|
||||
|
||||
// Check if this is an external Lua file reference
|
||||
if strings.HasPrefix(luaExpr, "@") {
|
||||
luaPath := strings.TrimPrefix(luaExpr, "@")
|
||||
externalLua, err := LoadExternalLuaFile(luaPath, sourceDir)
|
||||
if err != nil {
|
||||
buildLuaScriptLogger.Error("Failed to load external Lua file: %v", err)
|
||||
// Return error script that will fail at runtime
|
||||
return fmt.Sprintf(`error("Failed to load external Lua file: %v")`, err)
|
||||
}
|
||||
luaExpr = externalLua
|
||||
buildLuaScriptLogger.Debug("Loaded external Lua file, %d characters", len(luaExpr))
|
||||
}
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
luaExpr = PrependLuaAssignment(luaExpr)
|
||||
|
||||
// This allows the user to specify whether or not they modified a value
|
||||
// If they do nothing we assume they did modify (no return at all)
|
||||
// If they return before our return then they themselves specify what they did
|
||||
// If nothing is returned lua assumes nil
|
||||
// So we can say our value was modified if the return value is either nil or true
|
||||
// If the return value is false then the user wants to keep the original
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
@@ -301,11 +356,73 @@ func BuildLuaScript(luaExpr string) string {
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildLuaScriptLogger.Trace("Generated full Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
|
||||
|
||||
// BuildJSONLuaScript prepares a Lua expression for JSON mode
|
||||
func BuildJSONLuaScript(luaExpr string, sourceDir string) string {
|
||||
buildJSONLuaScriptLogger := logger.WithPrefix("BuildJSONLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildJSONLuaScriptLogger.Debug("Building full Lua script for JSON mode from expression")
|
||||
|
||||
// Check if this is an external Lua file reference
|
||||
if strings.HasPrefix(luaExpr, "@") {
|
||||
luaPath := strings.TrimPrefix(luaExpr, "@")
|
||||
externalLua, err := LoadExternalLuaFile(luaPath, sourceDir)
|
||||
if err != nil {
|
||||
buildJSONLuaScriptLogger.Error("Failed to load external Lua file: %v", err)
|
||||
// Return error script that will fail at runtime
|
||||
return fmt.Sprintf(`error("Failed to load external Lua file: %v")`, err)
|
||||
}
|
||||
luaExpr = externalLua
|
||||
buildJSONLuaScriptLogger.Debug("Loaded external Lua file, %d characters", len(luaExpr))
|
||||
}
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
end
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildJSONLuaScriptLogger.Trace("Generated full JSON Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
|
||||
|
||||
func replaceVariables(expr string) string {
|
||||
// $varName -> literal value
|
||||
varNameRe := regexp.MustCompile(`\$(\w+)`)
|
||||
return varNameRe.ReplaceAllStringFunc(expr, func(m string) string {
|
||||
name := varNameRe.FindStringSubmatch(m)[1]
|
||||
if v, ok := globalVariables[name]; ok {
|
||||
switch val := v.(type) {
|
||||
case int, int64, float32, float64:
|
||||
return fmt.Sprintf("%v", val)
|
||||
case bool:
|
||||
if val {
|
||||
return "true"
|
||||
} else {
|
||||
return "false"
|
||||
}
|
||||
case string:
|
||||
// Quote strings for Lua literal
|
||||
return fmt.Sprintf("%q", val)
|
||||
default:
|
||||
return fmt.Sprintf("%q", fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return m
|
||||
})
|
||||
}
|
||||
|
||||
func printToGo(L *lua.LState) int {
|
||||
printToGoLogger := logger.WithPrefix("printToGo")
|
||||
printToGoLogger.Debug("Lua print function called, redirecting to Go logger")
|
||||
top := L.GetTop()
|
||||
|
||||
args := make([]interface{}, top)
|
||||
@@ -319,8 +436,274 @@ func printToGo(L *lua.LState) int {
|
||||
parts = append(parts, fmt.Sprintf("%v", arg))
|
||||
}
|
||||
message := strings.Join(parts, " ")
|
||||
printToGoLogger.Trace("Lua print message: %q", message)
|
||||
|
||||
// Use the LUA log level with a script tag
|
||||
logger.Lua("%s", message)
|
||||
printToGoLogger.Debug("Message logged from Lua")
|
||||
return 0
|
||||
}
|
||||
|
||||
func fetch(L *lua.LState) int {
|
||||
fetchLogger := logger.WithPrefix("fetch")
|
||||
fetchLogger.Debug("Lua fetch function called")
|
||||
// Get URL from first argument
|
||||
url := L.ToString(1)
|
||||
if url == "" {
|
||||
fetchLogger.Error("Fetch failed: URL is required")
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString("URL is required"))
|
||||
return 2
|
||||
}
|
||||
fetchLogger.Debug("Fetching URL: %q", url)
|
||||
|
||||
// Get options from second argument if provided
|
||||
var method = "GET"
|
||||
var headers = make(map[string]string)
|
||||
var body = ""
|
||||
|
||||
if L.GetTop() > 1 {
|
||||
options := L.ToTable(2)
|
||||
if options != nil {
|
||||
fetchLogger.Debug("Processing fetch options")
|
||||
// Get method
|
||||
if methodVal := options.RawGetString("method"); methodVal != lua.LNil {
|
||||
method = methodVal.String()
|
||||
fetchLogger.Trace("Method from options: %q", method)
|
||||
}
|
||||
|
||||
// Get headers
|
||||
if headersVal := options.RawGetString("headers"); headersVal != lua.LNil {
|
||||
if headersTable, ok := headersVal.(*lua.LTable); ok {
|
||||
fetchLogger.Trace("Processing headers table")
|
||||
headersTable.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
headers[key.String()] = value.String()
|
||||
fetchLogger.Trace("Header: %q = %q", key.String(), value.String())
|
||||
})
|
||||
}
|
||||
fetchLogger.Trace("All headers: %v", headers)
|
||||
}
|
||||
|
||||
// Get body
|
||||
if bodyVal := options.RawGetString("body"); bodyVal != lua.LNil {
|
||||
body = bodyVal.String()
|
||||
fetchLogger.Trace("Body from options: %q", utils.LimitString(body, 100))
|
||||
}
|
||||
}
|
||||
}
|
||||
fetchLogger.Debug("Fetch request details: Method=%q, URL=%q, BodyLength=%d, Headers=%v", method, url, len(body), headers)
|
||||
|
||||
// Create HTTP request
|
||||
req, err := http.NewRequest(method, url, strings.NewReader(body))
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error creating HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error creating request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
hostname := strings.ToLower(req.URL.Hostname())
|
||||
if strings.HasSuffix(hostname, ".local") && hostname != "localhost" {
|
||||
errMsg := fmt.Sprintf("refusing to fetch reserved .local host: %s", req.URL.Hostname())
|
||||
fetchLogger.Error("%s", errMsg)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(errMsg))
|
||||
return 2
|
||||
}
|
||||
|
||||
// Set headers
|
||||
for key, value := range headers {
|
||||
req.Header.Set(key, value)
|
||||
}
|
||||
fetchLogger.Debug("HTTP request created and headers set")
|
||||
fetchLogger.Trace("HTTP Request: %+v", req)
|
||||
|
||||
// Make request
|
||||
transport := http.DefaultTransport.(*http.Transport).Clone()
|
||||
transport.Proxy = nil
|
||||
client := &http.Client{
|
||||
Transport: transport,
|
||||
Timeout: 10 * time.Second,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error making HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error making request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
defer func() {
|
||||
fetchLogger.Debug("Closing HTTP response body")
|
||||
resp.Body.Close()
|
||||
}()
|
||||
fetchLogger.Debug("HTTP request executed. Status Code: %d", resp.StatusCode)
|
||||
|
||||
// Read response body
|
||||
bodyBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error reading response body: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error reading response: %v", err)))
|
||||
return 2
|
||||
}
|
||||
fetchLogger.Trace("Response body length: %d", len(bodyBytes))
|
||||
|
||||
// Create response table
|
||||
responseTable := L.NewTable()
|
||||
responseTable.RawSetString("status", lua.LNumber(resp.StatusCode))
|
||||
responseTable.RawSetString("statusText", lua.LString(resp.Status))
|
||||
responseTable.RawSetString("ok", lua.LBool(resp.StatusCode >= 200 && resp.StatusCode < 300))
|
||||
responseTable.RawSetString("body", lua.LString(string(bodyBytes)))
|
||||
fetchLogger.Debug("Created Lua response table")
|
||||
|
||||
// Set headers in response
|
||||
headersTable := L.NewTable()
|
||||
for key, values := range resp.Header {
|
||||
headersTable.RawSetString(key, lua.LString(values[0]))
|
||||
fetchLogger.Trace("Response header: %q = %q", key, values[0])
|
||||
}
|
||||
responseTable.RawSetString("headers", headersTable)
|
||||
fetchLogger.Trace("Full response table: %v", responseTable)
|
||||
|
||||
L.Push(responseTable)
|
||||
L.Push(lua.LNil)
|
||||
fetchLogger.Debug("Pushed response table to Lua stack")
|
||||
return 2
|
||||
}
|
||||
|
||||
func EvalRegex(L *lua.LState) int {
|
||||
evalRegexLogger := logger.WithPrefix("evalRegex")
|
||||
evalRegexLogger.Debug("Lua evalRegex function called")
|
||||
|
||||
pattern := L.ToString(1)
|
||||
input := L.ToString(2)
|
||||
|
||||
evalRegexLogger.Debug("Pattern: %q, Input: %q", pattern, input)
|
||||
|
||||
re := regexp.MustCompile(pattern)
|
||||
matches := re.FindStringSubmatch(input)
|
||||
|
||||
evalRegexLogger.Debug("Go regex matches: %v (count: %d)", matches, len(matches))
|
||||
evalRegexLogger.Debug("Matches is nil: %t", matches == nil)
|
||||
|
||||
if len(matches) > 0 {
|
||||
matchesTable := L.NewTable()
|
||||
for i, match := range matches {
|
||||
matchesTable.RawSetInt(i+1, lua.LString(match))
|
||||
evalRegexLogger.Debug("Set table[%d] = %q", i+1, match)
|
||||
}
|
||||
L.Push(matchesTable)
|
||||
} else {
|
||||
L.Push(lua.LNil)
|
||||
}
|
||||
|
||||
evalRegexLogger.Debug("Pushed matches table to Lua stack")
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
// GetLuaFunctionsHelp returns a comprehensive help string for all available Lua functions
|
||||
func GetLuaFunctionsHelp() string {
|
||||
return `Lua Functions Available in Global Environment:
|
||||
|
||||
MATH FUNCTIONS:
|
||||
min(a, b) - Returns the minimum of two numbers
|
||||
max(a, b) - Returns the maximum of two numbers
|
||||
round(x, n) - Rounds x to n decimal places (default 0)
|
||||
floor(x) - Returns the floor of x
|
||||
ceil(x) - Returns the ceiling of x
|
||||
|
||||
STRING FUNCTIONS:
|
||||
upper(s) - Converts string to uppercase
|
||||
lower(s) - Converts string to lowercase
|
||||
format(s, ...) - Formats string using Lua string.format
|
||||
trim(s) - Removes leading/trailing whitespace
|
||||
strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
|
||||
fromCSV(csv, options) - Parses CSV text into rows of fields
|
||||
options: {delimiter=",", hasheader=false, hascomments=false}
|
||||
toCSV(rows, options) - Converts table of rows to CSV text format
|
||||
options: {delimiter=",", hasheader=false}
|
||||
num(str) - Converts string to number (returns 0 if invalid)
|
||||
str(num) - Converts number to string
|
||||
is_number(str) - Returns true if string is numeric
|
||||
|
||||
TABLE FUNCTIONS:
|
||||
dump(table, depth) - Prints table structure recursively
|
||||
isArray(t) - Returns true if table is a sequential array
|
||||
|
||||
XML NODE SHAPE:
|
||||
data.tag - Element name
|
||||
data.text - Wrapped primitive with .val/.raw
|
||||
data.attr - Table of wrapped primitives with .val/.raw
|
||||
data.children - Array of child elements
|
||||
data.name.local - Element local name
|
||||
data.name.uri - Element namespace URI
|
||||
data.name.prefix - Element namespace prefix
|
||||
|
||||
XML QUERY:
|
||||
xpath(node, expr) - Runs XPath relative to node (or list of nodes)
|
||||
pass data as node for root-level queries
|
||||
xpathrm(node, expr) - Removes XML element nodes matched by XPath; returns removed count
|
||||
|
||||
JSON HELPER FUNCTIONS:
|
||||
jpath(node, expr) - Runs JSONPath relative to node (or list of nodes)
|
||||
pass data as node for root-level queries
|
||||
jpathrm(node, expr) - Removes JSON nodes matched by JSONPath; returns removed count
|
||||
visitJSON(data, callback) - Visit all values in JSON structure
|
||||
callback(value, key, parent)
|
||||
findInJSON(data, predicate) - Find values matching condition
|
||||
predicate(value, key, parent) returns true/false
|
||||
modifyJSONNumbers(data, predicate, modifier) - Modify numeric values
|
||||
predicate(value, key, parent) returns true/false
|
||||
modifier(currentValue) returns newValue
|
||||
|
||||
HTTP FUNCTIONS:
|
||||
fetch(url, options) - Makes HTTP request, returns response table
|
||||
options: {method="GET", headers={}, body=""}
|
||||
returns: {status, statusText, ok, body, headers}
|
||||
|
||||
REGEX FUNCTIONS:
|
||||
re(pattern, input) - Applies regex pattern to input string
|
||||
returns: table with matches (index 1 = full match, 2+ = groups)
|
||||
|
||||
UTILITY FUNCTIONS:
|
||||
print(...) - Prints arguments to Go logger
|
||||
printf(fmt, ...) - Equivalent to print(string.format(fmt, ...))
|
||||
|
||||
EXAMPLES:
|
||||
-- Math
|
||||
round(3.14159, 2) -> 3.14
|
||||
min(5, 3) -> 3
|
||||
|
||||
-- String
|
||||
strsplit("a,b,c", ",") -> {"a", "b", "c"}
|
||||
upper("hello") -> "HELLO"
|
||||
num("123") -> 123
|
||||
|
||||
-- JSON (where data is parsed JSON object)
|
||||
for _, item in ipairs(jpath(data, "$.items[*]")) do
|
||||
item.value = item.value * 1.5
|
||||
end
|
||||
|
||||
-- Wrapped primitive access
|
||||
local prices = jpath(data, "$.items[*].price")
|
||||
print(prices[1].val, prices[1].raw)
|
||||
|
||||
visitJSON(data, function(value, key, parent)
|
||||
if type(value) == "number" and key == "price" then
|
||||
parent[key] = value * 1.5
|
||||
end
|
||||
end)
|
||||
|
||||
-- XML (where data is parsed XML element)
|
||||
for _, group in ipairs(xpath(data, "//group")) do
|
||||
for _, item in ipairs(xpath(group, ".//item")) do
|
||||
item.attr.value = tostring(tonumber(item.attr.value) * 2)
|
||||
end
|
||||
end
|
||||
|
||||
-- HTTP
|
||||
local response = fetch("https://api.example.com/data")
|
||||
if response and response.ok then
|
||||
print(response.body)
|
||||
end`
|
||||
}
|
||||
|
||||
379
processor/processor_helper_test.go
Normal file
379
processor/processor_helper_test.go
Normal file
@@ -0,0 +1,379 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
func TestSetVariables(t *testing.T) {
|
||||
// Test with various variable types
|
||||
vars := map[string]interface{}{
|
||||
"multiplier": 2.5,
|
||||
"prefix": "TEST_",
|
||||
"enabled": true,
|
||||
"count": 42,
|
||||
}
|
||||
|
||||
SetVariables(vars)
|
||||
|
||||
// Create a new Lua state to verify variables are set
|
||||
L, err := NewLuaState()
|
||||
assert.NoError(t, err)
|
||||
defer L.Close()
|
||||
|
||||
// Verify the variables are accessible
|
||||
multiplier := L.GetGlobal("multiplier")
|
||||
assert.Equal(t, lua.LTNumber, multiplier.Type())
|
||||
assert.Equal(t, 2.5, float64(multiplier.(lua.LNumber)))
|
||||
|
||||
prefix := L.GetGlobal("prefix")
|
||||
assert.Equal(t, lua.LTString, prefix.Type())
|
||||
assert.Equal(t, "TEST_", string(prefix.(lua.LString)))
|
||||
|
||||
enabled := L.GetGlobal("enabled")
|
||||
assert.Equal(t, lua.LTBool, enabled.Type())
|
||||
assert.True(t, bool(enabled.(lua.LBool)))
|
||||
|
||||
count := L.GetGlobal("count")
|
||||
assert.Equal(t, lua.LTNumber, count.Type())
|
||||
assert.Equal(t, 42.0, float64(count.(lua.LNumber)))
|
||||
}
|
||||
|
||||
func TestSetVariablesEmpty(t *testing.T) {
|
||||
// Test with empty map
|
||||
vars := map[string]interface{}{}
|
||||
SetVariables(vars)
|
||||
|
||||
// Should not panic
|
||||
L, err := NewLuaState()
|
||||
assert.NoError(t, err)
|
||||
defer L.Close()
|
||||
}
|
||||
|
||||
func TestSetVariablesNil(t *testing.T) {
|
||||
// Test with nil map
|
||||
SetVariables(nil)
|
||||
|
||||
// Should not panic
|
||||
L, err := NewLuaState()
|
||||
assert.NoError(t, err)
|
||||
defer L.Close()
|
||||
}
|
||||
|
||||
func TestGetLuaFunctionsHelp(t *testing.T) {
|
||||
help := GetLuaFunctionsHelp()
|
||||
|
||||
// Verify help is not empty
|
||||
assert.NotEmpty(t, help)
|
||||
|
||||
// Verify it contains documentation for key functions
|
||||
assert.Contains(t, help, "MATH FUNCTIONS")
|
||||
assert.Contains(t, help, "STRING FUNCTIONS")
|
||||
assert.Contains(t, help, "TABLE FUNCTIONS")
|
||||
assert.Contains(t, help, "XML NODE SHAPE")
|
||||
assert.Contains(t, help, "JSON HELPER FUNCTIONS")
|
||||
assert.Contains(t, help, "HTTP FUNCTIONS")
|
||||
assert.Contains(t, help, "REGEX FUNCTIONS")
|
||||
assert.Contains(t, help, "UTILITY FUNCTIONS")
|
||||
assert.Contains(t, help, "EXAMPLES")
|
||||
|
||||
// Verify specific functions are documented
|
||||
assert.Contains(t, help, "min(a, b)")
|
||||
assert.Contains(t, help, "max(a, b)")
|
||||
assert.Contains(t, help, "round(x, n)")
|
||||
assert.Contains(t, help, "fetch(url, options)")
|
||||
assert.Contains(t, help, "data.children")
|
||||
assert.Contains(t, help, "visitJSON(data, callback)")
|
||||
assert.Contains(t, help, "re(pattern, input)")
|
||||
assert.Contains(t, help, "print(...)")
|
||||
assert.Contains(t, help, "printf(fmt, ...)")
|
||||
}
|
||||
|
||||
func TestFetchFunction(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
// Register the fetch function
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
|
||||
// Test 1: Missing URL should return nil and error
|
||||
err := L.DoString(`
|
||||
result, err = fetch("")
|
||||
assert(result == nil, "Expected nil result for empty URL")
|
||||
assert(err ~= nil, "Expected error for empty URL")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test 2: Invalid URL should return error
|
||||
err = L.DoString(`
|
||||
result, err = fetch("not-a-valid-url")
|
||||
assert(result == nil, "Expected nil result for invalid URL")
|
||||
assert(err ~= nil, "Expected error for invalid URL")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestFetchFunctionWithOptions(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
// Register the fetch function
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
|
||||
// Test with options (should fail gracefully with invalid URL)
|
||||
err := L.DoString(`
|
||||
local opts = {
|
||||
method = "POST",
|
||||
headers = {["Content-Type"] = "application/json"},
|
||||
body = '{"test": "data"}'
|
||||
}
|
||||
result, err = fetch("http://invalid-domain-that-does-not-exist.local", opts)
|
||||
-- Should get error due to invalid domain
|
||||
assert(result == nil, "Expected nil result for invalid domain")
|
||||
assert(err ~= nil, "Expected error for invalid domain")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestPrependLuaAssignment(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Simple assignment",
|
||||
input: "10",
|
||||
expected: "v1 = 10",
|
||||
},
|
||||
{
|
||||
name: "Expression",
|
||||
input: "v1 * 2",
|
||||
expected: "v1 = v1 * 2",
|
||||
},
|
||||
{
|
||||
name: "Assignment with equal sign",
|
||||
input: "= 5",
|
||||
expected: "v1 = 5",
|
||||
},
|
||||
{
|
||||
name: "Complex expression",
|
||||
input: "math.floor(v1 / 2)",
|
||||
expected: "v1 = math.floor(v1 / 2)",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := PrependLuaAssignment(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildJSONLuaScript(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
contains []string
|
||||
}{
|
||||
{
|
||||
name: "Simple JSON modification",
|
||||
input: "data.value = data.value * 2; modified = true",
|
||||
contains: []string{
|
||||
"data.value = data.value * 2",
|
||||
"modified = true",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Complex JSON script",
|
||||
input: "for i, item in ipairs(data.items) do item.price = item.price * 1.5 end; modified = true",
|
||||
contains: []string{
|
||||
"for i, item in ipairs(data.items)",
|
||||
"modified = true",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := BuildJSONLuaScript(tt.input, "")
|
||||
for _, substr := range tt.contains {
|
||||
assert.Contains(t, result, substr)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrintToGo(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
// Register the print function
|
||||
L.SetGlobal("print", L.NewFunction(printToGo))
|
||||
|
||||
// Test printing various types
|
||||
err := L.DoString(`
|
||||
print("Hello, World!")
|
||||
print(42)
|
||||
print(true)
|
||||
print(3.14)
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestPrintfHelper(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
err := InitLuaHelpers(L)
|
||||
assert.NoError(t, err)
|
||||
|
||||
err = L.DoString(`
|
||||
printf("%s", "foobar")
|
||||
printf("%d + %d = %d", 2, 3, 5)
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestEvalRegex(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
// Register the regex function
|
||||
L.SetGlobal("re", L.NewFunction(EvalRegex))
|
||||
|
||||
// Test 1: Simple match
|
||||
err := L.DoString(`
|
||||
matches = re("(\\d+)", "The answer is 42")
|
||||
assert(matches ~= nil, "Expected matches")
|
||||
assert(matches[1] == "42", "Expected full match to be 42")
|
||||
assert(matches[2] == "42", "Expected capture group to be 42")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test 2: No match
|
||||
err = L.DoString(`
|
||||
matches = re("(\\d+)", "No numbers here")
|
||||
assert(matches == nil, "Expected nil for no match")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test 3: Multiple capture groups
|
||||
err = L.DoString(`
|
||||
matches = re("(\\w+)\\s+(\\d+)", "item 123")
|
||||
assert(matches ~= nil, "Expected matches")
|
||||
assert(matches[1] == "item 123", "Expected full match")
|
||||
assert(matches[2] == "item", "Expected first capture group")
|
||||
assert(matches[3] == "123", "Expected second capture group")
|
||||
`)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestEstimatePatternComplexity(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
pattern string
|
||||
minExpected int
|
||||
}{
|
||||
{
|
||||
name: "Simple literal",
|
||||
pattern: "hello",
|
||||
minExpected: 1,
|
||||
},
|
||||
{
|
||||
name: "With capture group",
|
||||
pattern: "(\\d+)",
|
||||
minExpected: 2,
|
||||
},
|
||||
{
|
||||
name: "Complex pattern",
|
||||
pattern: "(?P<name>\\w+)\\s+(?P<value>\\d+\\.\\d+)",
|
||||
minExpected: 3,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
complexity := estimatePatternComplexity(tt.pattern)
|
||||
assert.GreaterOrEqual(t, complexity, tt.minExpected)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseNumeric(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected float64
|
||||
shouldOk bool
|
||||
}{
|
||||
{"Integer", "42", 42.0, true},
|
||||
{"Float", "3.14", 3.14, true},
|
||||
{"Negative", "-10", -10.0, true},
|
||||
{"Invalid", "not a number", 0, false},
|
||||
{"Empty", "", 0, false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, ok := parseNumeric(tt.input)
|
||||
assert.Equal(t, tt.shouldOk, ok)
|
||||
if tt.shouldOk {
|
||||
assert.Equal(t, tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormatNumeric(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input float64
|
||||
expected string
|
||||
}{
|
||||
{"Integer value", 42.0, "42"},
|
||||
{"Float value", 3.14, "3.14"},
|
||||
{"Negative integer", -10.0, "-10"},
|
||||
{"Negative float", -3.14, "-3.14"},
|
||||
{"Zero", 0.0, "0"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := formatNumeric(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLuaHelperFunctionsDocumentation(t *testing.T) {
|
||||
help := GetLuaFunctionsHelp()
|
||||
|
||||
// All main function categories should be documented
|
||||
expectedCategories := []string{
|
||||
"MATH FUNCTIONS",
|
||||
"STRING FUNCTIONS",
|
||||
"XML NODE SHAPE",
|
||||
"JSON HELPER FUNCTIONS",
|
||||
}
|
||||
|
||||
for _, category := range expectedCategories {
|
||||
assert.Contains(t, help, category, "Help should contain category: %s", category)
|
||||
}
|
||||
|
||||
// Verify some key functions are mentioned
|
||||
keyFunctions := []string{
|
||||
"data.children",
|
||||
"data.attr",
|
||||
"visitJSON",
|
||||
"round",
|
||||
"fetch",
|
||||
}
|
||||
|
||||
for _, fn := range keyFunctions {
|
||||
assert.Contains(t, help, fn, "Help should mention function: %s", fn)
|
||||
}
|
||||
}
|
||||
@@ -1,16 +1,15 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
|
||||
"modify/logger"
|
||||
"modify/utils"
|
||||
)
|
||||
|
||||
type CaptureGroup struct {
|
||||
@@ -20,49 +19,67 @@ type CaptureGroup struct {
|
||||
Range [2]int
|
||||
}
|
||||
|
||||
// ProcessContent applies regex replacement with Lua processing
|
||||
func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceCommand, error) {
|
||||
var commands []utils.ReplaceCommand
|
||||
logger.Trace("Processing regex: %q", command.Regex)
|
||||
// ProcessRegex applies regex replacement with Lua processing.
|
||||
// The filename here exists ONLY so we can pass it to the lua environment.
|
||||
// It's not used for anything else.
|
||||
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||
commandPrefix := command.Name
|
||||
if commandPrefix == "" {
|
||||
commandPrefix = "<unnamed-command>"
|
||||
}
|
||||
processlogger := logger.WithPrefix("ProcessRegex").WithPrefix(commandPrefix).WithField("file", fmt.Sprintf("%q", filename))
|
||||
processlogger.Debug("Starting regex processing for file")
|
||||
processlogger.Trace("Initial file content length: %d", len(content))
|
||||
processlogger.Trace("Command details: %+v", command)
|
||||
|
||||
var commands []utils.ReplaceCommand
|
||||
// Start timing the regex processing
|
||||
startTime := time.Now()
|
||||
|
||||
// We don't HAVE to do this multiple times for a pattern
|
||||
// But it's quick enough for us to not care
|
||||
pattern := resolveRegexPlaceholders(command.Regex)
|
||||
logger.Debug("Compiling regex pattern: %s", pattern)
|
||||
processlogger.Debug("Resolved regex placeholders. Pattern: %s", pattern)
|
||||
|
||||
// I'm not too happy about having to trim regex, we could have meaningful whitespace or newlines
|
||||
// But it's a compromise that allows us to use | in yaml
|
||||
// Otherwise we would have to escape every god damn pair of quotation marks
|
||||
// And a bunch of other shit
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
processlogger.Debug("Trimmed regex pattern: %s", pattern)
|
||||
|
||||
patternCompileStart := time.Now()
|
||||
compiledPattern, err := regexp.Compile(pattern)
|
||||
if err != nil {
|
||||
logger.Error("Error compiling pattern: %v", err)
|
||||
processlogger.Error("Error compiling pattern %q: %v", pattern, err)
|
||||
return commands, fmt.Errorf("error compiling pattern: %v", err)
|
||||
}
|
||||
logger.Debug("Compiled pattern successfully in %v: %s", time.Since(patternCompileStart), pattern)
|
||||
processlogger.Debug("Compiled pattern successfully in %v. Pattern: %s", time.Since(patternCompileStart), pattern)
|
||||
|
||||
// Same here, it's just string concatenation, it won't kill us
|
||||
// More important is that we don't fuck up the command
|
||||
// But we shouldn't be able to since it's passed by value
|
||||
previous := command.Lua
|
||||
luaExpr := BuildLuaScript(command.Lua)
|
||||
logger.Debug("Transformed Lua expression: %q → %q", previous, luaExpr)
|
||||
previousLuaExpr := command.Lua
|
||||
luaExpr := BuildLuaScript(command.Lua, command.SourceDir)
|
||||
processlogger.Debug("Transformed Lua expression: %q → %q", previousLuaExpr, luaExpr)
|
||||
processlogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||
|
||||
// Process all regex matches
|
||||
matchFindStart := time.Now()
|
||||
indices := compiledPattern.FindAllStringSubmatchIndex(content, -1)
|
||||
matchFindDuration := time.Since(matchFindStart)
|
||||
|
||||
logger.Debug("Found %d matches in content of length %d (search took %v)",
|
||||
processlogger.Debug("Found %d matches in content of length %d (search took %v)",
|
||||
len(indices), len(content), matchFindDuration)
|
||||
processlogger.Trace("Match indices: %v", indices)
|
||||
|
||||
// Log pattern complexity metrics
|
||||
patternComplexity := estimatePatternComplexity(pattern)
|
||||
logger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
||||
processlogger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
||||
|
||||
if len(indices) == 0 {
|
||||
logger.Warning("No matches found for regex: %q", pattern)
|
||||
logger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
processlogger.Warning("No matches found for regex: %s", pattern)
|
||||
processlogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
@@ -71,19 +88,21 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
// By going backwards we fuck up all the indices to the end of the file that we don't care about
|
||||
// Because there either aren't any (last match) or they're already modified (subsequent matches)
|
||||
for i, matchIndices := range indices {
|
||||
logger.Debug("Processing match %d of %d", i+1, len(indices))
|
||||
logger.Trace("Match indices: %v (match position %d-%d)", matchIndices, matchIndices[0], matchIndices[1])
|
||||
matchLogger := processlogger.WithField("matchNum", i+1)
|
||||
matchLogger.Debug("Processing match %d of %d", i+1, len(indices))
|
||||
matchLogger.Trace("Match indices: %v (match position %d-%d)", matchIndices, matchIndices[0], matchIndices[1])
|
||||
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
logger.Error("Error creating Lua state: %v", err)
|
||||
matchLogger.Error("Error creating Lua state: %v", err)
|
||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
L.SetGlobal("file", lua.LString(filename))
|
||||
// Hmm... Maybe we don't want to defer this..
|
||||
// Maybe we want to close them every iteration
|
||||
// We'll leave it as is for now
|
||||
defer L.Close()
|
||||
logger.Trace("Lua state created successfully for match %d", i+1)
|
||||
matchLogger.Trace("Lua state created successfully for match %d", i+1)
|
||||
|
||||
// Why we're doing this whole song and dance of indices is to properly handle empty matches
|
||||
// Plus it's a little cleaner to surgically replace our matches
|
||||
@@ -92,20 +111,17 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
// So when we're cutting open the array we say 0:7 + modified + 7:end
|
||||
// As if concatenating in the middle of the array
|
||||
// Plus it supports lookarounds
|
||||
match := content[matchIndices[0]:matchIndices[1]]
|
||||
matchPreview := match
|
||||
if len(match) > 50 {
|
||||
matchPreview = match[:47] + "..."
|
||||
}
|
||||
logger.Trace("Matched content: %q (length: %d)", matchPreview, len(match))
|
||||
matchContent := content[matchIndices[0]:matchIndices[1]]
|
||||
matchPreview := utils.LimitString(matchContent, 50)
|
||||
matchLogger.Trace("Matched content: %q (length: %d)", matchPreview, len(matchContent))
|
||||
|
||||
groups := matchIndices[2:]
|
||||
if len(groups) <= 0 {
|
||||
logger.Warning("No capture groups found for match %q and regex %q", matchPreview, pattern)
|
||||
matchLogger.Warning("No capture groups found for match %q and regex %q", matchPreview, pattern)
|
||||
continue
|
||||
}
|
||||
if len(groups)%2 == 1 {
|
||||
logger.Warning("Invalid number of group indices (%d), should be even: %v", len(groups), groups)
|
||||
matchLogger.Warning("Invalid number of group indices (%d), should be even: %v", len(groups), groups)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -116,11 +132,11 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
validGroups++
|
||||
}
|
||||
}
|
||||
logger.Debug("Found %d valid capture groups in match", validGroups)
|
||||
matchLogger.Debug("Found %d valid capture groups in match", validGroups)
|
||||
|
||||
for _, index := range groups {
|
||||
if index == -1 {
|
||||
logger.Warning("Negative index encountered in match indices %v. This may indicate an issue with the regex pattern or an empty/optional capture group.", matchIndices)
|
||||
matchLogger.Warning("Negative index encountered in match indices %v. This may indicate an issue with the regex pattern or an empty/optional capture group.", matchIndices)
|
||||
continue
|
||||
}
|
||||
}
|
||||
@@ -135,6 +151,7 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
start := groups[i*2]
|
||||
end := groups[i*2+1]
|
||||
if start == -1 || end == -1 {
|
||||
matchLogger.Debug("Skipping empty or unmatched capture group #%d (name: %q)", i+1, name)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -147,71 +164,77 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
|
||||
// Include name info in log if available
|
||||
if name != "" {
|
||||
logger.Trace("Capture group '%s': %q (pos %d-%d)", name, value, start, end)
|
||||
matchLogger.Trace("Capture group '%s': %q (pos %d-%d)", name, value, start, end)
|
||||
} else {
|
||||
logger.Trace("Capture group #%d: %q (pos %d-%d)", i+1, value, start, end)
|
||||
matchLogger.Trace("Capture group #%d: %q (pos %d-%d)", i+1, value, start, end)
|
||||
}
|
||||
}
|
||||
|
||||
captureGroups = deduplicateGroups(captureGroups)
|
||||
// Use the DeduplicateGroups flag to control whether to deduplicate capture groups
|
||||
if !command.NoDedup {
|
||||
matchLogger.Debug("Deduplicating capture groups as specified in command settings")
|
||||
captureGroups = deduplicateGroups(captureGroups)
|
||||
matchLogger.Trace("Capture groups after deduplication: %v", captureGroups)
|
||||
} else {
|
||||
matchLogger.Debug("Skipping deduplication of capture groups (NoDedup is true)")
|
||||
}
|
||||
|
||||
if err := toLua(L, captureGroups); err != nil {
|
||||
logger.Error("Failed to set Lua variables: %v", err)
|
||||
matchLogger.Error("Failed to set Lua variables for capture groups: %v", err)
|
||||
continue
|
||||
}
|
||||
logger.Trace("Set %d capture groups as Lua variables", len(captureGroups))
|
||||
matchLogger.Debug("Set %d capture groups as Lua variables", len(captureGroups))
|
||||
matchLogger.Trace("Lua globals set for capture groups")
|
||||
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
logger.Error("Lua script execution failed: %v\nScript: %s\nCapture Groups: %+v",
|
||||
err, luaExpr, captureGroups)
|
||||
matchLogger.Error("Lua script execution failed: %v\nScript: %s\nCapture Groups: %+v",
|
||||
err, utils.LimitString(luaExpr, 200), captureGroups)
|
||||
continue
|
||||
}
|
||||
logger.Trace("Lua script executed successfully")
|
||||
matchLogger.Debug("Lua script executed successfully")
|
||||
|
||||
// Get modifications from Lua
|
||||
captureGroups, err = fromLua(L, captureGroups)
|
||||
updatedCaptureGroups, err := fromLua(L, captureGroups)
|
||||
if err != nil {
|
||||
logger.Error("Failed to retrieve modifications from Lua: %v", err)
|
||||
matchLogger.Error("Failed to retrieve modifications from Lua: %v", err)
|
||||
continue
|
||||
}
|
||||
logger.Trace("Retrieved updated values from Lua")
|
||||
matchLogger.Debug("Retrieved updated values from Lua")
|
||||
matchLogger.Trace("Updated capture groups from Lua: %v", updatedCaptureGroups)
|
||||
|
||||
replacement := ""
|
||||
replacementVar := L.GetGlobal("replacement")
|
||||
if replacementVar.Type() != lua.LTNil {
|
||||
replacement = replacementVar.String()
|
||||
logger.Debug("Using global replacement: %q", replacement)
|
||||
matchLogger.Debug("Using global replacement variable from Lua: %q", replacement)
|
||||
}
|
||||
|
||||
// Check if modification flag is set
|
||||
modifiedVal := L.GetGlobal("modified")
|
||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||
logger.Debug("Skipping match - no modifications made by Lua script")
|
||||
matchLogger.Debug("Skipping match - no modifications indicated by Lua script")
|
||||
continue
|
||||
}
|
||||
|
||||
if replacement == "" {
|
||||
// Apply the modifications to the original match
|
||||
replacement = match
|
||||
|
||||
// Count groups that were actually modified
|
||||
modifiedGroups := 0
|
||||
for _, capture := range captureGroups {
|
||||
modifiedGroupsCount := 0
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value != capture.Updated {
|
||||
modifiedGroups++
|
||||
modifiedGroupsCount++
|
||||
}
|
||||
}
|
||||
logger.Info("%d of %d capture groups identified for modification", modifiedGroups, len(captureGroups))
|
||||
matchLogger.Debug("%d of %d capture groups identified for modification", modifiedGroupsCount, len(updatedCaptureGroups))
|
||||
|
||||
for _, capture := range captureGroups {
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value == capture.Updated {
|
||||
logger.Info("Capture group unchanged: %s", capture.Value)
|
||||
matchLogger.Debug("Capture group unchanged: %s", utils.LimitString(capture.Value, 50))
|
||||
continue
|
||||
}
|
||||
|
||||
// Log what changed with context
|
||||
logger.Debug("Capture group %s scheduled for modification: %q → %q",
|
||||
capture.Name, capture.Value, capture.Updated)
|
||||
matchLogger.Debug("Capture group %q scheduled for modification: %q → %q",
|
||||
capture.Name, utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
|
||||
// Indices of the group are relative to content
|
||||
// To relate them to match we have to subtract the match start index
|
||||
@@ -221,42 +244,57 @@ func ProcessRegex(content string, command utils.ModifyCommand) ([]utils.ReplaceC
|
||||
To: capture.Range[1],
|
||||
With: capture.Updated,
|
||||
})
|
||||
matchLogger.Trace("Added replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
} else {
|
||||
matchLogger.Debug("Using full replacement string from Lua: %q", utils.LimitString(replacement, 50))
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: matchIndices[0],
|
||||
To: matchIndices[1],
|
||||
With: replacement,
|
||||
})
|
||||
matchLogger.Trace("Added full replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
}
|
||||
|
||||
logger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
processlogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
processlogger.Debug("Generated %d total modifications", len(commands))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
func deduplicateGroups(captureGroups []*CaptureGroup) []*CaptureGroup {
|
||||
deduplicatedGroups := make([]*CaptureGroup, 0)
|
||||
deduplicateGroupsLogger := logger.WithPrefix("deduplicateGroups")
|
||||
deduplicateGroupsLogger.Debug("Starting deduplication of capture groups")
|
||||
deduplicateGroupsLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
// Preserve input order and drop any group that overlaps with an already accepted group
|
||||
accepted := make([]*CaptureGroup, 0, len(captureGroups))
|
||||
for _, group := range captureGroups {
|
||||
groupLogger := deduplicateGroupsLogger.WithField("groupName", group.Name).WithField("groupRange", group.Range)
|
||||
groupLogger.Debug("Processing capture group")
|
||||
|
||||
overlaps := false
|
||||
logger.Debug("Checking capture group: %s with range %v", group.Name, group.Range)
|
||||
for _, existingGroup := range deduplicatedGroups {
|
||||
logger.Debug("Comparing with existing group: %s with range %v", existingGroup.Name, existingGroup.Range)
|
||||
if group.Range[0] < existingGroup.Range[1] && group.Range[1] > existingGroup.Range[0] {
|
||||
for _, kept := range accepted {
|
||||
// Overlap if start < keptEnd and end > keptStart (adjacent is allowed)
|
||||
if group.Range[0] < kept.Range[1] && group.Range[1] > kept.Range[0] {
|
||||
overlaps = true
|
||||
logger.Warning("Detected overlap between capture group '%s' and existing group '%s' in range %v-%v and %v-%v", group.Name, existingGroup.Name, group.Range[0], group.Range[1], existingGroup.Range[0], existingGroup.Range[1])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if overlaps {
|
||||
// We CAN just continue despite this fuckup
|
||||
logger.Warning("Overlapping capture group: %s", group.Name)
|
||||
groupLogger.Warning("Overlapping capture group detected and skipped.")
|
||||
continue
|
||||
}
|
||||
logger.Debug("No overlap detected for capture group: %s. Adding to deduplicated groups.", group.Name)
|
||||
deduplicatedGroups = append(deduplicatedGroups, group)
|
||||
|
||||
groupLogger.Debug("Capture group does not overlap with previously accepted groups. Adding.")
|
||||
accepted = append(accepted, group)
|
||||
}
|
||||
return deduplicatedGroups
|
||||
|
||||
deduplicateGroupsLogger.Debug("Finished deduplication. Original %d groups, %d deduplicated.", len(captureGroups), len(accepted))
|
||||
deduplicateGroupsLogger.Trace("Deduplicated groups: %v", accepted)
|
||||
|
||||
return accepted
|
||||
}
|
||||
|
||||
// The order of these replaces is important
|
||||
@@ -265,105 +303,186 @@ func deduplicateGroups(captureGroups []*CaptureGroup) []*CaptureGroup {
|
||||
// Expand to another capture group in the capture group
|
||||
// We really only want one (our named) capture group
|
||||
func resolveRegexPlaceholders(pattern string) string {
|
||||
resolveLogger := logger.WithPrefix("resolveRegexPlaceholders").WithField("originalPattern", utils.LimitString(pattern, 100))
|
||||
resolveLogger.Debug("Resolving regex placeholders in pattern")
|
||||
|
||||
// Handle special pattern modifications
|
||||
if !strings.HasPrefix(pattern, "(?s)") {
|
||||
pattern = "(?s)" + pattern
|
||||
resolveLogger.Debug("Prepended '(?s)' to pattern for single-line mode")
|
||||
}
|
||||
|
||||
namedGroupNum := regexp.MustCompile(`(?:(\?<[^>]+>)(!num))`)
|
||||
pattern = namedGroupNum.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("namedGroupNumReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing named group !num placeholder")
|
||||
parts := namedGroupNum.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for namedGroupNum: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
replacement := `-?\d*\.?\d+`
|
||||
funcLogger.Trace("Replacing !num in named group with: %q", replacement)
|
||||
return parts[1] + replacement
|
||||
})
|
||||
resolveLogger.Debug("Handled named group !num placeholders")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!num", `(-?\d*\.?\d+)`)
|
||||
resolveLogger.Debug("Replaced !num with numeric capture group")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
|
||||
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "\n", "\r?\n")
|
||||
resolveLogger.Debug("Added optional carriage return support for Windows line endings")
|
||||
|
||||
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
|
||||
// !rep(pattern, count) repeats the pattern n times
|
||||
// Inserting !any between each repetition
|
||||
pattern = repPattern.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("repPatternReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing !rep placeholder")
|
||||
parts := repPattern.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for repPattern: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
repeatedPattern := parts[1]
|
||||
count := parts[2]
|
||||
repetitions, _ := strconv.Atoi(count)
|
||||
return strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
|
||||
countStr := parts[2]
|
||||
repetitions, err := strconv.Atoi(countStr)
|
||||
if err != nil {
|
||||
funcLogger.Error("Failed to parse repetition count %q: %v. Returning original match.", countStr, err)
|
||||
return match
|
||||
}
|
||||
|
||||
var finalReplacement string
|
||||
if repetitions > 0 {
|
||||
finalReplacement = strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
|
||||
} else {
|
||||
finalReplacement = ""
|
||||
}
|
||||
|
||||
funcLogger.Trace("Replaced !rep with %d repetitions of %q: %q", repetitions, utils.LimitString(repeatedPattern, 30), utils.LimitString(finalReplacement, 100))
|
||||
return finalReplacement
|
||||
})
|
||||
resolveLogger.Debug("Handled !rep placeholders")
|
||||
|
||||
resolveLogger.Debug("Finished resolving regex placeholders")
|
||||
resolveLogger.Trace("Final resolved pattern: %q", utils.LimitString(pattern, 100))
|
||||
return pattern
|
||||
}
|
||||
|
||||
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
|
||||
func toLua(L *lua.LState, data interface{}) error {
|
||||
toLuaLogger := logger.WithPrefix("toLua")
|
||||
toLuaLogger.Debug("Setting capture groups as Lua variables")
|
||||
|
||||
captureGroups, ok := data.([]*CaptureGroup)
|
||||
if !ok {
|
||||
toLuaLogger.Error("Invalid data type for toLua. Expected []*CaptureGroup, got %T", data)
|
||||
return fmt.Errorf("expected []*CaptureGroup for captures, got %T", data)
|
||||
}
|
||||
toLuaLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
groupindex := 0
|
||||
for _, capture := range captureGroups {
|
||||
groupLogger := toLuaLogger.WithField("captureGroup", capture.Name).WithField("value", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group for Lua")
|
||||
|
||||
if capture.Name == "" {
|
||||
// We don't want to change the name of the capture group
|
||||
// Even if it's empty
|
||||
tempName := fmt.Sprintf("%d", groupindex+1)
|
||||
groupindex++
|
||||
groupLogger.Debug("Unnamed capture group, assigning temporary name: %q", tempName)
|
||||
|
||||
L.SetGlobal("s"+tempName, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global s%s = %q", tempName, capture.Value)
|
||||
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal("v"+tempName, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global v%s = %f", tempName, val)
|
||||
} else {
|
||||
groupLogger.Trace("Value %q is not numeric, skipping v%s assignment", capture.Value, tempName)
|
||||
}
|
||||
} else {
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal(capture.Name, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global %s = %f (numeric)", capture.Name, val)
|
||||
} else {
|
||||
L.SetGlobal(capture.Name, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global %s = %q (string)", capture.Name, capture.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
toLuaLogger.Debug("Finished setting capture groups as Lua variables")
|
||||
return nil
|
||||
}
|
||||
|
||||
// FromLua implements the Processor interface for RegexProcessor
|
||||
func fromLua(L *lua.LState, captureGroups []*CaptureGroup) ([]*CaptureGroup, error) {
|
||||
fromLuaLogger := logger.WithPrefix("fromLua")
|
||||
fromLuaLogger.Debug("Retrieving modifications from Lua for capture groups")
|
||||
fromLuaLogger.Trace("Initial capture groups: %v", captureGroups)
|
||||
|
||||
captureIndex := 0
|
||||
for _, capture := range captureGroups {
|
||||
if capture.Name == "" {
|
||||
capture.Name = fmt.Sprintf("%d", captureIndex+1)
|
||||
groupLogger := fromLuaLogger.WithField("originalCaptureName", capture.Name).WithField("originalValue", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group to retrieve updated value")
|
||||
|
||||
vVarName := fmt.Sprintf("v%s", capture.Name)
|
||||
sVarName := fmt.Sprintf("s%s", capture.Name)
|
||||
if capture.Name == "" {
|
||||
// This case means it was an unnamed capture group originally.
|
||||
// We need to reconstruct the original temporary name to fetch its updated value.
|
||||
// The name will be set to an integer if it was empty, then incremented.
|
||||
// So, we use the captureIndex to get the correct 'vX' and 'sX' variables.
|
||||
tempName := fmt.Sprintf("%d", captureIndex+1)
|
||||
groupLogger.Debug("Retrieving updated value for unnamed group (temp name: %q)", tempName)
|
||||
|
||||
vVarName := fmt.Sprintf("v%s", tempName)
|
||||
sVarName := fmt.Sprintf("s%s", tempName)
|
||||
captureIndex++
|
||||
|
||||
vLuaVal := L.GetGlobal(vVarName)
|
||||
sLuaVal := L.GetGlobal(sVarName)
|
||||
|
||||
groupLogger.Trace("Lua values for unnamed group: v=%v, s=%v", vLuaVal, sLuaVal)
|
||||
|
||||
if sLuaVal.Type() == lua.LTString {
|
||||
capture.Updated = sLuaVal.String()
|
||||
groupLogger.Trace("Updated value from s%s (string): %q", tempName, capture.Updated)
|
||||
}
|
||||
// Numbers have priority
|
||||
if vLuaVal.Type() == lua.LTNumber {
|
||||
capture.Updated = vLuaVal.String()
|
||||
groupLogger.Trace("Updated value from v%s (numeric): %q", tempName, capture.Updated)
|
||||
}
|
||||
} else {
|
||||
// Easy shit
|
||||
capture.Updated = L.GetGlobal(capture.Name).String()
|
||||
// Easy shit, directly use the named capture group
|
||||
updatedValue := L.GetGlobal(capture.Name)
|
||||
if updatedValue.Type() != lua.LTNil {
|
||||
capture.Updated = updatedValue.String()
|
||||
groupLogger.Trace("Updated value for named group %q: %q", capture.Name, capture.Updated)
|
||||
} else {
|
||||
groupLogger.Debug("Named capture group %q not found in Lua globals or is nil. Keeping original value.", capture.Name)
|
||||
capture.Updated = capture.Value // Keep original if not found or nil
|
||||
}
|
||||
}
|
||||
groupLogger.Debug("Finished processing capture group. Original: %q, Updated: %q", utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
}
|
||||
|
||||
fromLuaLogger.Debug("Finished retrieving modifications from Lua")
|
||||
fromLuaLogger.Trace("Final updated capture groups: %v", captureGroups)
|
||||
return captureGroups, nil
|
||||
}
|
||||
|
||||
// estimatePatternComplexity gives a rough estimate of regex pattern complexity
|
||||
// This can help identify potentially problematic patterns
|
||||
func estimatePatternComplexity(pattern string) int {
|
||||
estimateComplexityLogger := logger.WithPrefix("estimatePatternComplexity").WithField("pattern", utils.LimitString(pattern, 100))
|
||||
estimateComplexityLogger.Debug("Estimating regex pattern complexity")
|
||||
complexity := len(pattern)
|
||||
|
||||
// Add complexity for potentially expensive operations
|
||||
@@ -376,5 +495,6 @@ func estimatePatternComplexity(pattern string) int {
|
||||
complexity += strings.Count(pattern, "\\1") * 3 // Backreferences
|
||||
complexity += strings.Count(pattern, "{") * 2 // Counted repetition
|
||||
|
||||
estimateComplexityLogger.Debug("Estimated pattern complexity: %d", complexity)
|
||||
return complexity
|
||||
}
|
||||
|
||||
87
processor/regex_named_capture_test.go
Normal file
87
processor/regex_named_capture_test.go
Normal file
@@ -0,0 +1,87 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// Test named capture group fallback when value is not in Lua
|
||||
func TestNamedCaptureGroupFallback(t *testing.T) {
|
||||
pattern := `value = (?P<myvalue>\d+)`
|
||||
input := `value = 42`
|
||||
// Don't set myvalue in Lua, but do something else so we get a match
|
||||
lua := `v1 = v1 * 2 -- Set v1 but not myvalue, test fallback`
|
||||
|
||||
cmd := utils.ModifyCommand{
|
||||
Name: "test_fallback",
|
||||
Regex: pattern,
|
||||
Lua: lua,
|
||||
}
|
||||
|
||||
re := regexp.MustCompile(pattern)
|
||||
matches := re.FindStringSubmatchIndex(input)
|
||||
assert.NotNil(t, matches)
|
||||
|
||||
replacements, err := ProcessRegex(input, cmd, "test.txt")
|
||||
|
||||
// Should not error
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Since only v1 is set, myvalue should keep original
|
||||
// Should have 1 replacement for v1
|
||||
if replacements != nil {
|
||||
assert.GreaterOrEqual(t, len(replacements), 0)
|
||||
}
|
||||
}
|
||||
|
||||
// Test named capture groups with nil value in Lua
|
||||
func TestNamedCaptureGroupNilInLua(t *testing.T) {
|
||||
pattern := `value = (?P<num>\d+)`
|
||||
input := `value = 123`
|
||||
// Set num to nil explicitly, and also set v1 to get a modification
|
||||
lua := `v1 = v1 .. "_test"; num = nil -- v1 modified, num set to nil`
|
||||
|
||||
cmd := utils.ModifyCommand{
|
||||
Name: "test_nil",
|
||||
Regex: pattern,
|
||||
Lua: lua,
|
||||
}
|
||||
|
||||
replacements, err := ProcessRegex(input, cmd, "test.txt")
|
||||
|
||||
// Should not error
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Should have replacements for v1, num should fallback to original
|
||||
if replacements != nil {
|
||||
assert.GreaterOrEqual(t, len(replacements), 0)
|
||||
}
|
||||
}
|
||||
|
||||
// Test multiple named capture groups with some undefined
|
||||
func TestMixedNamedCaptureGroups(t *testing.T) {
|
||||
pattern := `(?P<key>\w+) = (?P<value>\d+)`
|
||||
input := `count = 100`
|
||||
lua := `key = key .. "_modified" -- Only modify key, leave value undefined`
|
||||
|
||||
cmd := utils.ModifyCommand{
|
||||
Name: "test_mixed",
|
||||
Regex: pattern,
|
||||
Lua: lua,
|
||||
}
|
||||
|
||||
replacements, err := ProcessRegex(input, cmd, "test.txt")
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, replacements)
|
||||
|
||||
// Apply replacements
|
||||
result, _ := utils.ExecuteModifications(replacements, input)
|
||||
|
||||
// key should be modified, value should remain unchanged
|
||||
assert.Contains(t, result, "count_modified")
|
||||
assert.Contains(t, result, "100")
|
||||
}
|
||||
@@ -2,9 +2,8 @@ package processor
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"cook/utils"
|
||||
"io"
|
||||
"modify/utils"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
@@ -31,14 +30,14 @@ func normalizeWhitespace(s string) string {
|
||||
return re.ReplaceAllString(strings.TrimSpace(s), " ")
|
||||
}
|
||||
|
||||
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||
func APIAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||
command := utils.ModifyCommand{
|
||||
Regex: regex,
|
||||
Lua: lua,
|
||||
LogLevel: "TRACE",
|
||||
}
|
||||
|
||||
commands, err := ProcessRegex(content, command)
|
||||
commands, err := ProcessRegex(content, command, "test")
|
||||
if err != nil {
|
||||
return "", 0, 0, err
|
||||
}
|
||||
@@ -80,12 +79,12 @@ func TestSimpleValueMultiplication(t *testing.T) {
|
||||
</item>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+)</value>`, "v1 = v1*1.5")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+)</value>`, "v1 = v1*1.5")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestShorthandNotation(t *testing.T) {
|
||||
@@ -101,12 +100,12 @@ func TestShorthandNotation(t *testing.T) {
|
||||
</item>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+)</value>`, "v1*1.5")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+)</value>`, "v1*1.5")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestShorthandNotationFloats(t *testing.T) {
|
||||
@@ -122,12 +121,12 @@ func TestShorthandNotationFloats(t *testing.T) {
|
||||
</item>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(\d+\.\d+)</value>`, "v1*1.5")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(\d+\.\d+)</value>`, "v1*1.5")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestArrayNotation(t *testing.T) {
|
||||
@@ -147,12 +146,12 @@ func TestArrayNotation(t *testing.T) {
|
||||
</prices>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<price>(\d+)</price>`, "v1*2")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<price>(\d+)</price>`, "v1*2")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
||||
assert.Equal(t, 3, mods, "Expected 3 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMultipleNumericMatches(t *testing.T) {
|
||||
@@ -168,12 +167,12 @@ func TestMultipleNumericMatches(t *testing.T) {
|
||||
<entry>400</entry>
|
||||
</data>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `<entry>(\d+)</entry>`, "v1*2")
|
||||
result, mods, matches, err := APIAdaptor(content, `<entry>(\d+)</entry>`, "v1*2")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
||||
assert.Equal(t, 3, mods, "Expected 3 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMultipleStringMatches(t *testing.T) {
|
||||
@@ -187,12 +186,12 @@ func TestMultipleStringMatches(t *testing.T) {
|
||||
<name>Mary_modified</name>
|
||||
</data>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `<name>([A-Za-z]+)</name>`, `s1 = s1 .. "_modified"`)
|
||||
result, mods, matches, err := APIAdaptor(content, `<name>([A-Za-z]+)</name>`, `s1 = s1 .. "_modified"`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestStringUpperCase(t *testing.T) {
|
||||
@@ -206,12 +205,12 @@ func TestStringUpperCase(t *testing.T) {
|
||||
<user>MARY</user>
|
||||
</users>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `<user>([A-Za-z]+)</user>`, `s1 = string.upper(s1)`)
|
||||
result, mods, matches, err := APIAdaptor(content, `<user>([A-Za-z]+)</user>`, `s1 = string.upper(s1)`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestStringConcatenation(t *testing.T) {
|
||||
@@ -225,15 +224,14 @@ func TestStringConcatenation(t *testing.T) {
|
||||
<product>Banana_fruit</product>
|
||||
</products>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `<product>([A-Za-z]+)</product>`, `s1 = s1 .. "_fruit"`)
|
||||
result, mods, matches, err := APIAdaptor(content, `<product>([A-Za-z]+)</product>`, `s1 = s1 .. "_fruit"`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
// Added from main_test.go
|
||||
func TestDecimalValues(t *testing.T) {
|
||||
content := `
|
||||
<config>
|
||||
@@ -253,9 +251,9 @@ func TestDecimalValues(t *testing.T) {
|
||||
`
|
||||
|
||||
regex := regexp.MustCompile(`(?s)<value>([0-9.]+)</value>.*?<multiplier>([0-9.]+)</multiplier>`)
|
||||
luaExpr := BuildLuaScript("v1 = v1 * v2")
|
||||
luaExpr := BuildLuaScript("v1 = v1 * v2", "")
|
||||
|
||||
result, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
||||
result, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
|
||||
normalizedModified := normalizeWhitespace(result)
|
||||
@@ -281,9 +279,9 @@ func TestLuaMathFunctions(t *testing.T) {
|
||||
`
|
||||
|
||||
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
||||
luaExpr := BuildLuaScript("v1 = math.sqrt(v1)")
|
||||
luaExpr := BuildLuaScript("v1 = math.sqrt(v1)", "")
|
||||
|
||||
modifiedContent, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
||||
modifiedContent, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
|
||||
normalizedModified := normalizeWhitespace(modifiedContent)
|
||||
@@ -309,9 +307,9 @@ func TestDirectAssignment(t *testing.T) {
|
||||
`
|
||||
|
||||
regex := regexp.MustCompile(`(?s)<value>(\d+)</value>`)
|
||||
luaExpr := BuildLuaScript("=0")
|
||||
luaExpr := BuildLuaScript("=0", "")
|
||||
|
||||
modifiedContent, _, _, err := ApiAdaptor(content, regex.String(), luaExpr)
|
||||
modifiedContent, _, _, err := APIAdaptor(content, regex.String(), luaExpr)
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
|
||||
normalizedModified := normalizeWhitespace(modifiedContent)
|
||||
@@ -367,10 +365,10 @@ func TestStringAndNumericOperations(t *testing.T) {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Compile the regex pattern with multiline support
|
||||
pattern := "(?s)" + tt.regexPattern
|
||||
luaExpr := BuildLuaScript(tt.luaExpression)
|
||||
luaExpr := BuildLuaScript(tt.luaExpression, "")
|
||||
|
||||
// Process with our function
|
||||
result, modCount, _, err := ApiAdaptor(tt.input, pattern, luaExpr)
|
||||
result, modCount, _, err := APIAdaptor(tt.input, pattern, luaExpr)
|
||||
assert.NoError(t, err, "Process function failed: %v", err)
|
||||
|
||||
// Check results
|
||||
@@ -428,10 +426,10 @@ func TestEdgeCases(t *testing.T) {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Make sure the regex can match across multiple lines
|
||||
pattern := "(?s)" + tt.regexPattern
|
||||
luaExpr := BuildLuaScript(tt.luaExpression)
|
||||
luaExpr := BuildLuaScript(tt.luaExpression, "")
|
||||
|
||||
// Process with our function
|
||||
result, modCount, _, err := ApiAdaptor(tt.input, pattern, luaExpr)
|
||||
result, modCount, _, err := APIAdaptor(tt.input, pattern, luaExpr)
|
||||
assert.NoError(t, err, "Process function failed: %v", err)
|
||||
|
||||
// Check results
|
||||
@@ -454,12 +452,12 @@ func TestNamedCaptureGroups(t *testing.T) {
|
||||
</item>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(?<amount>\d+)</value>`, "amount = amount * 2")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(?<amount>\d+)</value>`, "amount = amount * 2")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureGroupsNum(t *testing.T) {
|
||||
@@ -475,12 +473,12 @@ func TestNamedCaptureGroupsNum(t *testing.T) {
|
||||
</item>
|
||||
</config>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content, `(?s)<value>(?<amount>!num)</value>`, "amount = amount * 2")
|
||||
result, mods, matches, err := APIAdaptor(content, `(?s)<value>(?<amount>!num)</value>`, "amount = amount * 2")
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMultipleNamedCaptureGroups(t *testing.T) {
|
||||
@@ -496,7 +494,7 @@ func TestMultipleNamedCaptureGroups(t *testing.T) {
|
||||
<quantity>15</quantity>
|
||||
</product>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<name>(?<prodName>[^<]+)</name>.*?<price>(?<prodPrice>\d+\.\d+)</price>.*?<quantity>(?<prodQty>\d+)</quantity>`,
|
||||
`prodName = string.upper(prodName)
|
||||
prodPrice = round(prodPrice + 8, 2)
|
||||
@@ -505,7 +503,7 @@ func TestMultipleNamedCaptureGroups(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 3, matches, "Expected 3 matches, got %d", matches)
|
||||
assert.Equal(t, 3, mods, "Expected 3 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMixedIndexedAndNamedCaptures(t *testing.T) {
|
||||
@@ -519,7 +517,7 @@ func TestMixedIndexedAndNamedCaptures(t *testing.T) {
|
||||
<data>VALUE</data>
|
||||
</entry>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<id>(\d+)</id>.*?<data>(?<dataField>[^<]+)</data>`,
|
||||
`v1 = v1 * 2
|
||||
dataField = string.upper(dataField)`)
|
||||
@@ -527,7 +525,7 @@ func TestMixedIndexedAndNamedCaptures(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestComplexNestedNamedCaptures(t *testing.T) {
|
||||
@@ -551,14 +549,14 @@ func TestComplexNestedNamedCaptures(t *testing.T) {
|
||||
</contact>
|
||||
</person>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<details>.*?<name>(?<fullName>[^<]+)</name>.*?<age>(?<age>\d+)</age>`,
|
||||
`fullName = string.upper(fullName) .. " (" .. age .. ")"`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureWithVariableReadback(t *testing.T) {
|
||||
@@ -572,7 +570,7 @@ func TestNamedCaptureWithVariableReadback(t *testing.T) {
|
||||
<mana>300</mana>
|
||||
</stats>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<health>(?<hp>\d+)</health>.*?<mana>(?<mp>\d+)</mana>`,
|
||||
`hp = hp * 1.5
|
||||
mp = mp * 1.5`)
|
||||
@@ -580,7 +578,7 @@ func TestNamedCaptureWithVariableReadback(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureWithSpecialCharsInName(t *testing.T) {
|
||||
@@ -588,14 +586,14 @@ func TestNamedCaptureWithSpecialCharsInName(t *testing.T) {
|
||||
|
||||
expected := `<data value="84" min="10" max="100" />`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<data value="(?<val_1>\d+)"`,
|
||||
`val_1 = val_1 * 2`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestEmptyNamedCapture(t *testing.T) {
|
||||
@@ -603,14 +601,14 @@ func TestEmptyNamedCapture(t *testing.T) {
|
||||
|
||||
expected := `<tag attr="default" />`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`attr="(?<value>.*?)"`,
|
||||
`value = value == "" and "default" or value`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMultipleNamedCapturesInSameLine(t *testing.T) {
|
||||
@@ -618,7 +616,7 @@ func TestMultipleNamedCapturesInSameLine(t *testing.T) {
|
||||
|
||||
expected := `<rect x="20" y="40" width="200" height="100" />`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`x="(?<x>\d+)" y="(?<y>\d+)" width="(?<w>\d+)" height="(?<h>\d+)"`,
|
||||
`x = x * 2
|
||||
y = y * 2
|
||||
@@ -628,7 +626,7 @@ func TestMultipleNamedCapturesInSameLine(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 4, matches, "Expected 4 matches, got %d", matches)
|
||||
assert.Equal(t, 4, mods, "Expected 4 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestConditionalNamedCapture(t *testing.T) {
|
||||
@@ -642,14 +640,14 @@ func TestConditionalNamedCapture(t *testing.T) {
|
||||
<item status="inactive" count="10" />
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<item status="(?<status>[^"]+)" count="(?<count>\d+)"`,
|
||||
`count = status == "active" and count * 2 or count`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 matches, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestLuaFunctionsOnNamedCaptures(t *testing.T) {
|
||||
@@ -663,7 +661,7 @@ func TestLuaFunctionsOnNamedCaptures(t *testing.T) {
|
||||
<user name="JANE SMITH" role="admin" />
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<user name="(?<name>[^"]+)" role="(?<role>[^"]+)"`,
|
||||
`-- Capitalize first letters for regular users
|
||||
if role == "user" then
|
||||
@@ -681,7 +679,7 @@ func TestLuaFunctionsOnNamedCaptures(t *testing.T) {
|
||||
// might need additional transformations before comparison
|
||||
normalizedResult := normalizeWhitespace(result)
|
||||
normalizedExpected := normalizeWhitespace(expected)
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "Expected content to be different")
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureWithMath(t *testing.T) {
|
||||
@@ -693,7 +691,7 @@ func TestNamedCaptureWithMath(t *testing.T) {
|
||||
<item price="19.99" quantity="3" total="59.97" />
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<item price="(?<price>\d+\.\d+)" quantity="(?<qty>\d+)"!any$`,
|
||||
`-- Calculate and add total
|
||||
replacement = string.format('<item price="%s" quantity="%s" total="%.2f" />',
|
||||
@@ -705,7 +703,7 @@ func TestNamedCaptureWithMath(t *testing.T) {
|
||||
|
||||
result = normalizeWhitespace(result)
|
||||
expected = normalizeWhitespace(expected)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureWithGlobals(t *testing.T) {
|
||||
@@ -713,7 +711,7 @@ func TestNamedCaptureWithGlobals(t *testing.T) {
|
||||
|
||||
expected := `<temp unit="F">77</temp>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<temp unit="(?<unit>[CF]?)">(?<value>\d+)</temp>`,
|
||||
`if unit == "C" then
|
||||
value = value * 9/5 + 32
|
||||
@@ -726,7 +724,7 @@ func TestNamedCaptureWithGlobals(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestMixedDynamicAndNamedCaptures(t *testing.T) {
|
||||
@@ -740,7 +738,7 @@ func TestMixedDynamicAndNamedCaptures(t *testing.T) {
|
||||
<color rgb="0,255,0" name="GREEN" hex="#00FF00" />
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<color rgb="(?<r>\d+),(?<g>\d+),(?<b>\d+)" name="(?<colorName>[^"]+)" />`,
|
||||
`-- Uppercase the name
|
||||
colorName = string.upper(colorName)
|
||||
@@ -758,7 +756,7 @@ func TestMixedDynamicAndNamedCaptures(t *testing.T) {
|
||||
|
||||
result = normalizeWhitespace(result)
|
||||
expected = normalizeWhitespace(expected)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCapturesWithMultipleReferences(t *testing.T) {
|
||||
@@ -766,7 +764,7 @@ func TestNamedCapturesWithMultipleReferences(t *testing.T) {
|
||||
|
||||
expected := `<text format="uppercase" length="11">HELLO WORLD</text>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<text>(?<content>[^<]+)</text>`,
|
||||
`local uppercaseContent = string.upper(content)
|
||||
local contentLength = string.len(content)
|
||||
@@ -776,7 +774,7 @@ func TestNamedCapturesWithMultipleReferences(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureWithJsonData(t *testing.T) {
|
||||
@@ -784,7 +782,7 @@ func TestNamedCaptureWithJsonData(t *testing.T) {
|
||||
|
||||
expected := `<data>{"name":"JOHN","age":30}</data>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<data>(?<json>\{.*?\})</data>`,
|
||||
`-- Parse JSON (simplified, assumes valid JSON)
|
||||
local name = json:match('"name":"([^"]+)"')
|
||||
@@ -794,7 +792,7 @@ func TestNamedCaptureWithJsonData(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestNamedCaptureInXML(t *testing.T) {
|
||||
@@ -814,7 +812,7 @@ func TestNamedCaptureInXML(t *testing.T) {
|
||||
</product>
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>.*?<stock>(?<stock>\d+)</stock>`,
|
||||
`-- Add 20% to price if USD
|
||||
if currency == "USD" then
|
||||
@@ -827,7 +825,7 @@ func TestNamedCaptureInXML(t *testing.T) {
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 2, matches, "Expected 2 matches, got %d", matches)
|
||||
assert.Equal(t, 2, mods, "Expected 2 modifications, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
func TestComprehensiveNamedCaptures(t *testing.T) {
|
||||
@@ -871,7 +869,7 @@ func TestComprehensiveNamedCaptures(t *testing.T) {
|
||||
</products>
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`(?s)<product sku="(?<sku>[^"]+)" status="(?<status>[^"]+)"[^>]*>\s*<name>(?<product_name>[^<]+)</name>\s*<price currency="(?<currency>[^"]+)">(?<price>\d+\.\d+)</price>\s*<quantity>(?<qty>\d+)</quantity>`,
|
||||
`-- Only process in-stock items
|
||||
if status == "in-stock" then
|
||||
@@ -905,7 +903,7 @@ func TestComprehensiveNamedCaptures(t *testing.T) {
|
||||
// Normalize whitespace for comparison
|
||||
normalizedResult := normalizeWhitespace(result)
|
||||
normalizedExpected := normalizeWhitespace(expected)
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "Expected content to be different")
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "result mismatch")
|
||||
}
|
||||
|
||||
func TestVariousNamedCaptureFormats(t *testing.T) {
|
||||
@@ -925,7 +923,7 @@ func TestVariousNamedCaptureFormats(t *testing.T) {
|
||||
</data>
|
||||
`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`<entry id="(?<id_num>\d+)" value="(?<val>\d+)"(?: status="(?<status>[^"]*)")? />`,
|
||||
`-- Prefix the ID with "ID-"
|
||||
id_num = "ID-" .. id_num
|
||||
@@ -956,7 +954,7 @@ func TestVariousNamedCaptureFormats(t *testing.T) {
|
||||
|
||||
normalizedResult := normalizeWhitespace(result)
|
||||
normalizedExpected := normalizeWhitespace(expected)
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "Expected content to be different")
|
||||
assert.Equal(t, normalizedExpected, normalizedResult, "result mismatch")
|
||||
}
|
||||
|
||||
func TestSimpleNamedCapture(t *testing.T) {
|
||||
@@ -964,14 +962,14 @@ func TestSimpleNamedCapture(t *testing.T) {
|
||||
|
||||
expected := `<product name="WIDGET" price="19.99"/>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(content,
|
||||
result, mods, matches, err := APIAdaptor(content,
|
||||
`name="(?<product_name>[^"]+)"`,
|
||||
`product_name = string.upper(product_name)`)
|
||||
|
||||
assert.NoError(t, err, "Error processing content: %v", err)
|
||||
assert.Equal(t, 1, matches, "Expected 1 match, got %d", matches)
|
||||
assert.Equal(t, 1, mods, "Expected 1 modification, got %d", mods)
|
||||
assert.Equal(t, expected, result, "Expected content to be different")
|
||||
assert.Equal(t, expected, result, "result mismatch")
|
||||
}
|
||||
|
||||
// Pattern without "(?s)" prefix gets modified to include it
|
||||
|
||||
745
processor/surgical_json_test.go
Normal file
745
processor/surgical_json_test.go
Normal file
@@ -0,0 +1,745 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func assertProcessJSONResult(t *testing.T, content, expected string, command utils.ModifyCommand) {
|
||||
t.Helper()
|
||||
command.Raw = true
|
||||
|
||||
commands, err := ProcessJSON(content, command, "test.json")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, commands)
|
||||
|
||||
result, _ := utils.ExecuteModifications(commands, content)
|
||||
assert.Equal(t, expected, result, "result mismatch (-want +got):\n%s", cmp.Diff(expected, result))
|
||||
}
|
||||
|
||||
func TestSurgicalJSONEditing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
content string
|
||||
luaCode string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Modify single field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42,
|
||||
"description": "original"
|
||||
}`,
|
||||
luaCode: `
|
||||
data.value = 84
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 84,
|
||||
"description": "original"
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "Add new field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42
|
||||
}`,
|
||||
luaCode: `
|
||||
data.newField = "added"
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 42
|
||||
,"newField": "added"}`, // sjson.Set() adds new fields in compact format
|
||||
},
|
||||
{
|
||||
name: "Modify nested field",
|
||||
content: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": false,
|
||||
"timeout": 30
|
||||
}
|
||||
}
|
||||
}`,
|
||||
luaCode: `
|
||||
data.config.settings.enabled = true
|
||||
data.config.settings.timeout = 60
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": true,
|
||||
"timeout": 60
|
||||
}
|
||||
}
|
||||
}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: tt.luaCode,
|
||||
}
|
||||
|
||||
assertProcessJSONResult(t, tt.content, tt.expected, command)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSurgicalJSONPreservesFormatting(t *testing.T) {
|
||||
content := `{
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"Description": "",
|
||||
"DisplayName": "",
|
||||
"FlavorText": "",
|
||||
"Icon": "None",
|
||||
"MaxStack": 1,
|
||||
"Override_Glow_Icon": "None",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false
|
||||
},
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Rows": [
|
||||
{
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber",
|
||||
"Weight": 10
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
expected := `{
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"Description": "",
|
||||
"DisplayName": "",
|
||||
"FlavorText": "",
|
||||
"Icon": "None",
|
||||
"MaxStack": 1,
|
||||
"Override_Glow_Icon": "None",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false
|
||||
},
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Rows": [
|
||||
{
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber",
|
||||
"Weight": 500
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
-- Modify the weight of the first item
|
||||
data.Rows[1].Weight = 500
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
assertProcessJSONResult(t, content, expected, command)
|
||||
}
|
||||
|
||||
func TestSurgicalJSONPreservesFormattingForProcessorRecipe(t *testing.T) {
|
||||
content := `
|
||||
{
|
||||
"RowStruct": "/Script/Icarus.ProcessorRecipe",
|
||||
"Defaults": {
|
||||
"bForceDisableRecipe": false,
|
||||
"Requirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_Talents"
|
||||
},
|
||||
"SessionRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"CharacterRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"RequiredMillijoules": 2500,
|
||||
"RecipeSets": [],
|
||||
"ResourceCostMultipliers": [],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Container": {
|
||||
"Value": "None"
|
||||
},
|
||||
"ResourceInputs": [],
|
||||
"bSelectOutputItemRandomly": false,
|
||||
"bContainsContainer": false,
|
||||
"ItemIconOverride": {
|
||||
"ItemStaticData": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"ItemDynamicData": [],
|
||||
"ItemCustomStats": [],
|
||||
"CustomProperties": {
|
||||
"StaticWorldStats": [],
|
||||
"StaticWorldHeldStats": [],
|
||||
"Stats": [],
|
||||
"Alterations": [],
|
||||
"LivingItemSlots": []
|
||||
},
|
||||
"DatabaseGUID": "",
|
||||
"ItemOwnerLookupId": -1,
|
||||
"RuntimeTags": {
|
||||
"GameplayTags": []
|
||||
}
|
||||
},
|
||||
"Outputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemTemplate"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"ResourceOutputs": [],
|
||||
"Refundable": "Inherit",
|
||||
"ExperienceMultiplier": 1,
|
||||
"Audio": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CraftingAudioData"
|
||||
}
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Biofuel1",
|
||||
"RecipeSets": [
|
||||
{
|
||||
"RowName": "Composter",
|
||||
"DataTableName": "D_RecipeSets"
|
||||
}
|
||||
],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Raw_Meat",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 2,
|
||||
"DynamicProperties": []
|
||||
},
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Tree_Sap",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Outputs": [],
|
||||
"Audio": {
|
||||
"RowName": "Composter"
|
||||
},
|
||||
"ResourceOutputs": [
|
||||
{
|
||||
"Type": {
|
||||
"Value": "Biofuel"
|
||||
},
|
||||
"RequiredUnits": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
expected := `
|
||||
{
|
||||
"RowStruct": "/Script/Icarus.ProcessorRecipe",
|
||||
"Defaults": {
|
||||
"bForceDisableRecipe": false,
|
||||
"Requirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_Talents"
|
||||
},
|
||||
"SessionRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"CharacterRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"RequiredMillijoules": 2500,
|
||||
"RecipeSets": [],
|
||||
"ResourceCostMultipliers": [],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Container": {
|
||||
"Value": "None"
|
||||
},
|
||||
"ResourceInputs": [],
|
||||
"bSelectOutputItemRandomly": false,
|
||||
"bContainsContainer": false,
|
||||
"ItemIconOverride": {
|
||||
"ItemStaticData": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"ItemDynamicData": [],
|
||||
"ItemCustomStats": [],
|
||||
"CustomProperties": {
|
||||
"StaticWorldStats": [],
|
||||
"StaticWorldHeldStats": [],
|
||||
"Stats": [],
|
||||
"Alterations": [],
|
||||
"LivingItemSlots": []
|
||||
},
|
||||
"DatabaseGUID": "",
|
||||
"ItemOwnerLookupId": -1,
|
||||
"RuntimeTags": {
|
||||
"GameplayTags": []
|
||||
}
|
||||
},
|
||||
"Outputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemTemplate"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"ResourceOutputs": [],
|
||||
"Refundable": "Inherit",
|
||||
"ExperienceMultiplier": 1,
|
||||
"Audio": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CraftingAudioData"
|
||||
}
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Biofuel1",
|
||||
"RecipeSets": [
|
||||
{
|
||||
"RowName": "Composter",
|
||||
"DataTableName": "D_RecipeSets"
|
||||
}
|
||||
],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Raw_Meat",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 2,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Outputs": [],
|
||||
"Audio": {
|
||||
"RowName": "Composter"
|
||||
},
|
||||
"ResourceOutputs": [
|
||||
{
|
||||
"Type": {
|
||||
"Value": "Biofuel"
|
||||
},
|
||||
"RequiredUnits": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
-- Define regex patterns for matching recipe names
|
||||
local function matchesPattern(name, pattern)
|
||||
local matches = re(pattern, name)
|
||||
-- Check if matches table has any content (index 0 or 1 should exist if there's a match)
|
||||
return matches and (matches[0] or matches[1])
|
||||
end
|
||||
|
||||
-- Selection pattern for recipes that get multiplied
|
||||
local selectionPattern = "(?-s)(Bulk_)?(Pistol|Rifle).*?Round.*?|(Carbon|Composite)_Paste.*|(Gold|Copper)_Wire|(Ironw|Copper)_Nail|(Platinum|Steel|Cold_Steel|Titanium)_Ingot|.*?Shotgun_Shell.*?|.*_Arrow|.*_Bolt|.*_Fertilizer_?\\d*|.*_Grenade|.*_Pill|.*_Tonic|Aluminum|Ammo_Casing|Animal_Fat|Carbon_Fiber|Composites|Concrete_Mix|Cured_Leather_?\\d?|Electronics|Epoxy_?\\d?|Glass\\d?|Gunpowder\\w*|Health_.*|Titanium_Plate|Organic_Resin|Platinum_Sheath|Refined_[a-zA-Z]+|Rope|Shotgun_Casing|Steel_Bloom\\d?|Tree_Sap\\w*"
|
||||
|
||||
-- Ingot pattern for recipes that get count set to 1
|
||||
local ingotPattern = "(?-s)(Platinum|Steel|Cold_Steel|Titanium)_Ingot|Aluminum|Refined_[a-zA-Z]+|Glass\\d?"
|
||||
|
||||
local factor = 16
|
||||
local bonus = 0.5
|
||||
|
||||
for _, row in ipairs(data.Rows) do
|
||||
local recipeName = row.Name
|
||||
|
||||
-- Special case: Biofuel recipes - remove Tree_Sap input
|
||||
if string.find(recipeName, "Biofuel") then
|
||||
if row.Inputs then
|
||||
for i = #row.Inputs, 1, -1 do
|
||||
local input = row.Inputs[i]
|
||||
if input.Element and input.Element.RowName and string.find(input.Element.RowName, "Tree_Sap") then
|
||||
table.remove(row.Inputs, i)
|
||||
print("Removing input 'Tree_Sap' from processor recipe '" .. recipeName .. "'")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Ingot recipes: set input and output counts to 1
|
||||
if matchesPattern(recipeName, ingotPattern) then
|
||||
if row.Inputs then
|
||||
for _, input in ipairs(row.Inputs) do
|
||||
input.Count = 1
|
||||
end
|
||||
end
|
||||
if row.Outputs then
|
||||
for _, output in ipairs(row.Outputs) do
|
||||
output.Count = 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Selected recipes: multiply inputs by factor, outputs by factor * (1 + bonus)
|
||||
if matchesPattern(recipeName, selectionPattern) then
|
||||
if row.Inputs then
|
||||
for _, input in ipairs(row.Inputs) do
|
||||
local oldCount = input.Count
|
||||
input.Count = input.Count * factor
|
||||
print("Recipe " .. recipeName .. " Input.Count: " .. oldCount .. " -> " .. input.Count)
|
||||
end
|
||||
end
|
||||
|
||||
if row.Outputs then
|
||||
for _, output in ipairs(row.Outputs) do
|
||||
local oldCount = output.Count
|
||||
output.Count = math.floor(output.Count * factor * (1 + bonus))
|
||||
print("Recipe " .. recipeName .. " Output.Count: " .. oldCount .. " -> " .. output.Count)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
assertProcessJSONResult(t, content, expected, command)
|
||||
}
|
||||
|
||||
func TestSurgicalJSONUpdatesMaxStackValues(t *testing.T) {
|
||||
original := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 200,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
expected := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
for _, row in ipairs(data.Rows) do
|
||||
if row.MaxStack then
|
||||
if string.find(row.Name, "Carrot") or string.find(row.Name, "Potato") then
|
||||
row.MaxStack = 25
|
||||
else
|
||||
row.MaxStack = row.MaxStack * 10000
|
||||
if row.MaxStack > 1000000 then
|
||||
row.MaxStack = 1000000
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
assertProcessJSONResult(t, original, expected, command)
|
||||
}
|
||||
|
||||
func TestSurgicalJSONAddsAdditionalStatsObject(t *testing.T) {
|
||||
original := `
|
||||
{
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Deep_Mining_Drill_Biofuel",
|
||||
"Meshable": {
|
||||
"RowName": "Mesh_Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Itemable": {
|
||||
"RowName": "Item_Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Interactable": {
|
||||
"RowName": "Deployable"
|
||||
},
|
||||
"Focusable": {
|
||||
"RowName": "Focusable_1H"
|
||||
},
|
||||
"Highlightable": {
|
||||
"RowName": "Generic"
|
||||
},
|
||||
"Actionable": {
|
||||
"RowName": "Deployable"
|
||||
},
|
||||
"Usable": {
|
||||
"RowName": "Place"
|
||||
},
|
||||
"Deployable": {
|
||||
"RowName": "Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Durable": {
|
||||
"RowName": "Deployable_750"
|
||||
},
|
||||
"Inventory": {
|
||||
"RowName": "Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Decayable": {
|
||||
"RowName": "Decay_MetaItem"
|
||||
},
|
||||
"Generator": {
|
||||
"RowName": "Deep_Mining_Biofuel_Drill"
|
||||
},
|
||||
"Resource": {
|
||||
"RowName": "Simple_Internal_Flow_Only"
|
||||
},
|
||||
"Manual_Tags": {
|
||||
"GameplayTags": [
|
||||
{
|
||||
"TagName": "Item.Machine"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Generated_Tags": {
|
||||
"GameplayTags": [
|
||||
{
|
||||
"TagName": "Item.Machine"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Meshable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Itemable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Interactable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Highlightable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Actionable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Usable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Deployable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Durable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Inventory"
|
||||
}
|
||||
],
|
||||
"ParentTags": []
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
expected := `
|
||||
{
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Deep_Mining_Drill_Biofuel",
|
||||
"Meshable": {
|
||||
"RowName": "Mesh_Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Itemable": {
|
||||
"RowName": "Item_Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Interactable": {
|
||||
"RowName": "Deployable"
|
||||
},
|
||||
"Focusable": {
|
||||
"RowName": "Focusable_1H"
|
||||
},
|
||||
"Highlightable": {
|
||||
"RowName": "Generic"
|
||||
},
|
||||
"Actionable": {
|
||||
"RowName": "Deployable"
|
||||
},
|
||||
"Usable": {
|
||||
"RowName": "Place"
|
||||
},
|
||||
"Deployable": {
|
||||
"RowName": "Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Durable": {
|
||||
"RowName": "Deployable_750"
|
||||
},
|
||||
"Inventory": {
|
||||
"RowName": "Deep_Mining_Drill_Biofuel"
|
||||
},
|
||||
"Decayable": {
|
||||
"RowName": "Decay_MetaItem"
|
||||
},
|
||||
"Generator": {
|
||||
"RowName": "Deep_Mining_Biofuel_Drill"
|
||||
},
|
||||
"Resource": {
|
||||
"RowName": "Simple_Internal_Flow_Only"
|
||||
},
|
||||
"Manual_Tags": {
|
||||
"GameplayTags": [
|
||||
{
|
||||
"TagName": "Item.Machine"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Generated_Tags": {
|
||||
"GameplayTags": [
|
||||
{
|
||||
"TagName": "Item.Machine"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Meshable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Itemable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Interactable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Highlightable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Actionable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Usable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Deployable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Durable"
|
||||
},
|
||||
{
|
||||
"TagName": "Traits.Inventory"
|
||||
}
|
||||
],
|
||||
"ParentTags": []
|
||||
}
|
||||
,"AdditionalStats": {"(Value=\"BaseDeepMiningDrillSpeed_+%\")":4000}}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
for i, row in ipairs(data.Rows) do
|
||||
-- Special case: Deep_Mining_Drill_Biofuel
|
||||
if string.find(row.Name, "Deep_Mining_Drill_Biofuel") then
|
||||
print("[DEBUG] Special case: Deep_Mining_Drill_Biofuel")
|
||||
if not row.AdditionalStats then
|
||||
print("[DEBUG] Creating AdditionalStats table for Deep_Mining_Drill_Biofuel")
|
||||
row.AdditionalStats = {}
|
||||
end
|
||||
print("[DEBUG] Setting BaseDeepMiningDrillSpeed_+% to 4000")
|
||||
row.AdditionalStats["(Value=\\\"BaseDeepMiningDrillSpeed_+%\\\")"] = 4000
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
assertProcessJSONResult(t, original, expected, command)
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"io"
|
||||
"modify/logger"
|
||||
"os"
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Only modify logger in test mode
|
||||
// This checks if we're running under 'go test'
|
||||
if os.Getenv("GO_TESTING") == "1" || os.Getenv("TESTING") == "1" {
|
||||
// Initialize logger with ERROR level for tests
|
||||
// to minimize noise in test output
|
||||
logger.Init(logger.LevelError)
|
||||
|
||||
// Optionally redirect logger output to discard
|
||||
// This prevents logger output from interfering with test output
|
||||
disableTestLogs := os.Getenv("ENABLE_TEST_LOGS") != "1"
|
||||
if disableTestLogs {
|
||||
// Create a new logger that writes to nowhere
|
||||
silentLogger := logger.New(io.Discard, "", 0)
|
||||
logger.DefaultLogger = silentLogger
|
||||
}
|
||||
}
|
||||
}
|
||||
1681
processor/xml.go
Normal file
1681
processor/xml.go
Normal file
File diff suppressed because it is too large
Load Diff
148
processor/xml_fixture_test.go
Normal file
148
processor/xml_fixture_test.go
Normal file
@@ -0,0 +1,148 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func loadXMLFixture(t *testing.T, path string) string {
|
||||
t.Helper()
|
||||
|
||||
content, err := os.ReadFile(path)
|
||||
require.NoError(t, err)
|
||||
|
||||
return string(content)
|
||||
}
|
||||
|
||||
func TestProcessXMLWithAfflictionsFixture(t *testing.T) {
|
||||
original := loadXMLFixture(t, "../testfiles/Afflictions.xml")
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "double_maxstrength",
|
||||
Lua: `
|
||||
local function visit(node, fn)
|
||||
fn(node)
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child, fn)
|
||||
end
|
||||
end
|
||||
|
||||
visit(data, function(node)
|
||||
if node.tag.val == "Affliction" and node.attr.maxstrength ~= nil then
|
||||
node.attr.maxstrength = tostring(tonumber(node.attr.maxstrength.val) * 2)
|
||||
end
|
||||
end)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessXML(original, command, "Afflictions.xml")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, commands)
|
||||
|
||||
result, count := utils.ExecuteModifications(commands, original)
|
||||
assert.Contains(t, result, `maxstrength="20"`)
|
||||
assert.Contains(t, result, `maxstrength="480"`)
|
||||
assert.Contains(t, result, `maxstrength="12"`)
|
||||
assert.Contains(t, result, `<?xml`)
|
||||
assert.Positive(t, count)
|
||||
|
||||
origLines := len(strings.Split(original, "\n"))
|
||||
resultLines := len(strings.Split(result, "\n"))
|
||||
assert.Equal(t, origLines, resultLines)
|
||||
}
|
||||
|
||||
func TestProcessXMLUpdatesAfflictionsAttributes(t *testing.T) {
|
||||
original := loadXMLFixture(t, "../testfiles/Afflictions.xml")
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "increase_resistance",
|
||||
Lua: `
|
||||
local function visit(node, fn)
|
||||
fn(node)
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child, fn)
|
||||
end
|
||||
end
|
||||
|
||||
visit(data, function(node)
|
||||
if node.tag.val == "Effect" then
|
||||
if node.attr.minresistance ~= nil then
|
||||
node.attr.minresistance = tostring(tonumber(node.attr.minresistance.val) * 1.5)
|
||||
end
|
||||
if node.attr.maxresistance ~= nil then
|
||||
node.attr.maxresistance = tostring(tonumber(node.attr.maxresistance.val) * 1.5)
|
||||
end
|
||||
end
|
||||
end)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessXML(original, command, "Afflictions.xml")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, commands)
|
||||
|
||||
_, count := utils.ExecuteModifications(commands, original)
|
||||
assert.GreaterOrEqual(t, count, 10)
|
||||
}
|
||||
|
||||
func TestProcessXMLUpdatesNestedAfflictionAmounts(t *testing.T) {
|
||||
original := loadXMLFixture(t, "../testfiles/Afflictions.xml")
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "modify_effects",
|
||||
Lua: `
|
||||
local function visit(node, fn)
|
||||
fn(node)
|
||||
for _, child in ipairs(node.children or {}) do
|
||||
visit(child, fn)
|
||||
end
|
||||
end
|
||||
|
||||
visit(data, function(node)
|
||||
if node.tag.val == "ReduceAffliction" and node.attr.amount ~= nil then
|
||||
node.attr.amount = tostring(tonumber(node.attr.amount.val) * 2)
|
||||
end
|
||||
end)
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessXML(original, command, "Afflictions.xml")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, commands)
|
||||
|
||||
result, count := utils.ExecuteModifications(commands, original)
|
||||
assert.Contains(t, result, `amount="0.002"`)
|
||||
assert.GreaterOrEqual(t, count, 8)
|
||||
}
|
||||
|
||||
func TestProcessXMLWithKDLShipsFixture(t *testing.T) {
|
||||
original := loadXMLFixture(t, "../testfiles/KDL_Ships_NonTurretDefenses.xml")
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "modify_non_turret_defenses",
|
||||
Lua: `
|
||||
for _, child in ipairs(data.children) do
|
||||
print(child.tag, child.attr.size_scale)
|
||||
if child.attr.size_scale then
|
||||
child.attr.size_scale = child.attr.size_scale * 2
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
commands, err := ProcessXML(original, command, "KDL_Ships_NonTurretDefenses.xml")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, commands)
|
||||
|
||||
result, count := utils.ExecuteModifications(commands, original)
|
||||
assert.Positive(t, count)
|
||||
assert.Contains(t, result, `size_scale="2.6"`)
|
||||
}
|
||||
346
processor/xml_integration_test.go
Normal file
346
processor/xml_integration_test.go
Normal file
@@ -0,0 +1,346 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
)
|
||||
|
||||
// TestRealWorldGameXML tests with game-like XML structure
|
||||
func TestApplyXMLChangesWithGameStyleXML(t *testing.T) {
|
||||
original := `<?xml version="1.0" encoding="utf-8"?>
|
||||
<Items>
|
||||
<Item name="Fiber" identifier="Item_Fiber" category="Resource">
|
||||
<Icon texture="Items/Fiber.png" />
|
||||
<Weight value="0.01" />
|
||||
<MaxStack value="1000" />
|
||||
<Description text="Soft plant fibers useful for crafting." />
|
||||
</Item>
|
||||
<Item name="Wood" identifier="Item_Wood" category="Resource">
|
||||
<Icon texture="Items/Wood.png" />
|
||||
<Weight value="0.05" />
|
||||
<MaxStack value="500" />
|
||||
<Description text="Basic building material." />
|
||||
</Item>
|
||||
</Items>`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Modify: Double all MaxStack values and change Wood weight
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
|
||||
// Fiber MaxStack: 1000 → 2000
|
||||
fiberItem := modElem.Children[0]
|
||||
fiberMaxStack := fiberItem.Children[2]
|
||||
valueAttr := fiberMaxStack.Attributes["value"]
|
||||
valueAttr.Value = "2000"
|
||||
fiberMaxStack.Attributes["value"] = valueAttr
|
||||
|
||||
// Wood MaxStack: 500 → 1000
|
||||
woodItem := modElem.Children[1]
|
||||
woodMaxStack := woodItem.Children[2]
|
||||
valueAttr2 := woodMaxStack.Attributes["value"]
|
||||
valueAttr2.Value = "1000"
|
||||
woodMaxStack.Attributes["value"] = valueAttr2
|
||||
|
||||
// Wood Weight: 0.05 → 0.10
|
||||
woodWeight := woodItem.Children[1]
|
||||
weightAttr := woodWeight.Attributes["value"]
|
||||
weightAttr.Value = "0.10"
|
||||
woodWeight.Attributes["value"] = weightAttr
|
||||
|
||||
// Generate changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
|
||||
if len(changes) != 3 {
|
||||
t.Fatalf("Expected 3 changes, got %d", len(changes))
|
||||
}
|
||||
|
||||
// Apply
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Verify changes
|
||||
if !strings.Contains(result, `<MaxStack value="2000"`) {
|
||||
t.Errorf("Failed to update Fiber MaxStack")
|
||||
}
|
||||
if !strings.Contains(result, `<MaxStack value="1000"`) {
|
||||
t.Errorf("Failed to update Wood MaxStack")
|
||||
}
|
||||
if !strings.Contains(result, `<Weight value="0.10"`) {
|
||||
t.Errorf("Failed to update Wood Weight")
|
||||
}
|
||||
|
||||
// Verify formatting preserved (check XML declaration and indentation)
|
||||
if !strings.HasPrefix(result, `<?xml version="1.0" encoding="utf-8"?>`) {
|
||||
t.Errorf("XML declaration not preserved")
|
||||
}
|
||||
if !strings.Contains(result, "\n <Item") {
|
||||
t.Errorf("Indentation not preserved")
|
||||
}
|
||||
}
|
||||
|
||||
// TestAddRemoveMultipleChildren tests adding and removing multiple elements
|
||||
func TestAddRemoveMultipleChildren(t *testing.T) {
|
||||
original := `<inventory>
|
||||
<item name="sword" />
|
||||
<item name="shield" />
|
||||
<item name="potion" />
|
||||
<item name="scroll" />
|
||||
</inventory>`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Remove middle two items, add a new one
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
|
||||
// Remove shield and potion (indices 1 and 2)
|
||||
modElem.Children = []*XMLElement{
|
||||
modElem.Children[0], // sword
|
||||
modElem.Children[3], // scroll
|
||||
}
|
||||
|
||||
// Add a new item
|
||||
newItem := &XMLElement{
|
||||
Tag: "item",
|
||||
Attributes: map[string]XMLAttribute{
|
||||
"name": {Value: "helmet"},
|
||||
},
|
||||
Children: []*XMLElement{},
|
||||
}
|
||||
modElem.Children = append(modElem.Children, newItem)
|
||||
|
||||
// Generate changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
|
||||
// The algorithm compares by matching indices:
|
||||
// orig[0]=sword vs mod[0]=sword (no change)
|
||||
// orig[1]=shield vs mod[1]=scroll (treated as replace - shows as attribute changes)
|
||||
// orig[2]=potion vs mod[2]=helmet (treated as replace)
|
||||
// orig[3]=scroll (removed)
|
||||
// This is fine - the actual edits will be correct
|
||||
|
||||
if len(changes) == 0 {
|
||||
t.Fatalf("Expected changes, got none")
|
||||
}
|
||||
|
||||
// Apply
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Verify
|
||||
if strings.Contains(result, `name="shield"`) {
|
||||
t.Errorf("Shield not removed")
|
||||
}
|
||||
if strings.Contains(result, `name="potion"`) {
|
||||
t.Errorf("Potion not removed")
|
||||
}
|
||||
if !strings.Contains(result, `name="sword"`) {
|
||||
t.Errorf("Sword incorrectly removed")
|
||||
}
|
||||
if !strings.Contains(result, `name="scroll"`) {
|
||||
t.Errorf("Scroll incorrectly removed")
|
||||
}
|
||||
if !strings.Contains(result, `name="helmet"`) {
|
||||
t.Errorf("Helmet not added")
|
||||
}
|
||||
}
|
||||
|
||||
// TestModifyAttributesAndText tests changing both attributes and text content
|
||||
func TestModifyAttributesAndText(t *testing.T) {
|
||||
original := `<weapon>
|
||||
<item type="sword" damage="10">Iron Sword</item>
|
||||
<item type="axe" damage="15">Battle Axe</item>
|
||||
</weapon>`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Modify both items
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
|
||||
// First item: change damage and text
|
||||
item1 := modElem.Children[0]
|
||||
dmgAttr := item1.Attributes["damage"]
|
||||
dmgAttr.Value = "20"
|
||||
item1.Attributes["damage"] = dmgAttr
|
||||
item1.Text = "Steel Sword"
|
||||
|
||||
// Second item: change damage and type
|
||||
item2 := modElem.Children[1]
|
||||
dmgAttr2 := item2.Attributes["damage"]
|
||||
dmgAttr2.Value = "30"
|
||||
item2.Attributes["damage"] = dmgAttr2
|
||||
typeAttr := item2.Attributes["type"]
|
||||
typeAttr.Value = "greataxe"
|
||||
item2.Attributes["type"] = typeAttr
|
||||
|
||||
// Generate and apply changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Verify
|
||||
if !strings.Contains(result, `damage="20"`) {
|
||||
t.Errorf("First item damage not updated")
|
||||
}
|
||||
if !strings.Contains(result, "Steel Sword") {
|
||||
t.Errorf("First item text not updated")
|
||||
}
|
||||
if !strings.Contains(result, `damage="30"`) {
|
||||
t.Errorf("Second item damage not updated")
|
||||
}
|
||||
if !strings.Contains(result, `type="greataxe"`) {
|
||||
t.Errorf("Second item type not updated")
|
||||
}
|
||||
if strings.Contains(result, "Iron Sword") {
|
||||
t.Errorf("Old text still present")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSelfClosingTagPreservation tests that self-closing tags work correctly
|
||||
func TestSelfClosingTagPreservation(t *testing.T) {
|
||||
original := `<root>
|
||||
<item name="test" />
|
||||
<empty></empty>
|
||||
</root>`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Modify first item's attribute
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
item := modElem.Children[0]
|
||||
nameAttr := item.Attributes["name"]
|
||||
nameAttr.Value = "modified"
|
||||
item.Attributes["name"] = nameAttr
|
||||
|
||||
// Generate and apply changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Verify the change was made
|
||||
if !strings.Contains(result, `name="modified"`) {
|
||||
t.Errorf("Attribute not updated: %s", result)
|
||||
}
|
||||
}
|
||||
|
||||
// TestNumericAttributeModification tests numeric attribute changes
|
||||
func TestNumericAttributeModification(t *testing.T) {
|
||||
original := `<stats health="100" mana="50" stamina="75.5" />`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Double all numeric values
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
|
||||
// Helper to modify numeric attributes
|
||||
modifyNumericAttr := func(attrName string, multiplier float64) {
|
||||
if attr, exists := modElem.Attributes[attrName]; exists {
|
||||
if val, ok := parseNumeric(attr.Value); ok {
|
||||
attr.Value = formatNumeric(val * multiplier)
|
||||
modElem.Attributes[attrName] = attr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
modifyNumericAttr("health", 2.0)
|
||||
modifyNumericAttr("mana", 2.0)
|
||||
modifyNumericAttr("stamina", 2.0)
|
||||
|
||||
// Generate and apply changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
|
||||
if len(changes) != 3 {
|
||||
t.Fatalf("Expected 3 changes, got %d", len(changes))
|
||||
}
|
||||
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Verify numeric changes
|
||||
if !strings.Contains(result, `health="200"`) {
|
||||
t.Errorf("Health not doubled: %s", result)
|
||||
}
|
||||
if !strings.Contains(result, `mana="100"`) {
|
||||
t.Errorf("Mana not doubled: %s", result)
|
||||
}
|
||||
if !strings.Contains(result, `stamina="151"`) {
|
||||
t.Errorf("Stamina not doubled: %s", result)
|
||||
}
|
||||
}
|
||||
|
||||
// TestMinimalGitDiff verifies that only changed parts are modified
|
||||
func TestMinimalGitDiff(t *testing.T) {
|
||||
original := `<config>
|
||||
<setting name="volume" value="50" />
|
||||
<setting name="brightness" value="75" />
|
||||
<setting name="contrast" value="100" />
|
||||
</config>`
|
||||
|
||||
// Parse
|
||||
origElem, err := parseXMLWithPositions(original)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse: %v", err)
|
||||
}
|
||||
|
||||
// Change only brightness
|
||||
modElem := deepCopyXMLElement(origElem)
|
||||
brightnessItem := modElem.Children[1]
|
||||
valueAttr := brightnessItem.Attributes["value"]
|
||||
valueAttr.Value = "90"
|
||||
brightnessItem.Attributes["value"] = valueAttr
|
||||
|
||||
// Generate changes
|
||||
changes := findXMLChanges(origElem, modElem, "")
|
||||
|
||||
// Should be exactly 1 change
|
||||
if len(changes) != 1 {
|
||||
t.Fatalf("Expected exactly 1 change for minimal diff, got %d", len(changes))
|
||||
}
|
||||
|
||||
if changes[0].OldValue != "75" || changes[0].NewValue != "90" {
|
||||
t.Errorf("Wrong change detected: %v", changes[0])
|
||||
}
|
||||
|
||||
// Apply
|
||||
commands := applyXMLChanges(changes)
|
||||
result, _ := utils.ExecuteModifications(commands, original)
|
||||
|
||||
// Calculate diff size (rough approximation)
|
||||
diffChars := len(changes[0].OldValue) + len(changes[0].NewValue)
|
||||
if diffChars > 10 {
|
||||
t.Errorf("Diff too large: %d characters changed (expected < 10)", diffChars)
|
||||
}
|
||||
|
||||
// Verify only brightness changed
|
||||
if !strings.Contains(result, `value="50"`) {
|
||||
t.Errorf("Volume incorrectly modified")
|
||||
}
|
||||
if !strings.Contains(result, `value="90"`) {
|
||||
t.Errorf("Brightness not modified")
|
||||
}
|
||||
if !strings.Contains(result, `value="100"`) {
|
||||
t.Errorf("Contrast incorrectly modified")
|
||||
}
|
||||
}
|
||||
137
processor/xml_internal_test.go
Normal file
137
processor/xml_internal_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
func TestProcessXMLErrorPaths(t *testing.T) {
|
||||
_, err := ProcessXML(``, utils.ModifyCommand{Name: "empty_xml", XML: true, Lua: `modified=false`}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "no root element found")
|
||||
|
||||
_, err = ProcessXML(`<root><x></root>`, utils.ModifyCommand{Name: "bad_xml", XML: true, Lua: `modified=false`}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "failed to parse XML")
|
||||
|
||||
_, err = ProcessXML(`<root/>`, utils.ModifyCommand{Name: "bad_lua", XML: true, Lua: `this is not lua`}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "lua script execution failed")
|
||||
}
|
||||
|
||||
func TestXMLElementToLuaTableHandlesNilChildren(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
root := &XMLElement{Tag: "root", Name: XMLName{Local: "root"}}
|
||||
root.Children = []*XMLElement{nil, &XMLElement{Tag: "child", Name: XMLName{Local: "child"}, Parent: root}}
|
||||
|
||||
tbl := xmlElementToLuaTable(L, root, make(map[*XMLElement]*lua.LTable), make(map[*lua.LTable]*XMLElement), false)
|
||||
children, ok := tbl.RawGetString("children").(*lua.LTable)
|
||||
require.True(t, ok)
|
||||
assert.Equal(t, 2, children.Len())
|
||||
}
|
||||
|
||||
func TestProcessXMLXPathHelperErrorPaths(t *testing.T) {
|
||||
input := `<root><item value="1" /></root>`
|
||||
|
||||
_, err := ProcessXML(input, utils.ModifyCommand{
|
||||
Name: "xpath_bad_arg",
|
||||
XML: true,
|
||||
Lua: `
|
||||
xpath("not-a-node", "//item")
|
||||
modified = true
|
||||
`,
|
||||
}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "xpath first argument must be an XML node or list of XML nodes")
|
||||
|
||||
_, err = ProcessXML(input, utils.ModifyCommand{
|
||||
Name: "xpath_bad_expr",
|
||||
XML: true,
|
||||
Lua: `
|
||||
xpath(data, "//*[")
|
||||
modified = true
|
||||
`,
|
||||
}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "invalid xpath expression")
|
||||
|
||||
_, err = ProcessXML(input, utils.ModifyCommand{
|
||||
Name: "xpathrm_bad_arg",
|
||||
XML: true,
|
||||
Lua: `
|
||||
xpathrm("not-a-node", "//item")
|
||||
modified = true
|
||||
`,
|
||||
}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "xpathrm first argument must be an XML node or list of XML nodes")
|
||||
|
||||
_, err = ProcessXML(input, utils.ModifyCommand{
|
||||
Name: "xpathrm_bad_expr",
|
||||
XML: true,
|
||||
Lua: `
|
||||
xpathrm(data, "//*[")
|
||||
modified = true
|
||||
`,
|
||||
}, "test.xml")
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "invalid xpath expression")
|
||||
}
|
||||
|
||||
func TestRemoveXMLElementFromParentErrorPaths(t *testing.T) {
|
||||
assert.False(t, removeXMLElementFromParent(nil))
|
||||
|
||||
orphan := &XMLElement{Tag: "x"}
|
||||
assert.False(t, removeXMLElementFromParent(orphan))
|
||||
|
||||
parent := &XMLElement{Tag: "root"}
|
||||
child := &XMLElement{Tag: "child", Parent: parent}
|
||||
other := &XMLElement{Tag: "other", Parent: parent}
|
||||
parent.Children = []*XMLElement{other}
|
||||
assert.False(t, removeXMLElementFromParent(child))
|
||||
|
||||
parent.Children = []*XMLElement{child, other}
|
||||
assert.True(t, removeXMLElementFromParent(child))
|
||||
assert.Equal(t, []*XMLElement{other}, parent.Children)
|
||||
}
|
||||
|
||||
func TestRemoveXMLLuaNodeFromParentNoopPaths(t *testing.T) {
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
root := &XMLElement{Tag: "root"}
|
||||
child := &XMLElement{Tag: "child", Parent: root}
|
||||
|
||||
// nil target
|
||||
removeXMLLuaNodeFromParent(nil, map[*XMLElement]*lua.LTable{})
|
||||
|
||||
// missing parent mapping
|
||||
removeXMLLuaNodeFromParent(child, map[*XMLElement]*lua.LTable{})
|
||||
|
||||
parentTable := L.CreateTable(0, 1)
|
||||
childTable := L.CreateTable(0, 0)
|
||||
childrenNotTable := lua.LString("oops")
|
||||
parentTable.RawSetString("children", childrenNotTable)
|
||||
|
||||
removeXMLLuaNodeFromParent(child, map[*XMLElement]*lua.LTable{
|
||||
root: parentTable,
|
||||
child: childTable,
|
||||
})
|
||||
|
||||
// valid children table but child is not present
|
||||
children := L.CreateTable(0, 0)
|
||||
parentTable.RawSetString("children", children)
|
||||
removeXMLLuaNodeFromParent(child, map[*XMLElement]*lua.LTable{
|
||||
root: parentTable,
|
||||
child: childTable,
|
||||
})
|
||||
|
||||
assert.Equal(t, 0, children.Len())
|
||||
}
|
||||
70
processor/xml_navigator_test.go
Normal file
70
processor/xml_navigator_test.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/antchfx/xpath"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestXMLXPathNavigatorMoveToRoot(t *testing.T) {
|
||||
root := &XMLElement{Tag: "root", Name: XMLName{Local: "root"}}
|
||||
child := &XMLElement{Tag: "child", Name: XMLName{Local: "child"}, Parent: root}
|
||||
root.Children = []*XMLElement{child}
|
||||
|
||||
nav := newXMLXPathNavigator(root, child)
|
||||
assert.Equal(t, xpath.ElementNode, nav.NodeType())
|
||||
assert.Equal(t, "child", nav.LocalName())
|
||||
|
||||
nav.MoveToRoot()
|
||||
assert.Equal(t, xpath.RootNode, nav.NodeType())
|
||||
assert.Equal(t, "", nav.LocalName())
|
||||
}
|
||||
|
||||
func TestXMLXPathNavigatorMoveToParentBranches(t *testing.T) {
|
||||
root := &XMLElement{Tag: "root", Name: XMLName{Local: "root"}}
|
||||
child := &XMLElement{Tag: "child", Name: XMLName{Local: "child"}, Parent: root}
|
||||
root.Children = []*XMLElement{child}
|
||||
|
||||
t.Run("root node returns false", func(t *testing.T) {
|
||||
nav := newXMLXPathNavigator(root, root)
|
||||
nav.nodeType = xpath.RootNode
|
||||
assert.False(t, nav.MoveToParent())
|
||||
})
|
||||
|
||||
t.Run("attribute node moves to element", func(t *testing.T) {
|
||||
nav := newXMLXPathNavigator(root, child)
|
||||
nav.nodeType = xpath.AttributeNode
|
||||
nav.attrIndex = 1
|
||||
assert.True(t, nav.MoveToParent())
|
||||
assert.Equal(t, xpath.ElementNode, nav.NodeType())
|
||||
assert.Equal(t, -1, nav.attrIndex)
|
||||
})
|
||||
|
||||
t.Run("text node with nil current returns false", func(t *testing.T) {
|
||||
nav := &xmlXPathNavigator{root: root, current: nil, nodeType: xpath.TextNode, attrIndex: 0}
|
||||
assert.False(t, nav.MoveToParent())
|
||||
assert.Equal(t, xpath.ElementNode, nav.NodeType())
|
||||
assert.Equal(t, -1, nav.attrIndex)
|
||||
})
|
||||
|
||||
t.Run("element with no parent moves to root", func(t *testing.T) {
|
||||
nav := newXMLXPathNavigator(root, root)
|
||||
assert.True(t, nav.MoveToParent())
|
||||
assert.Equal(t, xpath.RootNode, nav.NodeType())
|
||||
assert.Equal(t, -1, nav.attrIndex)
|
||||
})
|
||||
|
||||
t.Run("element with parent moves up one level", func(t *testing.T) {
|
||||
nav := newXMLXPathNavigator(root, child)
|
||||
assert.True(t, nav.MoveToParent())
|
||||
assert.Equal(t, xpath.ElementNode, nav.NodeType())
|
||||
assert.Equal(t, root, nav.current)
|
||||
assert.Equal(t, -1, nav.attrIndex)
|
||||
})
|
||||
|
||||
t.Run("element with nil current returns false", func(t *testing.T) {
|
||||
nav := &xmlXPathNavigator{root: root, current: nil, nodeType: xpath.ElementNode, attrIndex: 0}
|
||||
assert.False(t, nav.MoveToParent())
|
||||
})
|
||||
}
|
||||
1445
processor/xml_test.go
Normal file
1445
processor/xml_test.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,15 @@
|
||||
package regression
|
||||
|
||||
import (
|
||||
"modify/processor"
|
||||
"modify/utils"
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||
@@ -15,7 +19,7 @@ func ApiAdaptor(content string, regex string, lua string) (string, int, int, err
|
||||
LogLevel: "TRACE",
|
||||
}
|
||||
|
||||
commands, err := processor.ProcessRegex(content, command)
|
||||
commands, err := processor.ProcessRegex(content, command, "test")
|
||||
if err != nil {
|
||||
return "", 0, 0, err
|
||||
}
|
||||
@@ -82,56 +86,92 @@ func TestTalentsMechanicOutOfRange(t *testing.T) {
|
||||
</Talent>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(given, `<Talent identifier="quickfixer">!anyvalue="(?<movementspeed>!num)"!anyvalue="(?<duration>!num)"!anyvalue="(?<repairspeed>!num)"!anyamount="(?<durationv>!num)"`, "movementspeed=round(movementspeed*1.5, 2) duration=round(duration*2, 2) repairspeed=round(repairspeed*2, 2) durationv=duration")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
if matches != 4 {
|
||||
t.Errorf("Expected 4 matches, got %d", matches)
|
||||
}
|
||||
|
||||
if mods != 4 {
|
||||
t.Errorf("Expected 4 modifications, got %d", mods)
|
||||
}
|
||||
|
||||
if result != actual {
|
||||
t.Errorf("expected %s, got %s", actual, result)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 4, matches)
|
||||
assert.Equal(t, 4, mods)
|
||||
assert.Equal(t, actual, result)
|
||||
}
|
||||
|
||||
func TestIndexExplosions_ShouldNotPanic(t *testing.T) {
|
||||
func TestIndexExplosionsDoesNotPanic(t *testing.T) {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting current working directory: %v", err)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
given, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItems.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
expected, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItemsExpected.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
result, _, _, err := ApiAdaptor(string(given), `(?-s)LightComponent!anyrange="(!num)"`, "*4")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
// We don't really care how many god damn matches there are as long as the result is correct
|
||||
// if matches != 45 {
|
||||
// t.Errorf("Expected 45 match, got %d", matches)
|
||||
// }
|
||||
//
|
||||
// if mods != 45 {
|
||||
// t.Errorf("Expected 45 modification, got %d", mods)
|
||||
// }
|
||||
|
||||
if string(result) != string(expected) {
|
||||
t.Errorf("expected %s, got %s", expected, result)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, string(expected), result)
|
||||
}
|
||||
|
||||
func TestXPathContextIsNodeRelativeInTalentsFile(t *testing.T) {
|
||||
cwd, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
|
||||
contentBytes, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "TalentsAssistant.xml"))
|
||||
require.NoError(t, err)
|
||||
content := string(contentBytes)
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "xpath_context_relative_regression",
|
||||
Lua: `
|
||||
local talentModifiers = {
|
||||
crewlayabout = function(node)
|
||||
local replaceNodes = xpath(node, "//Replace[@tag='[amount]']")
|
||||
if #replaceNodes ~= 3 then
|
||||
error(string.format("expected 3 [amount] nodes for crewlayabout, got %d", #replaceNodes))
|
||||
end
|
||||
for _, replaceNode in ipairs(replaceNodes) do
|
||||
local value = replaceNode.attr.value.val
|
||||
replaceNode.attr.value = tostring(tonumber(value) * 2)
|
||||
end
|
||||
end,
|
||||
}
|
||||
|
||||
for _, talent in ipairs(data.children) do
|
||||
local callback = talentModifiers[talent.attr.identifier.val]
|
||||
if callback then
|
||||
callback(talent)
|
||||
end
|
||||
end
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
replacements, err := processor.ProcessXML(content, command, "TalentsAssistant.xml")
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, replacements)
|
||||
|
||||
result, _ := utils.ExecuteModifications(replacements, content)
|
||||
|
||||
crewlayabout := extractTalentBlock(result, "crewlayabout")
|
||||
require.NotEmpty(t, crewlayabout)
|
||||
assert.Contains(t, crewlayabout, `value="160"`)
|
||||
assert.Contains(t, crewlayabout, `value="60"`)
|
||||
assert.Contains(t, crewlayabout, `value="40"`)
|
||||
|
||||
loyalassistant := extractTalentBlock(result, "loyalassistant")
|
||||
require.NotEmpty(t, loyalassistant)
|
||||
assert.Contains(t, loyalassistant, `value="2"`)
|
||||
}
|
||||
|
||||
func extractTalentBlock(content, identifier string) string {
|
||||
idx := strings.Index(content, `identifier="`+identifier+`"`)
|
||||
if idx < 0 {
|
||||
return ""
|
||||
}
|
||||
start := strings.LastIndex(content[:idx], "<Talent")
|
||||
if start < 0 {
|
||||
return ""
|
||||
}
|
||||
endRel := strings.Index(content[idx:], "</Talent>")
|
||||
if endRel < 0 {
|
||||
return ""
|
||||
}
|
||||
end := idx + endRel + len("</Talent>")
|
||||
return content[start:end]
|
||||
}
|
||||
|
||||
@@ -16,8 +16,7 @@ fi
|
||||
echo "Tag: $TAG"
|
||||
|
||||
echo "Building the thing..."
|
||||
go build -o chef.exe .
|
||||
go install .
|
||||
./build.sh "$TAG"
|
||||
|
||||
echo "Creating a release..."
|
||||
TOKEN="$GITEA_API_KEY"
|
||||
|
||||
54
reset.go
Normal file
54
reset.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newResetCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "reset",
|
||||
Short: "Restore all snapshotted files to original state",
|
||||
Args: cobra.NoArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
logger.Info("Resetting all files to their original state from database")
|
||||
db, err := utils.GetDB()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get database: %w", err)
|
||||
}
|
||||
if err := resetAllFiles(db); err != nil {
|
||||
return fmt.Errorf("failed to reset all files: %w", err)
|
||||
}
|
||||
logger.Info("Successfully reset all files to original state")
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func resetAllFiles(db utils.DB) error {
|
||||
resetAllFilesLogger := logger.WithPrefix("resetAllFiles")
|
||||
resetAllFilesLogger.Debug("Starting reset all files operation")
|
||||
fileSnapshots, err := db.GetAllFiles()
|
||||
if err != nil {
|
||||
resetAllFilesLogger.Error("Failed to get all file snapshots from database: %v", err)
|
||||
return err
|
||||
}
|
||||
resetAllFilesLogger.Debug("Found %d files in database to reset", len(fileSnapshots))
|
||||
|
||||
for _, fileSnapshot := range fileSnapshots {
|
||||
resetAllFilesLogger.Debug("Resetting file %q", fileSnapshot.FilePath)
|
||||
err = os.WriteFile(fileSnapshot.FilePath, fileSnapshot.FileData, 0644)
|
||||
if err != nil {
|
||||
resetAllFilesLogger.Warning("Failed to write file %q to disk: %v", fileSnapshot.FilePath, err)
|
||||
continue
|
||||
}
|
||||
resetAllFilesLogger.Debug("File %q written to disk successfully", fileSnapshot.FilePath)
|
||||
}
|
||||
resetAllFilesLogger.Debug("Finished reset all files operation")
|
||||
return nil
|
||||
}
|
||||
115
reset_test.go
Normal file
115
reset_test.go
Normal file
@@ -0,0 +1,115 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type fakeResetDB struct {
|
||||
files []utils.FileSnapshot
|
||||
err error
|
||||
}
|
||||
|
||||
func (f fakeResetDB) DB() *gorm.DB { return nil }
|
||||
func (f fakeResetDB) Raw(sql string, args ...any) *gorm.DB { return &gorm.DB{} }
|
||||
func (f fakeResetDB) SaveFile(filePath string, fileData []byte) error { return nil }
|
||||
func (f fakeResetDB) GetFile(filePath string) ([]byte, error) { return nil, f.err }
|
||||
func (f fakeResetDB) GetAllFiles() ([]utils.FileSnapshot, error) { return f.files, f.err }
|
||||
|
||||
func TestResetAllFiles(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "reset-all-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
file1 := filepath.Join(tmpDir, "file1.txt")
|
||||
file2 := filepath.Join(tmpDir, "file2.txt")
|
||||
|
||||
err = os.WriteFile(file1, []byte("original1"), 0644)
|
||||
assert.NoError(t, err)
|
||||
err = os.WriteFile(file2, []byte("original2"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
db, err := utils.GetDB()
|
||||
assert.NoError(t, err)
|
||||
defer closeTestDB(t, db)
|
||||
err = db.SaveFile(file1, []byte("original1"))
|
||||
assert.NoError(t, err)
|
||||
err = db.SaveFile(file2, []byte("original2"))
|
||||
assert.NoError(t, err)
|
||||
|
||||
err = os.WriteFile(file1, []byte("modified1"), 0644)
|
||||
assert.NoError(t, err)
|
||||
err = os.WriteFile(file2, []byte("modified2"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
data, _ := os.ReadFile(file1)
|
||||
assert.Equal(t, "modified1", string(data))
|
||||
|
||||
err = resetAllFiles(db)
|
||||
assert.NoError(t, err)
|
||||
|
||||
data, _ = os.ReadFile(file1)
|
||||
assert.Equal(t, "original1", string(data))
|
||||
|
||||
data, _ = os.ReadFile(file2)
|
||||
assert.Equal(t, "original2", string(data))
|
||||
}
|
||||
|
||||
func TestResetAllFilesGetAllFilesError(t *testing.T) {
|
||||
err := resetAllFiles(fakeResetDB{err: assert.AnError})
|
||||
assert.ErrorIs(t, err, assert.AnError)
|
||||
}
|
||||
|
||||
func TestResetAllFilesContinuesOnWriteError(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
goodFile := filepath.Join(tmpDir, "good.txt")
|
||||
badFile := filepath.Join(tmpDir, "missing", "bad.txt")
|
||||
require.NoError(t, os.WriteFile(goodFile, []byte("stale"), 0644))
|
||||
|
||||
db := fakeResetDB{files: []utils.FileSnapshot{
|
||||
{FilePath: badFile, FileData: []byte("x")},
|
||||
{FilePath: goodFile, FileData: []byte("fresh")},
|
||||
}}
|
||||
|
||||
require.NoError(t, resetAllFiles(db))
|
||||
|
||||
data, err := os.ReadFile(goodFile)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "fresh", string(data))
|
||||
}
|
||||
|
||||
func TestResetCommandRunE(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "reset-cmd-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
file := filepath.Join(tmpDir, "file.txt")
|
||||
require.NoError(t, os.WriteFile(file, []byte("original"), 0644))
|
||||
|
||||
db, err := utils.GetDB()
|
||||
require.NoError(t, err)
|
||||
defer closeTestDB(t, db)
|
||||
require.NoError(t, db.SaveFile(file, []byte("original")))
|
||||
require.NoError(t, os.WriteFile(file, []byte("modified"), 0644))
|
||||
|
||||
cmd := newResetCmd()
|
||||
require.NoError(t, cmd.RunE(cmd, nil))
|
||||
|
||||
data, err := os.ReadFile(file)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "original", string(data))
|
||||
}
|
||||
243
runchef.go
Normal file
243
runchef.go
Normal file
@@ -0,0 +1,243 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
cyutils "git.site.quack-lab.dev/dave/cyutils"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func runChef(args []string, cmd *cobra.Command) {
|
||||
parallelFlag, _ := cmd.Flags().GetInt("parallel")
|
||||
filterFlag, _ := cmd.Flags().GetString("filter")
|
||||
|
||||
db, commands, err := loadRunInputs(args, filterFlag, cmd)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
associations, err := prepareAssociations(commands, db)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
commandLoggers := buildCommandLoggers(commands)
|
||||
|
||||
startTime := time.Now()
|
||||
processAssociations(parallelFlag, associations, db, commandLoggers)
|
||||
processingTime := time.Since(startTime)
|
||||
|
||||
logRunSummary(processingTime)
|
||||
printCommandSummaryTable()
|
||||
}
|
||||
|
||||
func loadRunInputs(args []string, filterFlag string, cmd *cobra.Command) (utils.DB, []utils.ModifyCommand, error) {
|
||||
logger.Debug("Getting database connection")
|
||||
db, err := utils.GetDB()
|
||||
if err != nil {
|
||||
logger.Error("Failed to get database: %v", err)
|
||||
return nil, nil, err
|
||||
}
|
||||
logger.Debug("Database connection established")
|
||||
|
||||
logger.Debug("Loading commands from arguments")
|
||||
logger.Trace("Arguments: %v", args)
|
||||
commands, variables, err := utils.LoadCommands(args)
|
||||
if err != nil || len(commands) == 0 {
|
||||
logger.Error("Failed to load commands: %v", err)
|
||||
cmd.Usage()
|
||||
if err == nil {
|
||||
err = ErrNothingToDo
|
||||
}
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if len(variables) > 0 {
|
||||
logger.Info("Loaded %d global variables", len(variables))
|
||||
processor.SetVariables(variables)
|
||||
}
|
||||
logger.Info("Loaded %d commands", len(commands))
|
||||
|
||||
if filterFlag != "" {
|
||||
logger.Info("Filtering commands by name: %s", filterFlag)
|
||||
commands = utils.FilterCommands(commands, filterFlag)
|
||||
logger.Info("Filtered %d commands", len(commands))
|
||||
}
|
||||
|
||||
for _, command := range commands {
|
||||
logger.Trace("Command: %s", command.Name)
|
||||
if len(command.Regexes) > 0 {
|
||||
logger.Trace("Regexes: %v", command.Regexes)
|
||||
} else {
|
||||
logger.Trace("Regex: %s", command.Regex)
|
||||
}
|
||||
logger.Trace("Files: %v", command.Files)
|
||||
logger.Trace("Lua: %s", command.Lua)
|
||||
logger.Trace("NoReset: %t", command.NoReset)
|
||||
logger.Trace("Isolate: %t", command.Isolate)
|
||||
logger.Trace("LogLevel: %s", command.LogLevel)
|
||||
}
|
||||
|
||||
return db, commands, nil
|
||||
}
|
||||
|
||||
func prepareAssociations(commands []utils.ModifyCommand, db utils.DB) (map[string]utils.FileCommandAssociation, error) {
|
||||
logger.Debug("Aggregating globs and deduplicating")
|
||||
globs := utils.AggregateGlobs(commands)
|
||||
logger.Debug("Aggregated %d globs before deduplication", utils.CountGlobsBeforeDedup(commands))
|
||||
|
||||
logger.Info("Found %d unique file patterns", len(globs))
|
||||
logger.Debug("Expanding glob patterns to files")
|
||||
files, err := utils.ExpandGlobs(globs)
|
||||
if err != nil {
|
||||
logger.Error("Failed to expand file patterns: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
logger.Info("Found %d files to process", len(files))
|
||||
logger.Trace("Files to process: %v", files)
|
||||
|
||||
logger.Debug("Associating files with commands")
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
if err != nil {
|
||||
logger.Error("Failed to associate files with commands: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
logger.Debug("Files associated with commands")
|
||||
logger.Trace("File-command associations: %v", associations)
|
||||
|
||||
for file, assoc := range associations {
|
||||
cmdNames := make([]string, 0, len(assoc.Commands))
|
||||
for _, c := range assoc.Commands {
|
||||
cmdNames = append(cmdNames, c.Name)
|
||||
}
|
||||
isoNames := make([]string, 0, len(assoc.IsolateCommands))
|
||||
for _, c := range assoc.IsolateCommands {
|
||||
isoNames = append(isoNames, c.Name)
|
||||
}
|
||||
logger.Debug("File %q has %d regular and %d isolate commands", file, len(assoc.Commands), len(assoc.IsolateCommands))
|
||||
logger.Trace("\tRegular: %v", cmdNames)
|
||||
logger.Trace("\tIsolate: %v", isoNames)
|
||||
}
|
||||
|
||||
logger.Debug("Resetting files where necessary")
|
||||
err = utils.ResetWhereNecessary(associations, db)
|
||||
if err != nil {
|
||||
logger.Error("Failed to reset files where necessary: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
logger.Debug("Files reset where necessary")
|
||||
|
||||
return associations, nil
|
||||
}
|
||||
|
||||
func buildCommandLoggers(commands []utils.ModifyCommand) map[string]*logger.Logger {
|
||||
commandLoggers := make(map[string]*logger.Logger)
|
||||
for _, command := range commands {
|
||||
cmdName := command.Name
|
||||
if cmdName == "" {
|
||||
if len(command.Regex) > 20 {
|
||||
cmdName = command.Regex[:17] + "..."
|
||||
} else {
|
||||
cmdName = command.Regex
|
||||
}
|
||||
}
|
||||
|
||||
cmdLogLevel := logger.ParseLevel(command.LogLevel)
|
||||
commandLoggers[command.Name] = logger.WithField("command", cmdName)
|
||||
commandLoggers[command.Name].SetLevel(cmdLogLevel)
|
||||
|
||||
logger.Debug("Created logger for command %q with log level %s", cmdName, cmdLogLevel.String())
|
||||
}
|
||||
return commandLoggers
|
||||
}
|
||||
|
||||
func processAssociations(parallelFlag int, associations map[string]utils.FileCommandAssociation, db utils.DB, commandLoggers map[string]*logger.Logger) {
|
||||
logger.Debug("Starting file processing with %d parallel workers", parallelFlag)
|
||||
|
||||
cyutils.WithWorkers(parallelFlag, associations, func(worker int, file string, association utils.FileCommandAssociation) {
|
||||
processFile(worker, file, association, db, commandLoggers)
|
||||
})
|
||||
}
|
||||
|
||||
func processFile(worker int, file string, association utils.FileCommandAssociation, db utils.DB, commandLoggers map[string]*logger.Logger) {
|
||||
fileStartTime := time.Now()
|
||||
|
||||
logger.Debug("[worker %d] Reading file %q", worker, file)
|
||||
fileData, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
logger.Error("Failed to read file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
fileDataStr := string(fileData)
|
||||
logger.Trace("File %q content: %s", file, utils.LimitString(fileDataStr, 500))
|
||||
|
||||
isChanged := false
|
||||
logger.Debug("Running isolate commands for file %q", file)
|
||||
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr)
|
||||
if err != nil && err != ErrNothingToDo {
|
||||
logger.Error("Failed to run isolate commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
if err != ErrNothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
logger.Debug("Running other commands for file %q", file)
|
||||
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers)
|
||||
if err != nil && err != ErrNothingToDo {
|
||||
logger.Error("Failed to run other commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
if err != ErrNothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
if isChanged {
|
||||
logger.Debug("Saving file %q to database", file)
|
||||
err = db.SaveFile(file, fileData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to save file %q to database: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
logger.Debug("File %q saved to database", file)
|
||||
}
|
||||
|
||||
logger.Debug("Writing file %q", file)
|
||||
err = os.WriteFile(file, []byte(fileDataStr), 0644)
|
||||
if err != nil {
|
||||
logger.Error("Failed to write file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
logger.Debug("File %q written", file)
|
||||
|
||||
atomic.AddInt64(&stats.ProcessedFiles, 1)
|
||||
logger.Debug("[worker %d] File %q processed in %v", worker, file, time.Since(fileStartTime))
|
||||
}
|
||||
|
||||
func logRunSummary(processingTime time.Duration) {
|
||||
logger.Info("Processing completed in %v", processingTime)
|
||||
processedFiles := atomic.LoadInt64(&stats.ProcessedFiles)
|
||||
if processedFiles > 0 {
|
||||
logger.Info("Average time per file: %v", processingTime/time.Duration(processedFiles))
|
||||
}
|
||||
|
||||
totalModifications := atomic.LoadInt64(&stats.TotalModifications)
|
||||
if totalModifications == 0 {
|
||||
logger.Warning("No modifications were made in any files")
|
||||
} else {
|
||||
failedFiles := atomic.LoadInt64(&stats.FailedFiles)
|
||||
logger.Info("Operation complete! Modified %d values in %d/%d files",
|
||||
totalModifications, processedFiles, processedFiles+failedFiles)
|
||||
}
|
||||
}
|
||||
11
test_surgical.yml
Normal file
11
test_surgical.yml
Normal file
@@ -0,0 +1,11 @@
|
||||
- name: SurgicalWeightTest
|
||||
json: true
|
||||
lua: |
|
||||
-- This demonstrates surgical JSON editing
|
||||
-- Only the Weight field of Item_Fiber will be modified
|
||||
data.Rows[1].Weight = 999
|
||||
modified = true
|
||||
files:
|
||||
- 'D_Itemable.json'
|
||||
noreset: true
|
||||
loglevel: INFO
|
||||
74
testfiles/Afflictions.xml
Normal file
74
testfiles/Afflictions.xml
Normal file
@@ -0,0 +1,74 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Afflictions>
|
||||
<Affliction name="" identifier="Cozy_Fire" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="10" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="10" strengthchange="-1.0">
|
||||
<ReduceAffliction type="damage" amount="0.001" />
|
||||
<ReduceAffliction type="bleeding" amount="0.001" />
|
||||
<ReduceAffliction type="burn" amount="0.001" />
|
||||
<ReduceAffliction type="bloodloss" amount="0.001" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Cozy_Fire.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="The_Bast_Defense" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="10" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="10" strengthchange="-1.0" resistancefor="damage" minresistance="0.05" maxresistance="0.05"></Effect>
|
||||
<icon texture="%ModDir%/Placable/The_Bast_Defense.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Clairvoyance" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0" resistancefor="stun" minresistance="0.15" maxresistance="0.15"></Effect>
|
||||
<icon texture="%ModDir%/Placable/Clairvoyance.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Heart_Lamp" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="10" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="10" strengthchange="-1.0">
|
||||
<ReduceAffliction type="damage" amount="0.001" />
|
||||
<ReduceAffliction type="bleeding" amount="0.001" />
|
||||
<ReduceAffliction type="burn" amount="0.001" />
|
||||
<ReduceAffliction type="bloodloss" amount="0.001" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Heart_Lamp.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Star_in_a_Bottle_buff" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="10" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="10" strengthchange="-1.0" resistancefor="stun" minresistance="0.1" maxresistance="0.1"></Effect>
|
||||
<icon texture="%ModDir%/Placable/Star_in_a_Bottle_buff.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="HappyF" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="10" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="10" strengthchange="-1.0" resistancefor="stun" minresistance="0.05" maxresistance="0.05" minspeedmultiplier="1.1" maxspeedmultiplier="1.1"></Effect>
|
||||
<icon texture="%ModDir%/Placable/Happy.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="SharpenedF" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0">
|
||||
<StatValue stattype="MeleeAttackMultiplier" value="0.25" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Sharpened.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Sugar_RushF" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0" minspeedmultiplier="1.2" maxspeedmultiplier="1.2">
|
||||
<StatValue stattype="MeleeAttackSpeed" value="0.05" />
|
||||
<StatValue stattype="RangedAttackSpeed" value="0.05" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Sugar_Rush.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Crimson_Effigy_buff" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0" resistancefor="damage" minresistance="-0.1" maxresistance="-0.1">
|
||||
<StatValue stattype="MeleeAttackSpeed" value="0.15" />
|
||||
<StatValue stattype="RangedAttackSpeed" value="0.15" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Crimson_Effigy_buff.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Corruption_Effigy_buff" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0" minvitalitydecrease="0.2" multiplybymaxvitality="true" maxvitalitydecrease="0.2" resistancefor="damage" minresistance="0.1" maxresistance="0.1">
|
||||
<StatValue stattype="AttackMultiplier" value="0.2" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Corruption_Effigy_buff.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Effigy_of_Decay_buff" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="240" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="240" strengthchange="-1.0" resistancefor="oxygenlow" minresistance="1" maxresistance="1">
|
||||
<StatusEffect target="Character" SpeedMultiplier="1.1" OxygenAvailable="1000.0" setvalue="true" />
|
||||
<AbilityFlag flagtype="ImmuneToPressure" />
|
||||
</Effect>
|
||||
<icon texture="%ModDir%/Placable/Effigy_of_Decay_buff.png" sourcerect="0,0,64,64" origin="0,0" />
|
||||
</Affliction>
|
||||
<Affliction name="" identifier="Chlorophyte_Extractinator" type="strengthbuff" limbspecific="false" isbuff="true" maxstrength="6" hideiconafterdelay="true">
|
||||
<Effect minstrength="0" maxstrength="6" strengthchange="-1.0"></Effect>
|
||||
<icon texture="%ModDir%/Extractinator/Chlorophyte_Extractinator.png" sourcerect="0,0,144,152" origin="0,0" />
|
||||
</Affliction>
|
||||
</Afflictions>
|
||||
517
testfiles/KDL_Ships_NonTurretDefenses.xml
Normal file
517
testfiles/KDL_Ships_NonTurretDefenses.xml
Normal file
@@ -0,0 +1,517 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<root>
|
||||
<entity name="TractorArray"
|
||||
thematic_groups="OtherDefense"
|
||||
visuals="assets/_finalgamemeshes/turrets/tractorturret/tractor_turret.prefab"
|
||||
icon_name="Ships1/TractorTurret"
|
||||
size_scale="1.3"
|
||||
visuals_scale_multiplier="2" is_non_turret_defense="true"
|
||||
category="Ship"
|
||||
collision_priority="1000" construction_priority="50"
|
||||
display_name="Tractor Array"
|
||||
display_name_for_sidebar="Tractor"
|
||||
description="Locks onto nearby enemy ships, preventing them from moving (but not firing)."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Sentries,Turret"
|
||||
build_sidebar_categories_i_am_part_of="OtherDefenses" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="10"
|
||||
cost_for_ai_to_purchase="150"
|
||||
hull_points="60000" shield_points="60000" speed="Immobile"
|
||||
metal_cost="7500" energy_consumption="1500"
|
||||
fuel_use_type="FuelRadon"
|
||||
armor_mm="50" albedo="0.45" mass_tx="0.3"
|
||||
ship_or_structure_explosion_sfx="Turret_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLostOnOtherPlanet_Explosion"
|
||||
voice_group="Turret"
|
||||
fleet_membership="Planetary"
|
||||
absolute_max_cap_when_in_fleets="20"
|
||||
priority_as_ai_target="LowGradeAnnoyance" priority_as_frd_target="LowGradeAnnoyance" priority_to_protect="SlightlyLessExpendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="100" cap="6"/>
|
||||
<fleet_membership name="GeneralTurrets" ship_cap_group="OtherDefense" weight="100" cap="12"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="100" cap="24"/>
|
||||
<fleet_membership name="PlayerHomeCommand" ship_cap_group="OtherDefense" weight="100" cap="20"/>
|
||||
<fleet_membership name="PlayerEconomicCommand" ship_cap_group="OtherDefense" weight="100" cap="4"/>
|
||||
<fleet_membership name="PlayerMilitaryCommand" ship_cap_group="OtherDefense" weight="100" cap="16"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="8"/>
|
||||
<ai_ship_group_membership name="TurretTrapWormholeSentinels_TractorAmbushes" weight="5"/>
|
||||
<ai_ship_group_membership name="TurretTrapWormholeSentinels_FreeforAll" weight="33"/>
|
||||
<ai_ship_group_membership name="TurretTrapWormholeSentinels_Plasma" weight="10"/>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="100"/>
|
||||
<ai_ship_group_membership name="TractorArrays" weight="100"/>
|
||||
<ai_ship_group_membership name="TutorialNonTurretDefenses_FreeforAll" weight="100"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="100"/>
|
||||
<system name="T" display_name="Tractor Beam"
|
||||
category="Passive"
|
||||
tractor_range="Small5" tractor_count="42" tractor_count_added_per_mark="21" tractor_hits_albedo_less_than="0.4" tractor_hits_engine_gx_less_than="18"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
<entity name="EnsnarerTractorArray" copy_from="TractorArray" strength_multiplier="0.5" max_count_seeded_as_ai_non_turret_defense="5" thematic_groups="AIOtherDefense" >
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="100"/>
|
||||
<ai_ship_group_membership name="EnsnarerTractorArrays" weight="100"/>
|
||||
<ai_ship_group_membership name="EnsnarerTurretTrapWormholeSentinels_TractorAmbushes" weight="50"/>
|
||||
<system name="T" display_name="Tractor Beam"
|
||||
category="Passive"
|
||||
tractor_range="Normal4" tractor_count="63" tractor_count_added_per_mark="31" tractor_hits_albedo_less_than="0.5" tractor_hits_engine_gx_less_than="10"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="GhostTractorArray" copy_from="TractorArray" max_count_seeded_as_ai_non_turret_defense="5" thematic_groups="AIOtherDefense">
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostTractorArrays" weight="100"/>
|
||||
<ai_ship_group_membership name="GhostTurretTrapWormholeSentinels_TractorAmbushes" weight="5"/>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive" cloaking_points="2000" cloaking_points_added_per_mark="1000"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
|
||||
|
||||
|
||||
<entity name="ForcefieldGenerator"
|
||||
thematic_groups="OtherDefense" npc_ship_cap_type="SpecialDefenses"
|
||||
tags="ForcefieldGenerator"
|
||||
visuals="assets/_finalgamemeshes/buildings/forcefieldgenerator/forcefieldgeneratorlod0.prefab"
|
||||
icon_name="Ships1/Shield_Generator"
|
||||
category="Ship"
|
||||
size_scale="1"
|
||||
visuals_scale_multiplier="4" is_stationary_forcefield="true" barred_from_vertex_animated_phasing="true"
|
||||
collision_priority="20000" construction_priority="400" emergency_repair_priority_if_below_percentage="70" emergency_repair_only_if_engineer_modulus_of="4"
|
||||
shield_size_scale="5"
|
||||
display_name="Forcefield Generator"
|
||||
display_name_for_sidebar="Forcefield"
|
||||
description="Protects nearby ships and structures. Units protected by this will deal half the usual damage. Slightly mobile, and thus vulnerable to being moved by ships with the Norris Effect."
|
||||
behavior="Stationary"
|
||||
voice_group="Turret"
|
||||
moves_back_after_being_norrised="true"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Forcefields"
|
||||
build_sidebar_categories_i_am_part_of="OtherDefenses" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
cost_for_ai_to_purchase="2000"
|
||||
hull_points="50000" shield_points="350000" speed="AlmostNone"
|
||||
metal_cost="80000" energy_consumption="7500"
|
||||
armor_mm="85" albedo="0.45" engine_gx="3" mass_tx="5"
|
||||
fleet_membership="Planetary"
|
||||
ship_or_structure_explosion_sfx="StructureMidsize_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="StructureLostOnOtherPlanet_Explosion"
|
||||
absolute_max_cap_when_in_fleets="6"
|
||||
priority_as_ai_target="ShieldGenerator" priority_as_frd_target="NotDangerous" priority_to_protect="MidFavoriteToy"
|
||||
>
|
||||
<fleet_membership name="PlayerEconomicCommand" ship_cap_group="OtherDefense" weight="100" cap="2"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="3"/>
|
||||
<fleet_membership name="PlayerMilitaryCommand" ship_cap_group="OtherDefense" weight="100" cap="4"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="500"/>
|
||||
</entity>
|
||||
|
||||
<entity name="StartingForcefieldGenerator" thematic_groups="OtherDefense,Human"
|
||||
display_name="Home Forcefield Generator"
|
||||
display_name_for_sidebar="Home Forcefield"
|
||||
tags="StartingForcefieldGenerator"
|
||||
copy_from="ForcefieldGenerator" starting_mark_level="Mark1" metal_cost="160000" hull_points="100000" shield_points="500000" shield_size_scale="5" mass_tx="6"
|
||||
destroys_self_until_not_over_ship_cap_if_planetary_command="false" is_auto_scrapping_skipped_when_command_station_type_changes="true" absolute_max_cap_when_in_fleets="3"
|
||||
collision_priority="11000" construction_priority="400" mark_level_scale_style="PlayerHomeForcefield" speed="Immobile" moves_back_after_being_norrised="false"
|
||||
description="Protects nearby ships and structures. Units protected by this will deal half the usual damage. Completely immobile, thus unable to be pushed by the Norris Effect."
|
||||
>
|
||||
<fleet_membership name="PlayerHomeCommand" ship_cap_group="OtherDefense" weight="100" cap="1"/>
|
||||
</entity>
|
||||
|
||||
<entity name="OutguardForcefieldGenerator" copy_from="ForcefieldGenerator" starting_mark_level="Mark2" tech_upgrades_that_benefit_me="Expatriate" thematic_groups="OtherDefense,Outguard"
|
||||
display_name="Outguard Forcefield Generator"
|
||||
display_name_for_sidebar="Forcefield"
|
||||
shield_points="500000" my_forcefield_has_no_penalty_for_enemies_firing_out_of_it="true"
|
||||
collision_priority="9000" construction_priority="400" absolute_max_cap_when_in_fleets="6" speed="Immobile" moves_back_after_being_norrised="false"
|
||||
description="Protects nearby ships and structures. Completely immobile, thus unable to be pushed by the Norris Effect."
|
||||
>
|
||||
</entity>
|
||||
|
||||
<entity name="AIForcefieldGenerator" copy_from="ForcefieldGenerator" max_mark_level="7" thematic_groups="AIOtherDefense"
|
||||
display_name="AI Forcefield Generator"
|
||||
display_name_for_sidebar="Forcefield"
|
||||
shield_points="500000" my_forcefield_has_no_penalty_for_enemies_firing_out_of_it="true"
|
||||
collision_priority="9000" construction_priority="400" absolute_max_cap_when_in_fleets="6" moves_back_after_being_norrised="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="2"
|
||||
description="Protects nearby ships and structures."
|
||||
>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="8"/>
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="8"/>
|
||||
<ai_ship_group_membership name="Forcefields" weight="100"/>
|
||||
</entity>
|
||||
|
||||
<entity name="ImmobileForcefieldGenerator" copy_from="ForcefieldGenerator"
|
||||
display_name="Immobile Forcefield Generator"
|
||||
display_name_for_sidebar="Forcefield Immob"
|
||||
speed="Immobile" construction_priority="400" moves_back_after_being_norrised="false"
|
||||
description="Protects nearby ships and structures. Units protected by this will deal half the usual damage. Completely immobile, thus unable to be pushed by the Norris Effect.">
|
||||
<fleet_membership name="PlayerMilitaryCommand" ship_cap_group="OtherDefense" weight="100" cap="0"/>
|
||||
</entity>
|
||||
|
||||
<entity name="ZenithForcefieldGenerator" copy_from="ForcefieldGenerator" is_auto_scrapping_skipped_when_command_station_type_changes="true" thematic_groups="OtherDefense,Zenith"
|
||||
metal_cost="800000" display_name="Zenith Forcefield Generator"
|
||||
display_name_for_sidebar="Forcefield Zenith"
|
||||
description="Majorly upgraded forcefield that protects nearby ships and structures. Units protected by this will deal half the usual damage. Completely immobile, thus unable to be pushed by the Norris Effect. The Trader won't sell you this if you already have two in the galaxy."
|
||||
collision_priority="11000" construction_priority="400" starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Alien"
|
||||
shield_points="1050000" shield_size_scale="6.3" absolute_max_cap_when_in_fleets="1"
|
||||
build_sidebar_categories_i_am_part_of="OtherDefenses" destroys_self_until_not_over_ship_cap_if_planetary_command="false"
|
||||
base_galaxy_wide_cap_for_players_constructing="2" galaxy_wide_cap_match_string="ZFFZT"
|
||||
speed="Immobile" moves_back_after_being_norrised="false"
|
||||
>
|
||||
<fleet_membership name="ZenithPurchases" ship_cap_group="OtherDefense" weight="100" cap="1"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="1000"/>
|
||||
</entity>
|
||||
|
||||
|
||||
|
||||
|
||||
<entity name="TachyonArray"
|
||||
thematic_groups="OtherDefense"
|
||||
visuals="assets/_finalgamemeshes/turrets/tachyonarray/tachyonarray.prefab"
|
||||
icon_name="Ships1/AITachyonSentinel"
|
||||
size_scale="1.5"
|
||||
visuals_scale_multiplier="2" is_non_turret_defense="true"
|
||||
category="Ship"
|
||||
collision_priority="1000"
|
||||
display_name="Tachyon Array"
|
||||
display_name_for_sidebar="Tachyon"
|
||||
description="Reveals nearby cloaked enemy ships."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Sentries,Turret"
|
||||
build_sidebar_categories_i_am_part_of="OtherDefenses" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
cost_for_ai_to_purchase="200"
|
||||
hull_points="19000" shield_points="30000" speed="Immobile"
|
||||
metal_cost="5000" energy_consumption="100"
|
||||
fuel_use_type="FuelRadon" fuel_use="200"
|
||||
armor_mm="50" albedo="0.45" mass_tx="0.3"
|
||||
ship_or_structure_explosion_sfx="StructureMidsize_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="StructureLostOnOtherPlanet_Explosion"
|
||||
voice_group="Turret"
|
||||
fleet_membership="Planetary"
|
||||
absolute_max_cap_when_in_fleets="12"
|
||||
priority_as_ai_target="NotDangerous" priority_as_frd_target="NotDangerous" priority_to_protect="SlightlyLessExpendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="100" cap="4"/>
|
||||
<fleet_membership name="GeneralTurrets" ship_cap_group="OtherDefense" weight="15" cap="6"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="15" cap="8"/>
|
||||
<fleet_membership name="PlayerHomeCommand" ship_cap_group="OtherDefense" weight="100" cap="6"/>
|
||||
<fleet_membership name="PlayerMilitaryCommand" ship_cap_group="OtherDefense" weight="100" cap="4"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="6"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="100"/>
|
||||
<system name="T" display_name="Tachyon Array"
|
||||
category="Passive" tachyon_points="100" tachyon_range="Normal1" tachyon_points_added_per_mark="50" tachyon_hits_albedo_less_than="0.75"></system>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive" min_mark_level_to_function="2"
|
||||
cloaking_points="0" cloaking_points_added_per_mark="600"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="AITachyonArray" copy_from="TachyonArray" display_name="AI Tachyon Array" max_count_seeded_as_ai_non_turret_defense="5" thematic_groups="AIOtherDefense"
|
||||
absolute_max_cap_when_in_fleets="6"
|
||||
>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="TutorialNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive" max_mark_level_to_function="0"
|
||||
cloaking_points="0" cloaking_points_added_per_mark="600"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
|
||||
|
||||
|
||||
<entity name="FocusedGravityGenerator"
|
||||
thematic_groups="OtherDefense"
|
||||
visuals="assets/_finalgamemeshes/buildings/focusedgravitygenerator/focusedgravitygenerator.prefab"
|
||||
icon_name="Ships1/FocusedGravityGenerator"
|
||||
category="Ship"
|
||||
size_scale="2.40"
|
||||
visuals_scale_multiplier="4" is_non_turret_defense="true"
|
||||
display_name="Focused Gravity Generator"
|
||||
display_name_for_sidebar="Gravity Gen"
|
||||
description="Slows down hostile ships in an area around itself. The effect stacks."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Sentries,Turret"
|
||||
build_sidebar_categories_i_am_part_of="OtherDefenses" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="3"
|
||||
cost_for_ai_to_purchase="500"
|
||||
hull_points="60000" shield_points="60000" speed="Immobile"
|
||||
metal_cost="10000" energy_consumption="1200"
|
||||
fuel_use_type="FuelRadon"
|
||||
armor_mm="50" albedo="0.45" mass_tx="0.3"
|
||||
ship_or_structure_explosion_sfx="Turret_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLostOnOtherPlanet_Explosion"
|
||||
voice_group="Turret"
|
||||
fleet_membership="Planetary"
|
||||
absolute_max_cap_when_in_fleets="12"
|
||||
priority_as_ai_target="BigTurret" priority_as_frd_target="LowGradeAnnoyance" priority_to_protect="SlightlyLessExpendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="30" cap="4"/>
|
||||
<fleet_membership name="GeneralTurrets" ship_cap_group="OtherDefense" weight="30" cap="6"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="30" cap="8"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="2"/>
|
||||
<fleet_membership name="PlayerEconomicCommand" ship_cap_group="OtherDefense" weight="100" cap="1"/>
|
||||
<ai_ship_group_membership name="TurretTrapWormholeSentinels_GravityPikes" weight="1"/>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="FocusedGravityGenerators" weight="10"/>
|
||||
<ai_ship_group_membership name="TutorialNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="100"/>
|
||||
<system name="G" display_name="Gravitic Core"
|
||||
category="Passive"
|
||||
base_gravity_speed_multiplier="0.5" subtracted_gravity_speed_multiplier_per_mark="0" gravity_hits_engine_gx_less_than="14" gravity_range="Normal6"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="EnsnarerFocusedGravityGenerator" copy_from="FocusedGravityGenerator" >
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="EnsnarerFocusedGravityGenerators" weight="100"/>
|
||||
<ai_ship_group_membership name="EnsnarerTurretTrapWormholeSentinels_GravityPikes" weight="100"/>
|
||||
<system name="T" display_name="Tractor Beam"
|
||||
category="Passive"
|
||||
tractor_range="Normal4" tractor_count="42" tractor_count_added_per_mark="21" tractor_hits_albedo_less_than="0.5" tractor_hits_engine_gx_less_than="10"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="GhostFocusedGravityGenerator" copy_from="FocusedGravityGenerator" >
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostFocusedGravityGenerators" weight="100"/>
|
||||
<ai_ship_group_membership name="GhostTurretTrapWormholeSentinels_GravityPikes" weight="100"/>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive" cloaking_points="2000" cloaking_points_added_per_mark="1000"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
|
||||
|
||||
|
||||
<entity name="Minefield"
|
||||
thematic_groups="Mine"
|
||||
visuals="Assets/_FinalGameMeshes/Specials/Mines/MinefieldRegular.prefab"
|
||||
icon_name="Ships1/Minefield"
|
||||
special_entity_type="Minefield" category="Ship"
|
||||
size_scale="2.5"
|
||||
visuals_scale_multiplier="0.8"
|
||||
collision_priority="1"
|
||||
display_name="Minefield"
|
||||
display_name_for_sidebar="Mine"
|
||||
description="Cloaked explosive that detonates for high damage against a single nearby target, losing 10% of its maximum health each time."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Minefields,Generalist"
|
||||
build_sidebar_categories_i_am_part_of="Minefields" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="15"
|
||||
cost_for_ai_to_purchase="100" strength_multiplier="0.1"
|
||||
hull_points="100" shield_points="0" speed="Immobile"
|
||||
metal_cost="4000" energy_consumption="400"
|
||||
armor_mm="30" albedo="0.3" mass_tx="0.3"
|
||||
voice_group="Silent"
|
||||
fleet_membership="Planetary"
|
||||
ship_or_structure_explosion_sfx="Turret_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLostOnOtherPlanet_Explosion"
|
||||
absolute_max_cap_when_in_fleets="200"
|
||||
priority_as_ai_target="NormalTurret" priority_as_frd_target="NormalTurret" priority_to_protect="Expendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="100" cap="15"/>
|
||||
<fleet_membership name="Minelayer" ship_cap_group="OtherDefense" weight="100" cap="32"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="10" cap="14"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="150"/>
|
||||
<system name="B4" display_name="Mine"
|
||||
category="Weapon" firing_timing="OnlyInRange"
|
||||
damage_per_shot="20000" range="Tiny3" shot_speed="Instant" rate_of_fire="Devourer" fires_salvos_sequentially="true"
|
||||
shot_type_data="ExplosiveFlakBurst" cloaking_percent_loss_from_firing="0.05"
|
||||
health_change_by_max_health_divided_by_this_per_attack="-10"
|
||||
>
|
||||
</system>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive"
|
||||
cloaking_points="2000" cloaking_points_added_per_mark="1000"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="AIMinefield" copy_from="Minefield" only_cloaked_when_owning_planet="true" thematic_groups="Hidden"
|
||||
>
|
||||
<ai_ship_group_membership name="BasicMinefieldWormholeSentinels" weight="100"/>
|
||||
<ai_ship_group_membership name="Mines" weight="100"/>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
</entity>
|
||||
|
||||
|
||||
|
||||
<entity name="AreaMinefield"
|
||||
thematic_groups="Mine"
|
||||
visuals="Assets/_FinalGameMeshes/Specials/Mines/MinefieldArea.prefab"
|
||||
icon_name="Ships1/Minefield" icon_overlay="Overlays1/Minefield_Grenade"
|
||||
special_entity_type="Minefield" category="Ship"
|
||||
size_scale="2.5"
|
||||
visuals_scale_multiplier="0.8"
|
||||
collision_priority="1"
|
||||
display_name="Area Minefield"
|
||||
display_name_for_sidebar="Area Mine"
|
||||
description="Cloaked explosive that detonates for high damage against nearby targets, losing 20% of its maximum health each time."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Minefields,Splash"
|
||||
build_sidebar_categories_i_am_part_of="Minefields" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="15"
|
||||
cost_for_ai_to_purchase="100" strength_multiplier="0.2"
|
||||
hull_points="100" shield_points="0" speed="Immobile"
|
||||
metal_cost="4000" energy_consumption="400"
|
||||
armor_mm="30" albedo="0.3" mass_tx="0.3"
|
||||
voice_group="Silent"
|
||||
fleet_membership="Planetary"
|
||||
ship_or_structure_explosion_sfx="Turret_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLostOnOtherPlanet_Explosion"
|
||||
absolute_max_cap_when_in_fleets="200"
|
||||
priority_as_ai_target="NormalTurret" priority_as_frd_target="NormalTurret" priority_to_protect="Expendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="100" cap="20"/>
|
||||
<fleet_membership name="Minelayer" ship_cap_group="OtherDefense" weight="100" cap="32"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="10" cap="14"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="150"/>
|
||||
<system name="W1" display_name="Area Mine"
|
||||
category="Weapon" firing_timing="OnlyInRange"
|
||||
damage_per_shot="1500" range="Tiny3" shot_speed="Instant" rate_of_fire="Devourer" fires_salvos_sequentially="true"
|
||||
shot_type_data="ExplosiveFlakBurst"
|
||||
area_of_effect_size="1200"
|
||||
maximum_number_of_targets_hit_per_shot="20" cloaking_percent_loss_from_firing="0.05"
|
||||
health_change_by_max_health_divided_by_this_per_attack="-5"
|
||||
>
|
||||
</system>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive"
|
||||
cloaking_points="2000" cloaking_points_added_per_mark="1000"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="AIAreaMinefield" copy_from="AreaMinefield" only_cloaked_when_owning_planet="true" thematic_groups="Hidden"
|
||||
>
|
||||
<ai_ship_group_membership name="BasicMinefieldWormholeSentinels" weight="5"/>
|
||||
<ai_ship_group_membership name="Mines" weight="100"/>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
</entity>
|
||||
|
||||
<entity name="ParalysisMinefield"
|
||||
thematic_groups="Mine"
|
||||
visuals="Assets/_FinalGameMeshes/Specials/Mines/MinefieldParalysis.prefab"
|
||||
icon_name="Ships1/Minefield" icon_overlay="Overlays1/Minefield_Paralysis"
|
||||
special_entity_type="Minefield" category="Ship"
|
||||
size_scale="2.5"
|
||||
collision_priority="1"
|
||||
visuals_scale_multiplier="0.8"
|
||||
display_name="Paralysis Minefield"
|
||||
display_name_for_sidebar="Paralysis Mine"
|
||||
description="Cloaked explosive that detonates for low damage and paralysis against nearby targets, losing 20% of its maximum health each time."
|
||||
behavior="Stationary"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Minefields,Subterfuge"
|
||||
build_sidebar_categories_i_am_part_of="Minefields" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
max_count_seeded_as_ai_non_turret_defense="15"
|
||||
cost_for_ai_to_purchase="100" strength_multiplier="0.2"
|
||||
hull_points="100" shield_points="0" speed="Immobile"
|
||||
metal_cost="4000" energy_consumption="400"
|
||||
armor_mm="30" albedo="0.3" mass_tx="0.3"
|
||||
voice_group="Silent"
|
||||
fleet_membership="Planetary"
|
||||
ship_or_structure_explosion_sfx="Turret_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLostOnOtherPlanet_Explosion"
|
||||
absolute_max_cap_when_in_fleets="200"
|
||||
priority_as_ai_target="NormalTurret" priority_as_frd_target="NormalTurret" priority_to_protect="Expendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="100" cap="20"/>
|
||||
<fleet_membership name="Minelayer" ship_cap_group="OtherDefense" weight="100" cap="32"/>
|
||||
<fleet_membership name="OtherDefenseFocus" ship_cap_group="OtherDefense" weight="10" cap="14"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="20"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="150"/>
|
||||
<system name="W1" display_name="Paralysis Mine"
|
||||
category="Weapon" firing_timing="OnlyInRange"
|
||||
damage_per_shot="100" range="Tiny3" shot_speed="Instant" rate_of_fire="Devourer" fires_salvos_sequentially="true"
|
||||
base_paralysis_seconds_per_shot="4" added_paralysis_seconds_per_shot_per_mark="2" paralysis_to_ships_mass_tx_less_than="4"
|
||||
shot_type_data="ExplosiveFlakBurst" added_target_evaluator="ParalyzerAsPrimary"
|
||||
area_of_effect_size="1200"
|
||||
maximum_number_of_targets_hit_per_shot="20" cloaking_percent_loss_from_firing="0.05"
|
||||
health_change_by_max_health_divided_by_this_per_attack="-5"
|
||||
>
|
||||
</system>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive"
|
||||
cloaking_points="2000" cloaking_points_added_per_mark="1000"
|
||||
>
|
||||
</system>
|
||||
</entity>
|
||||
|
||||
<entity name="AIParalysisMinefield" copy_from="ParalysisMinefield" only_cloaked_when_owning_planet="true" thematic_groups="Hidden"
|
||||
>
|
||||
<ai_ship_group_membership name="BasicMinefieldWormholeSentinels" weight="1"/>
|
||||
<ai_ship_group_membership name="Mines" weight="100"/>
|
||||
<ai_ship_group_membership name="AnyNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="EnsnarerNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
<ai_ship_group_membership name="GhostNonTurretDefenses_FreeforAll" weight="10"/>
|
||||
</entity>
|
||||
|
||||
|
||||
<entity name="SentryFrigate"
|
||||
thematic_groups="Support,OtherDefense"
|
||||
special_entity_type="Frigate" gimbal_icon_size_multiplier="1.25"
|
||||
visuals="assets/_finalgamemeshes/starships/scoutstarship/scoutstarship.prefab"
|
||||
icon_name="Ships1/Starship_Light" icon_overlay="Overlays1/Starship_Sentry"
|
||||
size_scale="4.8"
|
||||
visuals_scale_multiplier="2.4"
|
||||
category="Ship"
|
||||
y_offset_of_icon="10"
|
||||
collision_priority="500"
|
||||
display_name="Sentry Frigate"
|
||||
display_name_for_sidebar="Sentry FFL"
|
||||
description="Excellent at revealing cloaked enemies."
|
||||
build_sidebar_categories_i_am_part_of="StationKeepers" destroys_self_until_not_over_ship_cap_if_planetary_command="true"
|
||||
starting_mark_level="Mark1" tech_upgrades_that_benefit_me="Sentries"
|
||||
behavior="Attacker_Full"
|
||||
cost_for_ai_to_purchase="600"
|
||||
hull_points="25000" shield_points="10000" speed="VeryHigh1"
|
||||
metal_cost="40000" energy_consumption="5000"
|
||||
fuel_use_type="FuelRadon"
|
||||
armor_mm="20" engine_gx="20" albedo="0.7" mass_tx="4"
|
||||
voice_group="Starship"
|
||||
fleet_membership="Planetary"
|
||||
ship_or_structure_explosion_sfx="ShipStarship_Explosion"
|
||||
ship_or_structure_explosion_if_on_other_planet_sfx="ShipLargeLostOnOtherPlanet_Explosion"
|
||||
ship_or_structure_explosion_sfx_happens_if_not_mine_and_on_other_planet="true"
|
||||
absolute_max_cap_when_in_fleets="4"
|
||||
priority_as_ai_target="NormalStarship" priority_as_frd_target="NotDangerous" priority_to_protect="SlightlyLessExpendable"
|
||||
>
|
||||
<fleet_membership name="AddedToCommandStation" ship_cap_group="OtherDefense" weight="25" cap="2"/>
|
||||
<fleet_membership name="PlayerMilitaryCommand" ship_cap_group="OtherDefense" weight="100" cap="2"/>
|
||||
<fleet_membership name="PlayerLogisticalCommand" ship_cap_group="OtherDefense" weight="100" cap="2"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="500"/>
|
||||
<system name="T" display_name="Stronger Tachyon Array"
|
||||
category="Passive" tachyon_points="50" tachyon_points_added_per_mark="25" tachyon_range="High1" tachyon_hits_albedo_less_than="1.1"></system>
|
||||
<system name="C" display_name="Cloaking Device"
|
||||
category="Passive" cloaking_points="2000" ></system>
|
||||
</entity>
|
||||
|
||||
<!--Note that build_sidebar_categories_i_am_part_of won't be actually relevant on the combat sentry frigates, since they're part of the fleets a different way.
|
||||
They are being used in the "Frigate" ship cap category, too, since in the fleets that they are in that is basically the role they play
|
||||
even though they have no guns. The only purpose of noting "frigate" is to specify which "type cap" groups they are in, anyway.-->
|
||||
<entity name="CombatSentryFrigate" copy_from="SentryFrigate" thematic_groups="Support"
|
||||
display_name="Combat Sentry Frigate"
|
||||
display_name_for_sidebar="Combat Sentry FFL" fleet_membership="CrossPlanetary"
|
||||
hull_points="30000" shield_points="15000" never_leaves_remains="true" immune_to_swapping_between_fleets="true" is_combatant_despite_no_weapons="true"
|
||||
absolute_max_cap_when_in_fleets="4"
|
||||
fuel_use_type="FuelArgon"
|
||||
>
|
||||
<system name="T" display_name="Weaker Tachyon Array"
|
||||
category="Passive" tachyon_points="50" tachyon_points_added_per_mark="25" tachyon_range="High1" tachyon_hits_albedo_less_than="0.75"></system>
|
||||
<fleet_membership name="CombatSentryFrigates" ship_cap_group="Frigate" weight="100" cap="2"/>
|
||||
<metal_flow purpose="SelfConstruction" throughput="0" increase_per_mark_level="0"/><!--Make these build by not being able to construct.-->
|
||||
</entity>
|
||||
|
||||
|
||||
</root>
|
||||
770
testfiles/TalentsAssistant.xml
Normal file
770
testfiles/TalentsAssistant.xml
Normal file
@@ -0,0 +1,770 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Talents>
|
||||
<Talent identifier="disloyalscum">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="0,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.disloyalscum">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToApprenticeship invert="true">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="disloyalscum" strength="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToApprenticeship>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="loyalassistant">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,3" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.loyalassistant">
|
||||
<Replace tag="[amount]" value="2" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToApprenticeship>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="loyalassistant" strength="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToApprenticeship>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="journeyman">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="2,3" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.journeyman">
|
||||
<Replace tag="[skillamount]" value="10" color="gui.green"/>
|
||||
<Replace tag="[skillspeedboost]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToApprenticeship>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="journeyman" strength="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToApprenticeship>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasTalent identifier="apprenticeship_captain" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="HelmSkillBonus" value="10"/>
|
||||
<CharacterAbilityGiveStat stattype="HelmSkillGainSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasTalent identifier="apprenticeship_doctor" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MedicalSkillBonus" value="10"/>
|
||||
<CharacterAbilityGiveStat stattype="MedicalSkillGainSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasTalent identifier="apprenticeship_security" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="WeaponsSkillBonus" value="10"/>
|
||||
<CharacterAbilityGiveStat stattype="WeaponsSkillGainSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasTalent identifier="apprenticeship_engineer" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="ElectricalSkillBonus" value="10"/>
|
||||
<CharacterAbilityGiveStat stattype="ElectricalSkillGainSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasTalent identifier="apprenticeship_mechanic" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalSkillBonus" value="10"/>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalSkillGainSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="3,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.apprenticeshipbase"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship_captain" nameidentifier="jobname.captain" coloroverride="0.65,0.73,0.8">
|
||||
<Icon texture="Content/UI/MainIconsAtlas.png" sourcerect="384,256,128,128" origin="0.5,0.5"/>
|
||||
<Description tag="talentdescription.apprenticeshipjob">
|
||||
<Replace tag="[job]" value="jobname.captain" color="0.65,0.73,0.84"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalskill">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.helmskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="Apprenticeship" statidentifier="captain" value="1" />
|
||||
<CharacterAbilityGiveStat stattype="HelmSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship_doctor" nameidentifier="jobname.medicaldoctor" coloroverride="0.81,0.31,0.19">
|
||||
<Icon texture="Content/UI/MainIconsAtlas.png" sourcerect="512,256,128,128" origin="0.5,0.5"/>
|
||||
<Description tag="talentdescription.apprenticeshipjob">
|
||||
<Replace tag="[job]" value="jobname.medicaldoctor" color="0.81,0.31,0.19"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalskill">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.medicalskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="Apprenticeship" statidentifier="medicaldoctor" value="1" />
|
||||
<CharacterAbilityGiveStat stattype="MedicalSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship_security" nameidentifier="jobname.securityofficer" coloroverride="0.57,0.48,0.43">
|
||||
<Icon texture="Content/UI/MainIconsAtlas.png" sourcerect="256,256,128,128" origin="0.5,0.5"/>
|
||||
<Description tag="talentdescription.apprenticeshipjob">
|
||||
<Replace tag="[job]" value="jobname.securityofficer" color="0.57,0.48,0.43"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalskill">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.weaponsskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="Apprenticeship" statidentifier="securityofficer" value="1" />
|
||||
<CharacterAbilityGiveStat stattype="WeaponsSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship_mechanic" nameidentifier="jobname.mechanic" coloroverride="0.5,0.86,0.9">
|
||||
<Icon texture="Content/UI/MainIconsAtlas.png" sourcerect="896,256,128,128" origin="0.5,0.5"/>
|
||||
<Description tag="talentdescription.apprenticeshipjob">
|
||||
<Replace tag="[job]" value="jobname.mechanic" color="0.5,0.86,0.9"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalskill">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.mechanicalskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="Apprenticeship" statidentifier="mechanic" value="1" />
|
||||
<CharacterAbilityGiveStat stattype="MechanicalSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="apprenticeship_engineer" nameidentifier="jobname.engineer" coloroverride="0.88,0.72,0.43">
|
||||
<Icon texture="Content/UI/MainIconsAtlas.png" sourcerect="768,256,128,128" origin="0.5,0.5"/>
|
||||
<Description tag="talentdescription.apprenticeshipjob">
|
||||
<Replace tag="[job]" value="jobname.engineer" color="0.88,0.72,0.43"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalskill">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.electricalskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="Apprenticeship" statidentifier="engineer" value="1" />
|
||||
<CharacterAbilityGiveStat stattype="ElectricalSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="logisticssystems">
|
||||
<Icon texture="Content/UI/TalentsIcons1.png" sheetindex="0,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.handhelditemfinder,entityname.makeshiftshelf" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.letitdrainreminder">
|
||||
<Replace tag="[itemcount]" value="3" color="gui.green"/>
|
||||
</Description>
|
||||
<Migrations>
|
||||
<AddStat version="1.0.10.0" statidentifier="makeshiftshelf" stattype="MaxAttachableCount" value="3" />
|
||||
</Migrations>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="makeshiftshelf" stattype="MaxAttachableCount" value="3" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="makeshiftshelf"/>
|
||||
<AddedRecipe itemidentifier="handhelditemfinder"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="thefriendswemade">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="2,0" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.thefriendswemade"/>
|
||||
<Description tag="talentdescription.thefriendswemadereminder">
|
||||
<Replace tag="[amount]" value="4" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.petraptoregg" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="petraptoregg"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="revengesquad">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="5,7" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.revengesquad">
|
||||
<Replace tag="[amount]" value="60" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="120" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnDieToCharacter">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToAllies requiresalive="false">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character">
|
||||
<Affliction identifier="revengesquad" amount="120"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToAllies>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="bagitup">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="0,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.backpack" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="backpack"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="peerlearning">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="0,1" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.peerlearning">
|
||||
<Replace tag="[amount]" value="100" color="gui.green"/>
|
||||
</Description>
|
||||
<!-- give XP to the character itself -->
|
||||
<AbilityGroupEffect abilityeffecttype="OnGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionLowestLevel />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="1.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<!-- allies too -->
|
||||
<AbilityGroupEffect abilityeffecttype="OnAllyGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionLowestLevel />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="1.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<!--<Talent identifier="thearrival">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.thearrival">
|
||||
<Replace tag="[amount]" value="1" color="gui.green"/>
|
||||
<Replace tag="[faction]" value="faction.clowns" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnUseRangedWeapon">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="thearrival_[placeholder]" placeholder="LocationIndex" min="1" invert="true" />
|
||||
<AbilityConditionItem tags="hornitem"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="thearrival_[placeholder]" placeholder="LocationIndex" value="1" setvalue="true" />
|
||||
<CharacterAbilityGiveReputation identifier="clowns" amount="1" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>-->
|
||||
|
||||
<Talent identifier="mailman">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="0,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.mailman">
|
||||
<Replace tag="[xpbonus]" value="25" color="gui.green"/>
|
||||
<Replace tag="[moneybonus]" value="250" color="gui.green"/>
|
||||
<Replace tag="[missiontype]" value="missiontype.cargo" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionMission missiontype="Cargo"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="1.25"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnGainMissionMoney">
|
||||
<Conditions>
|
||||
<AbilityConditionMission missiontype="Cargo"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveMoney amount="250"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="thewaitinglist">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="1,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.thewaitinglist">
|
||||
<Replace tag="[amount]" value="150" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval>
|
||||
<Conditions>
|
||||
<AbilityConditionAboveVitality invert="true" vitalitypercentage="0"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyResistance resistanceid="oxygenlow" multiplier="0.25" />
|
||||
<CharacterAbilityModifyResistance resistanceid="bleeding" multiplier="0.25" />
|
||||
<CharacterAbilityModifyResistance resistanceid="bloodloss" multiplier="0.25" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="anappleaday">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="3,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.anappleaday">
|
||||
<Replace tag="[amount]" value="200" color="gui.green"/>
|
||||
<Replace tag="[amount2]" value="75" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveResistance resistanceid="skilllossdeath" multiplier="0" />
|
||||
<CharacterAbilityGiveResistance resistanceid="skilllossrespawn" multiplier="0.25" />
|
||||
<CharacterAbilityGiveResistance resistanceid="reaperstax" multiplier="0" /> <!-- Legacy -->
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="skedaddle">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.skedaddle">
|
||||
<Replace tag="[value]" value="10" color="gui.green"/>
|
||||
<Replace tag="[attackedvalue]" value="30" color="gui.green"/>
|
||||
<Replace tag="[seconds]" value="5" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MovementSpeed" value="0.1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAttacked">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="skedaddle" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="insurancepolicy">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="0,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.insurancepolicy">
|
||||
<Replace tag="[amount]" value="400" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnDieToCharacter">
|
||||
<Conditions>
|
||||
<AbilityConditionCharacter invert="true" targettypes="Ally" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityInsurancePolicy moneypermission="200" requiresalive="false" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="nonthreatening">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.feigndeath">
|
||||
<Replace tag="[seconds]" value="2" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval effectdelay="2" >
|
||||
<Conditions>
|
||||
<AbilityConditionRagdolled/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyFlag flagtype="IgnoredByEnemyAI"/>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="tastytarget">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="1,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.tastytarget"/>
|
||||
<Description tag="talentdescription.damagereduction">
|
||||
<Replace tag="[amount]" value="30" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="tastytarget" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="starterquest">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="5,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.starterquest" />
|
||||
|
||||
<Description tag="talentdescription.starterquest.killcrawler">
|
||||
<Replace tag="[xpbonus]" value="400" color="gui.green"/>
|
||||
<Replace tag="[skillbonus]" value="10" color="gui.green"/>
|
||||
<Replace tag="[skill]" value="stattypenames.weaponsskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.starterquest.repairjunctionbox">
|
||||
<Replace tag="[xpbonus]" value="300" color="gui.green"/>
|
||||
<Replace tag="[skillbonus]" value="10" color="gui.green"/>
|
||||
<Replace tag="[skill]" value="stattypenames.electricalskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.starterquest.weldleak">
|
||||
<Replace tag="[xpbonus]" value="200" color="gui.green"/>
|
||||
<Replace tag="[skillbonus]" value="10" color="gui.green"/>
|
||||
<Replace tag="[skill]" value="stattypenames.mechanicalskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
|
||||
<!-- Give the player stats that tracks if the rewards should be given -->
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="starterquest_killcrawler" value="1" maxvalue="1" setvalue="true" />
|
||||
<CharacterAbilityGivePermanentStat statidentifier="starterquest_repairjunctionbox" value="1" maxvalue="1" setvalue="true" />
|
||||
<CharacterAbilityGivePermanentStat statidentifier="starterquest_weldleak" value="1" maxvalue="1" setvalue="true" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<!-- Give the player the rewards when the crawler is killed -->
|
||||
<AbilityGroupEffect abilityeffecttype="OnKillCharacter">
|
||||
<Conditions>
|
||||
<AbilityConditionCharacter>
|
||||
<Conditional group="eq crawler" />
|
||||
</AbilityConditionCharacter>
|
||||
<AbilityConditionHasPermanentStat statidentifier="starterquest_killcrawler" min="1"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveExperience amount="400"/>
|
||||
<CharacterAbilityGivePermanentStat stattype="WeaponsSkillBonus" statidentifier="starterquest" value="10" setvalue="true" removeondeath="false" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="starterquest_killcrawler" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<!-- Give the player the rewards when repairing a junction box -->
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="junctionbox"/>
|
||||
<AbilityConditionHasPermanentStat statidentifier="starterquest_repairjunctionbox" min="1"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveExperience amount="300"/>
|
||||
<CharacterAbilityGivePermanentStat stattype="ElectricalSkillBonus" statidentifier="starterquest" value="10" setvalue="true" removeondeath="false" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="starterquest_repairjunctionbox" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<!-- Give the player the rewards when welding a leak -->
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairedOutsideLeak">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="starterquest_weldleak" min="1"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveExperience amount="200"/>
|
||||
<CharacterAbilityGivePermanentStat stattype="MechanicalSkillBonus" statidentifier="starterquest" value="10" setvalue="true" removeondeath="false" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="starterquest_weldleak" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="indenturedservitude">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="4,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.assistanthiringprice">
|
||||
<Replace tag="[amount]" value="40" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.otherassistantxpbonus">
|
||||
<Replace tag="[bonus]" value="10" color="gui.green"/>
|
||||
<Replace tag="[max]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="HireCostMultiplier" statidentifier="assistant" value="-0.2" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnRoundEnd">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="indenturedservitude" targetallies="true" value="0" setvalue="true" maxvalue="5" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAllyGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionCharacter targetAbilityTarget="true">
|
||||
<Conditional jobidentifier="assistant" />
|
||||
</AbilityConditionCharacter>
|
||||
<AbilityConditionHasPermanentStat statidentifier="indenturedservitude" targetAbilityTarget="true" min="5" invert="true"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="1.1"/>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="indenturedservitude" targetAbilityTarget="true" value="1" maxvalue="5" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="jengamaster">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="2,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.jengamaster">
|
||||
<Replace tag="[amount]" value="8" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.doesnotstack" />
|
||||
<AbilityGroupEffect abilityeffecttype="OnItemSelected">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="container" />
|
||||
<AbilityConditionItemIsStatic />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStat stattype="ExtraStackSize" value="4" stackable="false" save="true"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="mule">
|
||||
<Icon texture="Content/UI/TalentsIcons4.png" sheetindex="3,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.mule">
|
||||
<Replace tag="[amount]" value="4" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="InventoryExtraStackSize" statidentifier="Material" value="2" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="crewlayabout">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="7,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.reductiontostattypeself">
|
||||
<Replace tag="[stattype]" value="stattypenames.repairspeed" color="gui.orange"/>
|
||||
<Replace tag="[amount]" value="80" color="gui.red"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.reductiontoallskills">
|
||||
<Replace tag="[amount]" value="30" color="gui.red"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.crewlayabout">
|
||||
<Replace tag="[repairspeed]" value="25" color="gui.green"/>
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="1">
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToAllies maxdistance="600" allowself="false">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="pickinguptheslack" strength="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToAllies>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="ElectricalSkillBonus" value="-15"/>
|
||||
<CharacterAbilityGiveStat stattype="HelmSkillBonus" value="-15"/>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalSkillBonus" value="-15"/>
|
||||
<CharacterAbilityGiveStat stattype="MedicalSkillBonus" value="-15"/>
|
||||
<CharacterAbilityGiveStat stattype="WeaponsSkillBonus" value="-15"/>
|
||||
<CharacterAbilityGiveStat stattype="RepairSpeed" value="-0.4"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="graduationceremony">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="2,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockapprenticeshiptalents"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.ceremonialsword" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="ceremonialsword"/>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="graduationceremony" min="1" invert="true"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityUnlockApprenticeshipTalentTree />
|
||||
<CharacterAbilityGivePermanentStat statidentifier="graduationceremony" value="1" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="playingcatchup">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,4" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.playingcatchup">
|
||||
<Replace tag="[levelvalue]" value="2" color="gui.green"/>
|
||||
<Replace tag="[experiencevalue]" value="100" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionLevelsBehindHighest levelsbehind="2" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="enrollintoclowncollege">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.enrollintoclowncollege">
|
||||
<Replace tag="[afflictionname]" value="afflictionname.clownpower" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.clownpowerbenefits"/>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.movementspeed" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="10" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.physicalresistance" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.clowncrate" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="clowncrate"/>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionHasItem tags="clowngear,clownmask" requireall="true"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="clownpower" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="waterprankster">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.clowndivingmask" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.clownpowerbenefits"/>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="100" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.swimmingspeed" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval>
|
||||
<Conditions>
|
||||
<AbilityConditionHasAffliction afflictionidentifier="clownpower" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyStat stattype="SwimmingSpeed" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AddedRecipe itemidentifier="clowndivingmask"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="psychoclown">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="7,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.psychoclown">
|
||||
<Replace tag="[afflictionname]" value="afflictionname.clownpower" color="gui.orange"/>
|
||||
<Replace tag="[maxattackspeed]" value="150" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval>
|
||||
<Conditions>
|
||||
<AbilityConditionHasAffliction afflictionidentifier="clownpower" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityPsychoClown stattype="MeleeAttackSpeed" maxvalue="1.5" afflictionidentifier="psychosis"/>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="inspiringtunes">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,3" sheetelementsize="128,128"/>
|
||||
<!-- TODO mention it works on all instruments -->
|
||||
<Description tag="talentdescription.inspiringtunes">
|
||||
<Replace tag="[skillamount]" value="15" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="10" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnUseRangedWeapon">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="hornitem,instrument"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffectsToAllies maxdistance="600" allowself="false">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="inspiringtunes" strength="10.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectsToAllies>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="chonkyhonks">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,7" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.chonkyhonks">
|
||||
<Replace tag="[chance]" value="50" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="1" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnUseRangedWeapon">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="hornitem"/>
|
||||
<AbilityConditionHasAffliction afflictionidentifier="clownpower" />
|
||||
<AbilityConditionServerRandom randomchance="0.5" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<!-- TODO loud sound? -->
|
||||
<CharacterAbilityApplyStatusEffectToNonHumans maxdistance="600">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="stun" strength="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffectToNonHumans>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="truepotential">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="1,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.truepotentialtemporary">
|
||||
<Replace tag="[affliction]" value="afflictionname.combatstimulant" color="gui.orange"/>
|
||||
<Replace tag="[duration]" value="15" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.clownexosuit" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAttack">
|
||||
<Conditions>
|
||||
<AbilityConditionCharacterUnconcious />
|
||||
<AbilityConditionHasAffliction afflictionidentifier="clownpower" />
|
||||
<AbilityConditionAttackData itemidentifier="toyhammer" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveAffliction affliction="combatstimulant" strength="15" setvalue="true" />
|
||||
<CharacterAbilityReduceAffliction affliction="oxygenlow" amount="200" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="clownexosuit"/>
|
||||
</Talent>
|
||||
|
||||
</Talents>
|
||||
8
testfiles/format_test_cook.yml
Normal file
8
testfiles/format_test_cook.yml
Normal file
@@ -0,0 +1,8 @@
|
||||
- name: "JSONFormattingTest"
|
||||
json: true
|
||||
lua: |
|
||||
data.version = "2.0.0"
|
||||
data.enabled = true
|
||||
data.settings.timeout = 60
|
||||
return true
|
||||
files: ["testfiles/test3.json"]
|
||||
15
testfiles/json_global_test_cook.yml
Normal file
15
testfiles/json_global_test_cook.yml
Normal file
@@ -0,0 +1,15 @@
|
||||
# Test with global JSON flag (no json: true in commands)
|
||||
- name: "JSONArrayMultiply"
|
||||
lua: |
|
||||
for i, item in ipairs(data.items) do
|
||||
data.items[i].value = item.value * 2
|
||||
end
|
||||
return true
|
||||
files: ["testfiles/test2.json"]
|
||||
|
||||
- name: "JSONObjectUpdate"
|
||||
lua: |
|
||||
data.version = "3.0.0"
|
||||
data.enabled = false
|
||||
return true
|
||||
files: ["testfiles/test2.json"]
|
||||
32
testfiles/json_test_cook.yml
Normal file
32
testfiles/json_test_cook.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
# Global modifiers
|
||||
- modifiers:
|
||||
multiply: 2.0
|
||||
new_version: "2.0.0"
|
||||
|
||||
# JSON mode examples
|
||||
- name: "JSONArrayMultiply"
|
||||
json: true
|
||||
lua: |
|
||||
for i, item in ipairs(data.items) do
|
||||
data.items[i].value = item.value * $multiply
|
||||
end
|
||||
return true
|
||||
files: ["testfiles/test.json"]
|
||||
|
||||
- name: "JSONObjectUpdate"
|
||||
json: true
|
||||
lua: |
|
||||
data.version = $new_version
|
||||
data.enabled = true
|
||||
return true
|
||||
files: ["testfiles/test.json"]
|
||||
|
||||
- name: "JSONNestedModify"
|
||||
json: true
|
||||
lua: |
|
||||
if data.settings and data.settings.performance then
|
||||
data.settings.performance.multiplier = data.settings.performance.multiplier * 1.5
|
||||
data.settings.performance.enabled = true
|
||||
end
|
||||
return true
|
||||
files: ["testfiles/test.json"]
|
||||
30
testfiles/test.json
Normal file
30
testfiles/test.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "test-config",
|
||||
"version": "1.0.0",
|
||||
"enabled": false,
|
||||
"settings": {
|
||||
"timeout": 30,
|
||||
"retries": 3,
|
||||
"performance": {
|
||||
"multiplier": 1.0,
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "item1",
|
||||
"value": 10
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "item2",
|
||||
"value": 20
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"name": "item3",
|
||||
"value": 30
|
||||
}
|
||||
]
|
||||
}
|
||||
30
testfiles/test2.json
Normal file
30
testfiles/test2.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"enabled": false,
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "item1",
|
||||
"value": 80
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "item2",
|
||||
"value": 160
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"name": "item3",
|
||||
"value": 240
|
||||
}
|
||||
],
|
||||
"name": "test-config",
|
||||
"settings": {
|
||||
"performance": {
|
||||
"enabled": true,
|
||||
"multiplier": 1.5
|
||||
},
|
||||
"retries": 3,
|
||||
"timeout": 30
|
||||
},
|
||||
"version": "3.0.0"
|
||||
}
|
||||
25
testfiles/test3.json
Normal file
25
testfiles/test3.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "item1",
|
||||
"value": 10
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "item2",
|
||||
"value": 20
|
||||
}
|
||||
],
|
||||
"name": "test-config",
|
||||
"settings": {
|
||||
"performance": {
|
||||
"enabled": false,
|
||||
"multiplier": 1
|
||||
},
|
||||
"retries": 3,
|
||||
"timeout": 60
|
||||
},
|
||||
"version": "2.0.0"
|
||||
}
|
||||
25
testfiles/test3_backup.json
Normal file
25
testfiles/test3_backup.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "item1",
|
||||
"value": 10
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "item2",
|
||||
"value": 20
|
||||
}
|
||||
],
|
||||
"name": "test-config",
|
||||
"settings": {
|
||||
"performance": {
|
||||
"enabled": false,
|
||||
"multiplier": 1
|
||||
},
|
||||
"retries": 3,
|
||||
"timeout": 60
|
||||
},
|
||||
"version": "2.0.0"
|
||||
}
|
||||
25
testfiles/test4.json
Normal file
25
testfiles/test4.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "test-config",
|
||||
"version": "1.0.0",
|
||||
"enabled": false,
|
||||
"settings": {
|
||||
"timeout": 30,
|
||||
"retries": 3,
|
||||
"performance": {
|
||||
"multiplier": 1.0,
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "item1",
|
||||
"value": 10
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "item2",
|
||||
"value": 20
|
||||
}
|
||||
]
|
||||
}
|
||||
529
toml_test.go
Normal file
529
toml_test.go
Normal file
@@ -0,0 +1,529 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestTOMLLoadBasic(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-basic-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a simple TOML test file
|
||||
tomlContent := `[[commands]]
|
||||
name = "SimpleTest"
|
||||
regex = "test = !num"
|
||||
lua = "v1 * 2"
|
||||
files = ["test.txt"]
|
||||
|
||||
[[commands]]
|
||||
name = "AnotherTest"
|
||||
regex = "value = (!num)"
|
||||
lua = "v1 + 10"
|
||||
files = ["*.txt"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test loading TOML commands
|
||||
commands, _, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 2, "Should load 2 commands from TOML")
|
||||
|
||||
// Verify first command
|
||||
assert.Equal(t, "SimpleTest", commands[0].Name, "First command name should match")
|
||||
assert.Equal(t, "test = !num", commands[0].Regex, "First command regex should match")
|
||||
assert.Equal(t, "v1 * 2", commands[0].Lua, "First command Lua should match")
|
||||
assert.Equal(t, []string{"test.txt"}, commands[0].Files, "First command files should match")
|
||||
|
||||
// Verify second command
|
||||
assert.Equal(t, "AnotherTest", commands[1].Name, "Second command name should match")
|
||||
assert.Equal(t, "value = (!num)", commands[1].Regex, "Second command regex should match")
|
||||
assert.Equal(t, "v1 + 10", commands[1].Lua, "Second command Lua should match")
|
||||
assert.Equal(t, []string{"*.txt"}, commands[1].Files, "Second command files should match")
|
||||
}
|
||||
|
||||
func TestTOMLGlobalModifiers(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-global-modifiers-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create TOML content with global variables
|
||||
tomlContent := `[variables]
|
||||
multiplier = 3
|
||||
prefix = "TEST_"
|
||||
enabled = true
|
||||
|
||||
[[commands]]
|
||||
name = "UseGlobalModifiers"
|
||||
regex = "value = !num"
|
||||
lua = "v1 * multiplier; s1 = prefix .. s1"
|
||||
files = ["test.txt"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test loading TOML commands
|
||||
commands, variables, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 1, "Should load 1 command from TOML")
|
||||
assert.Len(t, variables, 3, "Should load 3 variables")
|
||||
|
||||
// Verify variables
|
||||
assert.Equal(t, int64(3), variables["multiplier"], "Multiplier should be 3")
|
||||
assert.Equal(t, "TEST_", variables["prefix"], "Prefix should be TEST_")
|
||||
assert.Equal(t, true, variables["enabled"], "Enabled should be true")
|
||||
|
||||
// Verify regular command
|
||||
assert.Equal(t, "UseGlobalModifiers", commands[0].Name, "Regular command name should match")
|
||||
assert.Equal(t, "value = !num", commands[0].Regex, "Regular command regex should match")
|
||||
}
|
||||
|
||||
func TestTOMLMultilineRegex(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-multiline-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create TOML content with multiline regex using literal strings
|
||||
tomlContent := `[variables]
|
||||
factor = 2.5
|
||||
|
||||
[[commands]]
|
||||
name = "MultilineTest"
|
||||
regex = '''
|
||||
\[config\.settings\]
|
||||
|
||||
depth = !num
|
||||
|
||||
width = !num
|
||||
|
||||
height = !num'''
|
||||
lua = "v1 * factor"
|
||||
files = ["test.conf"]
|
||||
isolate = true
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Create test file that matches the multiline pattern
|
||||
testContent := `[config.settings]
|
||||
|
||||
depth = 10
|
||||
|
||||
width = 20
|
||||
|
||||
height = 30
|
||||
`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.conf")
|
||||
err = os.WriteFile(testFile, []byte(testContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test loading TOML commands
|
||||
commands, variables, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 1, "Should load 1 command from TOML")
|
||||
assert.Len(t, variables, 1, "Should load 1 variable")
|
||||
|
||||
// Verify the multiline regex command
|
||||
multilineCmd := commands[0]
|
||||
assert.Equal(t, "MultilineTest", multilineCmd.Name, "Command name should match")
|
||||
assert.Contains(t, multilineCmd.Regex, "\\[config\\.settings\\]", "Regex should contain escaped brackets")
|
||||
assert.Contains(t, multilineCmd.Regex, "depth = !num", "Regex should contain depth pattern")
|
||||
assert.Contains(t, multilineCmd.Regex, "width = !num", "Regex should contain width pattern")
|
||||
assert.Contains(t, multilineCmd.Regex, "height = !num", "Regex should contain height pattern")
|
||||
assert.Contains(t, multilineCmd.Regex, "\n", "Regex should contain newlines")
|
||||
assert.True(t, multilineCmd.Isolate, "Isolate should be true")
|
||||
|
||||
// Verify the regex preserves proper structure
|
||||
expectedLines := []string{
|
||||
"\\[config\\.settings\\]",
|
||||
"depth = !num",
|
||||
"width = !num",
|
||||
"height = !num",
|
||||
}
|
||||
|
||||
for _, line := range expectedLines {
|
||||
assert.Contains(t, multilineCmd.Regex, line, "Regex should contain: "+line)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTOMLComplexRegexPatterns(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-complex-regex-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create TOML content with complex regex patterns
|
||||
tomlContent := `[[commands]]
|
||||
name = "ComplexPatterns"
|
||||
regexes = [
|
||||
"\\[section\\.([^\\]]+)\\]",
|
||||
"(?P<key>\\w+)\\s*=\\s*(?P<value>\\d+\\.\\d+)",
|
||||
"network\\.(\\w+)\\.(enable|disable)"
|
||||
]
|
||||
lua = "if is_number(value) then value = num(value) * 1.1 end; return true"
|
||||
files = ["*.conf", "*.ini"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test loading TOML commands
|
||||
commands, _, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 1, "Should load 1 command from TOML")
|
||||
|
||||
// Verify the complex regex command
|
||||
cmd := commands[0]
|
||||
assert.Equal(t, "ComplexPatterns", cmd.Name, "Command name should match")
|
||||
assert.Len(t, cmd.Regexes, 3, "Should have 3 regex patterns")
|
||||
|
||||
// Verify each regex pattern
|
||||
assert.Equal(t, `\[section\.([^\]]+)\]`, cmd.Regexes[0], "First regex should match section pattern")
|
||||
assert.Equal(t, `(?P<key>\w+)\s*=\s*(?P<value>\d+\.\d+)`, cmd.Regexes[1], "Second regex should match key-value pattern")
|
||||
assert.Equal(t, `network\.(\w+)\.(enable|disable)`, cmd.Regexes[2], "Third regex should match network pattern")
|
||||
|
||||
assert.Equal(t, []string{"*.conf", "*.ini"}, cmd.Files, "Files should match")
|
||||
}
|
||||
|
||||
func TestTOMLJSONMode(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-json-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create TOML content with JSON mode commands
|
||||
tomlContent := `[[commands]]
|
||||
name = "JSONMultiply"
|
||||
json = true
|
||||
lua = "for i, item in ipairs(data.items) do data.items[i].value = item.value * 2 end; return true"
|
||||
files = ["data.json"]
|
||||
|
||||
[[commands]]
|
||||
name = "JSONObjectUpdate"
|
||||
json = true
|
||||
lua = "data.version = '2.0.0'; data.enabled = true; return true"
|
||||
files = ["config.json"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test loading TOML commands
|
||||
commands, _, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 2, "Should load 2 commands from TOML")
|
||||
|
||||
// Verify first JSON command
|
||||
cmd1 := commands[0]
|
||||
assert.Equal(t, "JSONMultiply", cmd1.Name, "First command name should match")
|
||||
assert.True(t, cmd1.JSON, "First command should have JSON mode enabled")
|
||||
assert.Equal(t, "for i, item in ipairs(data.items) do data.items[i].value = item.value * 2 end; return true", cmd1.Lua, "First command Lua should match")
|
||||
assert.Equal(t, []string{"data.json"}, cmd1.Files, "First command files should match")
|
||||
|
||||
// Verify second JSON command
|
||||
cmd2 := commands[1]
|
||||
assert.Equal(t, "JSONObjectUpdate", cmd2.Name, "Second command name should match")
|
||||
assert.True(t, cmd2.JSON, "Second command should have JSON mode enabled")
|
||||
assert.Equal(t, "data.version = '2.0.0'; data.enabled = true; return true", cmd2.Lua, "Second command Lua should match")
|
||||
assert.Equal(t, []string{"config.json"}, cmd2.Files, "Second command files should match")
|
||||
}
|
||||
|
||||
func TestTOMLXMLMode(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "toml-xml-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
tomlContent := `[[commands]]
|
||||
name = "XMLUpdate"
|
||||
xml = true
|
||||
lua = "data.attr.version = '2'; return true"
|
||||
files = ["data.xml"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
commands, _, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 1)
|
||||
assert.Equal(t, "XMLUpdate", commands[0].Name)
|
||||
assert.True(t, commands[0].XML)
|
||||
assert.Equal(t, []string{"data.xml"}, commands[0].Files)
|
||||
}
|
||||
|
||||
func TestTOMLEndToEndIntegration(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-integration-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create comprehensive TOML content
|
||||
tomlContent := `[variables]
|
||||
multiplier = 4
|
||||
base_value = 100
|
||||
|
||||
[[commands]]
|
||||
name = "IntegrationTest"
|
||||
regex = '''
|
||||
\[kinetics\.stressValues\.v2\.capacity\]
|
||||
|
||||
steam_engine = !num
|
||||
|
||||
water_wheel = !num
|
||||
|
||||
copper_valve_handle = !num'''
|
||||
lua = "v1 * multiplier"
|
||||
files = ["test.txt"]
|
||||
isolate = true
|
||||
|
||||
[[commands]]
|
||||
name = "SimplePattern"
|
||||
regex = "enabled = (true|false)"
|
||||
lua = "= false"
|
||||
files = ["test.txt"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write TOML test file: %v", err)
|
||||
}
|
||||
|
||||
// Create test file that matches the patterns
|
||||
testContent := `[kinetics.stressValues.v2.capacity]
|
||||
|
||||
steam_engine = 256
|
||||
|
||||
water_wheel = 64
|
||||
|
||||
copper_valve_handle = 16
|
||||
|
||||
some_other_setting = enabled = true
|
||||
`
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.txt")
|
||||
err = os.WriteFile(testFile, []byte(testContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write test file: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test the complete workflow using the main function
|
||||
commands, variables, err := utils.LoadCommands([]string{"test.toml"})
|
||||
assert.NoError(t, err, "Should load TOML commands without error")
|
||||
assert.Len(t, commands, 2, "Should load 2 commands")
|
||||
assert.Len(t, variables, 2, "Should load 2 variables")
|
||||
|
||||
// Associate files with commands
|
||||
files := []string{"test.txt"}
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
assert.NoError(t, err, "Should associate files with commands")
|
||||
|
||||
// Verify associations
|
||||
association := associations["test.txt"]
|
||||
assert.Len(t, association.IsolateCommands, 1, "Should have 1 isolate command")
|
||||
assert.Len(t, association.Commands, 1, "Should have 1 regular command")
|
||||
assert.Equal(t, "IntegrationTest", association.IsolateCommands[0].Name, "Isolate command should match")
|
||||
assert.Equal(t, "SimplePattern", association.Commands[0].Name, "Regular command should match")
|
||||
|
||||
}
|
||||
|
||||
func TestTOMLErrorHandling(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "toml-error-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test 1: Invalid TOML syntax
|
||||
invalidTOML := `[[commands]]
|
||||
name = "Invalid"
|
||||
regex = "test = !num"
|
||||
lua = "v1 * 2"
|
||||
files = ["test.txt"
|
||||
# Missing closing bracket
|
||||
`
|
||||
|
||||
invalidFile := filepath.Join(tmpDir, "invalid.toml")
|
||||
err = os.WriteFile(invalidFile, []byte(invalidTOML), 0644)
|
||||
assert.NoError(t, err, "Should write invalid TOML file")
|
||||
|
||||
commands, _, err := utils.LoadCommandsFromTomlFiles("invalid.toml")
|
||||
assert.Error(t, err, "Should return error for invalid TOML syntax")
|
||||
assert.Nil(t, commands, "Should return nil commands for invalid TOML")
|
||||
assert.Contains(t, err.Error(), "failed to unmarshal TOML file", "Error should mention TOML unmarshaling")
|
||||
|
||||
// Test 2: Non-existent file
|
||||
commands, _, err = utils.LoadCommandsFromTomlFiles("nonexistent.toml")
|
||||
assert.NoError(t, err, "Should handle non-existent file without error")
|
||||
assert.Empty(t, commands, "Should return empty commands for non-existent file")
|
||||
|
||||
// Test 3: Empty TOML file returns no commands (not an error)
|
||||
emptyFile := filepath.Join(tmpDir, "empty.toml")
|
||||
err = os.WriteFile(emptyFile, []byte(""), 0644)
|
||||
assert.NoError(t, err, "Should write empty TOML file")
|
||||
|
||||
commands, _, err = utils.LoadCommandsFromTomlFiles("empty.toml")
|
||||
assert.NoError(t, err, "Empty TOML should not return error")
|
||||
assert.Empty(t, commands, "Should return empty commands for empty TOML")
|
||||
}
|
||||
|
||||
func TestYAMLToTOMLConversion(t *testing.T) {
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "yaml-to-toml-conversion-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Change to temp directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Create a test YAML file
|
||||
yamlContent := `variables:
|
||||
multiplier: 2.5
|
||||
prefix: "CONV_"
|
||||
|
||||
commands:
|
||||
- name: "ConversionTest"
|
||||
regex: "value = !num"
|
||||
lua: "v1 * 3"
|
||||
files: ["test.txt"]
|
||||
loglevel: DEBUG
|
||||
- name: "AnotherTest"
|
||||
regex: "enabled = (true|false)"
|
||||
lua: "= false"
|
||||
files: ["*.conf"]
|
||||
`
|
||||
|
||||
yamlFile := filepath.Join(tmpDir, "test.yml")
|
||||
err = os.WriteFile(yamlFile, []byte(yamlContent), 0644)
|
||||
assert.NoError(t, err, "Should write YAML test file")
|
||||
|
||||
// Test conversion
|
||||
err = ConvertYAMLToTOML("test.yml")
|
||||
assert.NoError(t, err, "Should convert YAML to TOML without error")
|
||||
|
||||
// Check that TOML file was created
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
_, err = os.Stat(tomlFile)
|
||||
assert.NoError(t, err, "TOML file should exist after conversion")
|
||||
|
||||
// Read and verify TOML content
|
||||
tomlData, err := os.ReadFile(tomlFile)
|
||||
assert.NoError(t, err, "Should read TOML file")
|
||||
|
||||
tomlContent := string(tomlData)
|
||||
assert.Contains(t, tomlContent, `name = "ConversionTest"`, "TOML should contain first command name")
|
||||
assert.Contains(t, tomlContent, `name = "AnotherTest"`, "TOML should contain second command name")
|
||||
assert.Contains(t, tomlContent, `[variables]`, "TOML should contain variables section")
|
||||
assert.Contains(t, tomlContent, `multiplier = 2.5`, "TOML should contain multiplier")
|
||||
assert.Contains(t, tomlContent, `prefix = "CONV_"`, "TOML should contain prefix")
|
||||
|
||||
// Test that converted TOML loads correctly
|
||||
commands, variables, err := utils.LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err, "Should load converted TOML without error")
|
||||
assert.Len(t, commands, 2, "Should load 2 commands from converted TOML")
|
||||
assert.Len(t, variables, 2, "Should have 2 variables")
|
||||
|
||||
// Variables are now loaded separately, not as part of commands
|
||||
|
||||
// Test skip functionality - run conversion again
|
||||
err = ConvertYAMLToTOML("test.yml")
|
||||
assert.NoError(t, err, "Should handle existing TOML file without error")
|
||||
|
||||
// Verify original TOML file wasn't modified
|
||||
originalTomlData, err := os.ReadFile(tomlFile)
|
||||
assert.NoError(t, err, "Should read TOML file again")
|
||||
assert.Equal(t, tomlData, originalTomlData, "TOML file content should be unchanged")
|
||||
|
||||
}
|
||||
701
tui/app.go
Normal file
701
tui/app.go
Normal file
@@ -0,0 +1,701 @@
|
||||
package tui
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gdamore/tcell/v2"
|
||||
)
|
||||
|
||||
type Callbacks struct {
|
||||
RunSelected func([]string) (RunReport, error)
|
||||
RunAll func() (RunReport, error)
|
||||
FormatFile func(string) (string, error)
|
||||
ResetAll func() (string, error)
|
||||
DumpAll func() (string, error)
|
||||
Escape func(bool) (string, error)
|
||||
OnWatchSync func([]string) error
|
||||
OnWatchTick func() (WatchReloadResult, error)
|
||||
}
|
||||
|
||||
// Inputs carries the initial state and event channels used to construct an App.
type Inputs struct {
	Rows           []Row           // initial list entries; file rows start expanded (see New)
	CookFiles      []string        // cook file paths; NOTE(review): not read in the code visible here
	Version        string          // application version string
	WatchEvents    <-chan struct{} // signals from the file watcher; forwarded to the screen as "watch:tick"
	WatchErrors    <-chan error    // watcher errors; forwarded to the screen as "watch:err:<msg>"
	LogEvents      <-chan LogEvent // streaming log entries
	WatchEnabled   bool            // whether watch mode is active at startup
	InitialMessage string          // initial status-line text (also seeded into the log)
}
|
||||
|
||||
// App is the terminal UI state machine. It owns the tcell screen, the row
// list, selection/range state, and the in-memory log buffers. It is driven
// single-threaded from Run's event loop; background goroutines communicate
// only by posting tcell interrupt events.
type App struct {
	cb           Callbacks          // host-application actions
	rows         []Row              // visible entries
	idx          int                // currently selected row index
	rangeMode    bool               // true while a range selection is in progress
	rangeStart   int                // anchor index for range selection
	status       string             // status-line text
	summaries    []CommandSummary   // per-command result summaries
	screen       tcell.Screen       // set in Run after screen init
	pane         int                // which pane has focus
	runBusy      bool               // a run is in flight
	runLabel     string             // label for the in-flight run
	fmtBusy      bool               // a format operation is in flight
	watchEnabled bool               // watch mode active
	watchBusy    bool               // a watch-triggered reload is in flight
	watchEvents  <-chan struct{}    // watcher change notifications
	watchErrors  <-chan error       // watcher errors
	logEvents    <-chan LogEvent    // streaming log entries
	spinnerFrame int                // current spinner animation frame
	version      string             // app version shown in the UI
	expanded     map[string]bool    // file path -> expanded in the tree view
	logs         []string           // global log lines
	commandLogs  map[string][]string // per-command log lines, keyed by command; TODO confirm key
	logScroll    int                // scroll offset within the log pane
}
|
||||
|
||||
func New(inputs Inputs, cb Callbacks) *App {
|
||||
a := &App{
|
||||
cb: cb,
|
||||
rows: inputs.Rows,
|
||||
status: inputs.InitialMessage,
|
||||
watchEnabled: inputs.WatchEnabled,
|
||||
watchEvents: inputs.WatchEvents,
|
||||
watchErrors: inputs.WatchErrors,
|
||||
logEvents: inputs.LogEvents,
|
||||
version: inputs.Version,
|
||||
expanded: map[string]bool{},
|
||||
commandLogs: map[string][]string{},
|
||||
}
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowFile {
|
||||
a.expanded[a.rows[i].File] = true
|
||||
}
|
||||
}
|
||||
if len(a.rows) == 0 {
|
||||
a.status = "No entries loaded"
|
||||
}
|
||||
a.logs = append(a.logs, a.status)
|
||||
return a
|
||||
}
|
||||
|
||||
// Run initializes the tcell screen, starts the background goroutines that
// forward watcher/log/spinner activity onto the event queue, and drives the
// draw/poll loop until the user quits. Returns only screen-setup errors.
func (a *App) Run() error {
	s, err := tcell.NewScreen()
	if err != nil {
		return err
	}
	if err := s.Init(); err != nil {
		return err
	}
	defer s.Fini()
	a.screen = s

	s.Clear()
	// Forward watcher ticks/errors, batched log lines, and spinner ticks
	// into the tcell event queue so all state mutation happens here.
	a.postWatchEventsToScreen(s)
	a.postLogEventsToScreen(s)
	a.postSpinnerTicksToScreen(s)

	for {
		a.draw(s)
		ev := s.PollEvent()
		switch event := ev.(type) {
		case *tcell.EventResize:
			s.Sync()
		case *tcell.EventInterrupt:
			// Interrupt payloads are either string tags ("spin",
			// "watch:...") or typed completion/batch events.
			switch data := event.Data().(type) {
			case string:
				a.handleInterrupt(data)
			case LogEvent:
				a.appendLogEvent(data)
			case logBatchEvent:
				a.appendLogEvents(data.events)
			case formatDoneEvent:
				a.handleFormatDone(data)
			case resetDoneEvent:
				a.handleResetDone(data)
			case dumpDoneEvent:
				a.handleDumpDone(data)
			case runDoneEvent:
				a.handleRunDone(data)
			}
		case *tcell.EventKey:
			// handleKey reports true when the user asked to quit.
			if a.handleKey(event) {
				return nil
			}
		}
	}
}
|
||||
|
||||
// runDoneEvent is posted to the event loop when an async run completes.
type runDoneEvent struct {
	kind   string    // label of the run that finished ("Run all", "Run selected", ...)
	report RunReport // run results; meaningful only when err is nil
	err    error
}
|
||||
|
||||
// logBatchEvent carries a batch of log lines coalesced by
// postLogEventsToScreen to limit event-queue pressure.
type logBatchEvent struct {
	events []LogEvent
}
|
||||
|
||||
// formatDoneEvent is posted when an async FormatFile call completes.
type formatDoneEvent struct {
	message string // status message; meaningful only when err is nil
	err     error
}
|
||||
|
||||
// resetDoneEvent is posted when an async ResetAll call completes.
type resetDoneEvent struct {
	message string // status message; meaningful only when err is nil
	err     error
}
|
||||
|
||||
// dumpDoneEvent is posted when an async DumpAll call completes.
type dumpDoneEvent struct {
	message string // status message; meaningful only when err is nil
	err     error
}
|
||||
|
||||
func (a *App) postWatchEventsToScreen(screen tcell.Screen) {
|
||||
if a.watchEvents != nil {
|
||||
go func() {
|
||||
for range a.watchEvents {
|
||||
screen.PostEventWait(tcell.NewEventInterrupt("watch:tick"))
|
||||
}
|
||||
}()
|
||||
}
|
||||
if a.watchErrors != nil {
|
||||
go func() {
|
||||
for err := range a.watchErrors {
|
||||
screen.PostEventWait(tcell.NewEventInterrupt("watch:err:" + err.Error()))
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) postLogEventsToScreen(screen tcell.Screen) {
|
||||
if a.logEvents == nil {
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
flush := func(batch []LogEvent) []LogEvent {
|
||||
if len(batch) == 0 {
|
||||
return batch
|
||||
}
|
||||
toSend := make([]LogEvent, len(batch))
|
||||
copy(toSend, batch)
|
||||
screen.PostEventWait(tcell.NewEventInterrupt(logBatchEvent{events: toSend}))
|
||||
return batch[:0]
|
||||
}
|
||||
|
||||
batch := make([]LogEvent, 0, 128)
|
||||
ticker := time.NewTicker(33 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case logEvent, ok := <-a.logEvents:
|
||||
if !ok {
|
||||
flush(batch)
|
||||
return
|
||||
}
|
||||
batch = append(batch, logEvent)
|
||||
if len(batch) >= 128 {
|
||||
batch = flush(batch)
|
||||
}
|
||||
case <-ticker.C:
|
||||
batch = flush(batch)
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (a *App) postSpinnerTicksToScreen(screen tcell.Screen) {
|
||||
go func() {
|
||||
ticker := time.NewTicker(120 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for range ticker.C {
|
||||
screen.PostEventWait(tcell.NewEventInterrupt("spin"))
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
// handleInterrupt reacts to string-tagged interrupt events posted by the
// background goroutines: spinner ticks ("spin"), watcher change signals
// ("watch:tick"), and watcher failures ("watch:err:<message>").
func (a *App) handleInterrupt(msg string) {
	if msg == "spin" {
		// Advance the spinner animation one frame.
		a.spinnerFrame = (a.spinnerFrame + 1) % len(spinnerFrames)
		return
	}
	if msg == "watch:tick" {
		// Skip when watching is off, no callback is wired, or a manual
		// run is already in flight.
		if !a.watchEnabled || a.cb.OnWatchTick == nil || a.runBusy {
			return
		}
		a.logs = append(a.logs, "watch tick")
		a.watchBusy = true
		a.setWatchedBusy(true)
		// NOTE(review): OnWatchTick runs synchronously on the event loop,
		// so the UI is blocked for the duration of the rerun.
		watchResult, err := a.cb.OnWatchTick()
		a.watchBusy = false
		a.setWatchedBusy(false)
		if err != nil {
			a.status = "Watch error: " + err.Error()
			a.logs = append(a.logs, a.status)
			return
		}
		if len(watchResult.Rows) > 0 {
			// Adopt the reloaded rows. Newly seen files start expanded;
			// existing expansion choices are preserved.
			a.rows = watchResult.Rows
			for i := range a.rows {
				if a.rows[i].Kind == RowFile {
					if _, exists := a.expanded[a.rows[i].File]; !exists {
						a.expanded[a.rows[i].File] = true
					}
				}
			}
			// Clamp the focus index into the new row range.
			if a.idx >= len(a.rows) {
				a.idx = len(a.rows) - 1
			}
			if a.idx < 0 {
				a.idx = 0
			}
		}
		a.summaries = watchResult.Report.Summaries
		a.applySummaryToRows(watchResult.Report.Summaries)
		if watchResult.Message != "" {
			a.status = watchResult.Message
		} else {
			a.status = fmt.Sprintf("Watch rerun complete: %d command(s)", watchResult.TriggeredRuns)
		}
		a.logs = append(a.logs, a.status)
		return
	}
	if strings.HasPrefix(msg, "watch:err:") {
		a.status = "Watcher error: " + strings.TrimPrefix(msg, "watch:err:")
		a.logs = append(a.logs, a.status)
	}
}
|
||||
|
||||
func (a *App) appendLogEvent(event LogEvent) {
|
||||
if event.Line == "" {
|
||||
return
|
||||
}
|
||||
a.logs = append(a.logs, event.Line)
|
||||
if event.CommandID == "" {
|
||||
return
|
||||
}
|
||||
a.commandLogs[event.CommandID] = append(a.commandLogs[event.CommandID], event.Line)
|
||||
}
|
||||
|
||||
func (a *App) appendLogEvents(events []LogEvent) {
|
||||
for i := range events {
|
||||
a.appendLogEvent(events[i])
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) handleRunDone(event runDoneEvent) {
|
||||
a.runBusy = false
|
||||
a.runLabel = ""
|
||||
if event.err != nil {
|
||||
a.status = event.kind + " failed: " + event.err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.summaries = event.report.Summaries
|
||||
a.applySummaryToRows(event.report.Summaries)
|
||||
a.status = fmt.Sprintf("%s: files=%d modified=%d failed=%d elapsed=%dms", event.kind, event.report.Processed, event.report.Modified, event.report.Failed, event.report.ElapsedMs)
|
||||
a.logs = append(a.logs, a.status)
|
||||
}
|
||||
|
||||
func (a *App) setWatchedBusy(busy bool) {
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand && a.rows[i].Watched {
|
||||
a.rows[i].WatchBusy = busy
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) handleKey(event *tcell.EventKey) bool {
|
||||
if len(a.rows) == 0 {
|
||||
return event.Key() == tcell.KeyEscape || event.Rune() == 'q'
|
||||
}
|
||||
|
||||
switch event.Key() {
|
||||
case tcell.KeyUp:
|
||||
if a.pane == 0 {
|
||||
a.move(-1)
|
||||
} else {
|
||||
a.logScrollUp(1)
|
||||
}
|
||||
case tcell.KeyDown:
|
||||
if a.pane == 0 {
|
||||
a.move(1)
|
||||
} else {
|
||||
a.logScrollDown(1)
|
||||
}
|
||||
case tcell.KeyLeft:
|
||||
a.pane = 0
|
||||
case tcell.KeyRight:
|
||||
a.pane = 1
|
||||
case tcell.KeyTAB:
|
||||
a.pane = (a.pane + 1) % 2
|
||||
case tcell.KeyPgUp:
|
||||
if a.pane == 1 {
|
||||
a.logScrollUp(10)
|
||||
}
|
||||
case tcell.KeyPgDn:
|
||||
if a.pane == 1 {
|
||||
a.logScrollDown(10)
|
||||
}
|
||||
case tcell.KeyRune:
|
||||
switch event.Rune() {
|
||||
case 'q':
|
||||
return true
|
||||
case 'k':
|
||||
if a.pane == 0 {
|
||||
a.move(-1)
|
||||
} else {
|
||||
a.logScrollUp(1)
|
||||
}
|
||||
case 'j':
|
||||
if a.pane == 0 {
|
||||
a.move(1)
|
||||
} else {
|
||||
a.logScrollDown(1)
|
||||
}
|
||||
case 'h':
|
||||
if a.pane == 0 {
|
||||
a.toggleExpand(false)
|
||||
}
|
||||
case 'l':
|
||||
if a.pane == 0 {
|
||||
a.toggleExpand(true)
|
||||
}
|
||||
case ' ':
|
||||
if a.pane == 0 {
|
||||
if a.rangeMode {
|
||||
a.applyRangeSelection()
|
||||
} else {
|
||||
a.toggleFocusedSelection()
|
||||
}
|
||||
}
|
||||
case 'v':
|
||||
if a.pane == 0 {
|
||||
a.toggleRangeMode()
|
||||
}
|
||||
case 'a':
|
||||
if a.pane == 0 {
|
||||
a.setAllSelected(true)
|
||||
}
|
||||
case 'c':
|
||||
if a.pane == 0 {
|
||||
a.setAllSelected(false)
|
||||
}
|
||||
case 's':
|
||||
a.runSelected()
|
||||
case 'S':
|
||||
a.runAll()
|
||||
case 'e':
|
||||
a.runEscape(false)
|
||||
case 'E':
|
||||
a.runEscape(true)
|
||||
case 'f':
|
||||
a.runFormatFocused()
|
||||
case 'r':
|
||||
a.runResetAll()
|
||||
case 'd':
|
||||
a.runDumpAll()
|
||||
case 'w':
|
||||
a.toggleSelectedWatch()
|
||||
case 'W':
|
||||
a.toggleSelectedWatch()
|
||||
case 'g':
|
||||
if a.pane == 1 {
|
||||
a.logScroll = 0
|
||||
}
|
||||
case 'G':
|
||||
if a.pane == 1 {
|
||||
a.logScroll = 1 << 30
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *App) runSelected() {
|
||||
if a.cb.RunSelected == nil || a.runBusy {
|
||||
return
|
||||
}
|
||||
ids := a.actionableCommandIDs()
|
||||
if len(ids) == 0 {
|
||||
a.status = "No command selected or focused"
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.beginRun("Run selected", ids, func() (RunReport, error) {
|
||||
return a.cb.RunSelected(ids)
|
||||
})
|
||||
}
|
||||
|
||||
func (a *App) runAll() {
|
||||
if a.cb.RunAll == nil || a.runBusy {
|
||||
return
|
||||
}
|
||||
a.beginRun("Run all", a.allCommandIDs(), a.cb.RunAll)
|
||||
}
|
||||
|
||||
func (a *App) beginRun(kind string, clearCommandIDs []string, fn func() (RunReport, error)) {
|
||||
if a.screen == nil {
|
||||
return
|
||||
}
|
||||
a.clearRunLogsFor(clearCommandIDs)
|
||||
a.runBusy = true
|
||||
a.runLabel = kind
|
||||
a.status = kind + "..."
|
||||
a.logs = append(a.logs, a.status)
|
||||
go func() {
|
||||
report, err := fn()
|
||||
a.screen.PostEventWait(tcell.NewEventInterrupt(runDoneEvent{kind: kind, report: report, err: err}))
|
||||
}()
|
||||
}
|
||||
|
||||
func (a *App) clearRunLogsFor(commandIDs []string) {
|
||||
if len(commandIDs) == 0 {
|
||||
a.logScroll = 0
|
||||
return
|
||||
}
|
||||
for _, id := range commandIDs {
|
||||
delete(a.commandLogs, id)
|
||||
}
|
||||
a.logScroll = 0
|
||||
}
|
||||
|
||||
func (a *App) allCommandIDs() []string {
|
||||
ids := make([]string, 0)
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand {
|
||||
ids = append(ids, a.rows[i].ID)
|
||||
}
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
func (a *App) runEscape(minimize bool) {
|
||||
if a.cb.Escape == nil {
|
||||
return
|
||||
}
|
||||
message, err := a.cb.Escape(minimize)
|
||||
if err != nil {
|
||||
a.status = "Escape failed: " + err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = message
|
||||
a.logs = append(a.logs, message)
|
||||
}
|
||||
|
||||
func (a *App) runFormatFocused() {
|
||||
if a.cb.FormatFile == nil || a.fmtBusy {
|
||||
return
|
||||
}
|
||||
focused := a.focusedRow()
|
||||
if focused == nil || focused.File == "" {
|
||||
a.status = "No focused cook file to format"
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
if a.screen == nil {
|
||||
message, err := a.cb.FormatFile(focused.File)
|
||||
if err != nil {
|
||||
a.status = "Format failed: " + err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = message
|
||||
a.logs = append(a.logs, message)
|
||||
return
|
||||
}
|
||||
path := focused.File
|
||||
a.fmtBusy = true
|
||||
a.status = "Formatting " + path + "..."
|
||||
a.logs = append(a.logs, a.status)
|
||||
go func() {
|
||||
message, err := a.cb.FormatFile(path)
|
||||
a.screen.PostEventWait(tcell.NewEventInterrupt(formatDoneEvent{message: message, err: err}))
|
||||
}()
|
||||
}
|
||||
|
||||
func (a *App) handleFormatDone(event formatDoneEvent) {
|
||||
a.fmtBusy = false
|
||||
if event.err != nil {
|
||||
a.status = "Format failed: " + event.err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = event.message
|
||||
a.logs = append(a.logs, event.message)
|
||||
}
|
||||
|
||||
func (a *App) runResetAll() {
|
||||
if a.cb.ResetAll == nil || a.fmtBusy {
|
||||
return
|
||||
}
|
||||
if a.screen == nil {
|
||||
message, err := a.cb.ResetAll()
|
||||
if err != nil {
|
||||
a.status = "Reset failed: " + err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = message
|
||||
a.logs = append(a.logs, message)
|
||||
return
|
||||
}
|
||||
a.fmtBusy = true
|
||||
a.status = "Resetting files..."
|
||||
a.logs = append(a.logs, a.status)
|
||||
go func() {
|
||||
message, err := a.cb.ResetAll()
|
||||
a.screen.PostEventWait(tcell.NewEventInterrupt(resetDoneEvent{message: message, err: err}))
|
||||
}()
|
||||
}
|
||||
|
||||
func (a *App) handleResetDone(event resetDoneEvent) {
|
||||
a.fmtBusy = false
|
||||
if event.err != nil {
|
||||
a.status = "Reset failed: " + event.err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = event.message
|
||||
a.logs = append(a.logs, event.message)
|
||||
}
|
||||
|
||||
func (a *App) runDumpAll() {
|
||||
if a.cb.DumpAll == nil || a.fmtBusy {
|
||||
return
|
||||
}
|
||||
if a.screen == nil {
|
||||
message, err := a.cb.DumpAll()
|
||||
if err != nil {
|
||||
a.status = "Dump failed: " + err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = message
|
||||
a.logs = append(a.logs, message)
|
||||
return
|
||||
}
|
||||
a.fmtBusy = true
|
||||
a.status = "Clearing snapshots..."
|
||||
a.logs = append(a.logs, a.status)
|
||||
go func() {
|
||||
message, err := a.cb.DumpAll()
|
||||
a.screen.PostEventWait(tcell.NewEventInterrupt(dumpDoneEvent{message: message, err: err}))
|
||||
}()
|
||||
}
|
||||
|
||||
func (a *App) handleDumpDone(event dumpDoneEvent) {
|
||||
a.fmtBusy = false
|
||||
if event.err != nil {
|
||||
a.status = "Dump failed: " + event.err.Error()
|
||||
a.logs = append(a.logs, a.status)
|
||||
return
|
||||
}
|
||||
a.status = event.message
|
||||
a.logs = append(a.logs, event.message)
|
||||
}
|
||||
|
||||
func (a *App) applySummaryToRows(summaries []CommandSummary) {
|
||||
rowsByName := make(map[string]CommandSummary, len(summaries))
|
||||
for _, summary := range summaries {
|
||||
rowsByName[summary.Name] = summary
|
||||
}
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind != RowCommand {
|
||||
continue
|
||||
}
|
||||
summary, exists := rowsByName[a.rows[i].ID]
|
||||
if !exists {
|
||||
continue
|
||||
}
|
||||
a.rows[i].Status = summary.Light
|
||||
a.rows[i].HasStats = true
|
||||
a.rows[i].Seen = summary.Seen
|
||||
a.rows[i].PredSkip = summary.PredSkip
|
||||
a.rows[i].FilesMod = summary.FilesMod
|
||||
a.rows[i].Unchanged = summary.Unchanged
|
||||
a.rows[i].LuaErr = summary.LuaErr
|
||||
a.rows[i].ParseErr = summary.ParseErr
|
||||
a.rows[i].OtherErr = summary.OtherErr
|
||||
a.rows[i].Edits = summary.Edits
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) toggleExpand(expand bool) {
|
||||
if len(a.rows) == 0 || a.idx < 0 || a.idx >= len(a.rows) {
|
||||
return
|
||||
}
|
||||
if a.rows[a.idx].Kind == RowFile {
|
||||
a.expanded[a.rows[a.idx].File] = expand
|
||||
return
|
||||
}
|
||||
if a.rows[a.idx].Kind == RowCommand {
|
||||
a.expanded[a.rows[a.idx].File] = expand
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) rowVisible(idx int) bool {
|
||||
if idx < 0 || idx >= len(a.rows) {
|
||||
return false
|
||||
}
|
||||
if a.rows[idx].Kind == RowFile {
|
||||
return true
|
||||
}
|
||||
return a.expanded[a.rows[idx].File]
|
||||
}
|
||||
|
||||
func (a *App) visibleRows() []int {
|
||||
visible := make([]int, 0, len(a.rows))
|
||||
for i := range a.rows {
|
||||
if a.rowVisible(i) {
|
||||
visible = append(visible, i)
|
||||
}
|
||||
}
|
||||
return visible
|
||||
}
|
||||
|
||||
func (a *App) focusedRow() *Row {
|
||||
if a.idx < 0 || a.idx >= len(a.rows) {
|
||||
return nil
|
||||
}
|
||||
return &a.rows[a.idx]
|
||||
}
|
||||
|
||||
func (a *App) logLinesForFocus() []string {
|
||||
if a.focusedRow() == nil {
|
||||
return a.logs
|
||||
}
|
||||
if a.focusedRow().Kind == RowCommand {
|
||||
return a.commandLogs[a.focusedRow().ID]
|
||||
}
|
||||
if a.focusedRow().Kind == RowFile {
|
||||
lines := make([]string, 0)
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind != RowCommand {
|
||||
continue
|
||||
}
|
||||
if a.rows[i].File != a.focusedRow().File {
|
||||
continue
|
||||
}
|
||||
lines = append(lines, a.commandLogs[a.rows[i].ID]...)
|
||||
}
|
||||
if len(lines) > 0 {
|
||||
return lines
|
||||
}
|
||||
}
|
||||
return a.logs
|
||||
}
|
||||
|
||||
func (a *App) logScrollUp(n int) {
|
||||
a.logScroll += n
|
||||
}
|
||||
|
||||
func (a *App) logScrollDown(n int) {
|
||||
a.logScroll -= n
|
||||
if a.logScroll < 0 {
|
||||
a.logScroll = 0
|
||||
}
|
||||
}
|
||||
275
tui/app_test.go
Normal file
275
tui/app_test.go
Normal file
@@ -0,0 +1,275 @@
|
||||
package tui
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gdamore/tcell/v2"
|
||||
)
|
||||
|
||||
// TestBeginRunClearsOnlyTargetCommandLogs verifies that beginRun wipes the
// per-command logs of only the commands about to run, preserves the global
// log (appending the run status), and rewinds the scroll offset.
func TestBeginRunClearsOnlyTargetCommandLogs(t *testing.T) {
	a := &App{
		rows: []Row{{ID: "c1", Kind: RowCommand, File: "cook.toml", Label: "cmd"}},
		logs: []string{"old-1", "old-2"},
		commandLogs: map[string][]string{
			"c1": {"old-c1-log"},
			"c2": {"old-c2-log"},
		},
		logScroll: 25,
	}

	// beginRun is a no-op without an initialized screen; use tcell's
	// in-memory simulation screen.
	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	a.beginRun("Run selected", []string{"c1"}, func() (RunReport, error) {
		return RunReport{}, nil
	})

	if len(a.logs) != 3 || a.logs[2] != "Run selected..." {
		t.Fatalf("expected global logs preserved with run status appended, got %#v", a.logs)
	}
	if _, ok := a.commandLogs["c1"]; ok {
		t.Fatalf("expected c1 logs cleared, got %#v", a.commandLogs)
	}
	if len(a.commandLogs["c2"]) != 1 || a.commandLogs["c2"][0] != "old-c2-log" {
		t.Fatalf("expected c2 logs preserved, got %#v", a.commandLogs)
	}
	if a.logScroll != 0 {
		t.Fatalf("expected log scroll reset, got %d", a.logScroll)
	}
}
|
||||
|
||||
// TestHandleKeySTriggersRunAll verifies that 'S' does nothing before the
// screen is initialized, and invokes the RunAll callback (asynchronously)
// once a screen is attached.
func TestHandleKeySTriggersRunAll(t *testing.T) {
	called := make(chan struct{}, 1)
	a := &App{
		cb: Callbacks{
			RunAll: func() (RunReport, error) {
				called <- struct{}{}
				return RunReport{}, nil
			},
		},
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A", Selected: true},
			{ID: "c2", Kind: RowCommand, File: "cook.toml", Command: "B", Label: "B", Selected: true},
		},
	}

	// Without a screen, beginRun must bail out and never call RunAll.
	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 'S', tcell.ModNone))
	if quit {
		t.Fatalf("expected S key to not quit")
	}

	select {
	case <-called:
		t.Fatalf("expected no run without screen initialized")
	default:
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s
	quit = a.handleKey(tcell.NewEventKey(tcell.KeyRune, 'S', tcell.ModNone))
	if quit {
		t.Fatalf("expected S key to not quit")
	}
	// The callback runs on a goroutine; allow a short grace period.
	select {
	case <-called:
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected S key to trigger run all callback")
	}
}
|
||||
|
||||
// TestHandleKeySFallsBackToFocusedCommand verifies that 's' with nothing
// selected runs the focused command via RunSelected.
func TestHandleKeySFallsBackToFocusedCommand(t *testing.T) {
	called := make(chan []string, 1)
	a := &App{
		cb: Callbacks{
			RunSelected: func(ids []string) (RunReport, error) {
				called <- ids
				return RunReport{}, nil
			},
		},
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A"},
		},
		expanded: map[string]bool{"cook.toml": true},
		idx:      1, // focus the command row
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 's', tcell.ModNone))
	if quit {
		t.Fatalf("expected s key to not quit")
	}

	// The callback runs on a goroutine; allow a short grace period.
	select {
	case ids := <-called:
		if len(ids) != 1 || ids[0] != "c1" {
			t.Fatalf("expected focused command id, got %#v", ids)
		}
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected s key to trigger run selected callback")
	}
}
|
||||
|
||||
// TestHandleKeyFFormatsFocusedFile verifies that 'f' formats the file of
// the focused row (here a command row, which maps to its file).
func TestHandleKeyFFormatsFocusedFile(t *testing.T) {
	called := make(chan string, 1)
	a := &App{
		cb: Callbacks{
			FormatFile: func(path string) (string, error) {
				called <- path
				return "formatted", nil
			},
		},
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A"},
		},
		expanded: map[string]bool{"cook.toml": true},
		idx:      1, // focus the command row
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 'f', tcell.ModNone))
	if quit {
		t.Fatalf("expected f key to not quit")
	}

	// The callback runs on a goroutine; allow a short grace period.
	select {
	case p := <-called:
		if p != "cook.toml" {
			t.Fatalf("expected focused file path, got %q", p)
		}
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected f key to trigger format callback")
	}
}
|
||||
|
||||
// TestHandleKeyRTriggersResetAll verifies that 'r' invokes the ResetAll
// callback (asynchronously, since a screen is attached).
func TestHandleKeyRTriggersResetAll(t *testing.T) {
	called := make(chan struct{}, 1)
	a := &App{
		cb: Callbacks{
			ResetAll: func() (string, error) {
				called <- struct{}{}
				return "reset", nil
			},
		},
		rows: []Row{{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"}},
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 'r', tcell.ModNone))
	if quit {
		t.Fatalf("expected r key to not quit")
	}

	// The callback runs on a goroutine; allow a short grace period.
	select {
	case <-called:
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected r key to trigger reset callback")
	}
}
|
||||
|
||||
// TestHandleKeyDTriggersDumpAll verifies that 'd' invokes the DumpAll
// callback (asynchronously, since a screen is attached).
func TestHandleKeyDTriggersDumpAll(t *testing.T) {
	called := make(chan struct{}, 1)
	a := &App{
		cb: Callbacks{
			DumpAll: func() (string, error) {
				called <- struct{}{}
				return "dump", nil
			},
		},
		rows: []Row{{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"}},
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 'd', tcell.ModNone))
	if quit {
		t.Fatalf("expected d key to not quit")
	}

	// The callback runs on a goroutine; allow a short grace period.
	select {
	case <-called:
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected d key to trigger dump callback")
	}
}
|
||||
|
||||
// TestHandleKeySUsesRangeWhenNoSelection verifies that with nothing
// explicitly selected but range mode active, 's' runs the commands covered
// by the range, in row order.
func TestHandleKeySUsesRangeWhenNoSelection(t *testing.T) {
	called := make(chan []string, 1)
	a := &App{
		cb: Callbacks{
			RunSelected: func(ids []string) (RunReport, error) {
				called <- ids
				return RunReport{}, nil
			},
		},
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A"},
			{ID: "c2", Kind: RowCommand, File: "cook.toml", Command: "B", Label: "B"},
			{ID: "c3", Kind: RowCommand, File: "cook.toml", Command: "C", Label: "C"},
		},
		expanded: map[string]bool{"cook.toml": true},
		idx:      1,
	}

	s := tcell.NewSimulationScreen("UTF-8")
	if err := s.Init(); err != nil {
		t.Fatalf("failed to init simulation screen: %v", err)
	}
	defer s.Fini()
	a.screen = s

	// Start a range at row 1 and move the focus to row 3.
	a.toggleRangeMode()
	a.idx = 3

	quit := a.handleKey(tcell.NewEventKey(tcell.KeyRune, 's', tcell.ModNone))
	if quit {
		t.Fatalf("expected s key to not quit")
	}

	// The callback runs on a goroutine; allow a short grace period.
	select {
	case ids := <-called:
		if len(ids) != 3 {
			t.Fatalf("expected 3 range commands, got %#v", ids)
		}
		if ids[0] != "c1" || ids[1] != "c2" || ids[2] != "c3" {
			t.Fatalf("unexpected range command order: %#v", ids)
		}
	case <-time.After(200 * time.Millisecond):
		t.Fatalf("expected s key to trigger range run")
	}
}
|
||||
279
tui/model.go
Normal file
279
tui/model.go
Normal file
@@ -0,0 +1,279 @@
|
||||
package tui
|
||||
|
||||
func (a *App) move(delta int) {
|
||||
if len(a.rows) == 0 {
|
||||
return
|
||||
}
|
||||
visible := a.visibleRows()
|
||||
if len(visible) == 0 {
|
||||
return
|
||||
}
|
||||
current := 0
|
||||
for i, idx := range visible {
|
||||
if idx == a.idx {
|
||||
current = i
|
||||
break
|
||||
}
|
||||
}
|
||||
current += delta
|
||||
if current < 0 {
|
||||
current = 0
|
||||
}
|
||||
if current >= len(visible) {
|
||||
current = len(visible) - 1
|
||||
}
|
||||
a.idx = visible[current]
|
||||
}
|
||||
|
||||
func (a *App) toggleRangeMode() {
|
||||
if len(a.rows) == 0 || a.idx < 0 || a.idx >= len(a.rows) {
|
||||
return
|
||||
}
|
||||
if a.rangeMode {
|
||||
a.rangeMode = false
|
||||
a.status = "Range select cancelled"
|
||||
return
|
||||
}
|
||||
a.rangeMode = true
|
||||
a.rangeStart = a.idx
|
||||
a.status = "Range select started"
|
||||
}
|
||||
|
||||
// applyRangeSelection applies the pending range (rangeStart..idx, either
// order) as a selection toggle: commands are selected unless every command
// covered by the range is already selected, in which case they are
// unselected. A file row in the range stands for all commands of that file
// (even collapsed ones); hidden rows in the range itself are skipped.
// Always exits range mode.
func (a *App) applyRangeSelection() {
	if !a.rangeMode {
		return
	}
	// Normalize so start <= end.
	start := a.rangeStart
	end := a.idx
	if start > end {
		start, end = end, start
	}
	// First pass: decide the target state by inspecting every covered command.
	allSelected := true
	seenCommand := false
	for i := start; i <= end; i++ {
		if i < 0 || i >= len(a.rows) || !a.rowVisible(i) {
			continue
		}
		if a.rows[i].Kind == RowFile {
			// A file row fans out to every command of that file.
			for j := range a.rows {
				if a.rows[j].Kind == RowCommand && a.rows[j].File == a.rows[i].File {
					seenCommand = true
					if !a.rows[j].Selected {
						allSelected = false
					}
				}
			}
			continue
		}
		seenCommand = true
		if !a.rows[i].Selected {
			allSelected = false
		}
	}
	target := true
	if seenCommand && allSelected {
		target = false
	}
	// Second pass: apply the target state to the same coverage and count
	// how many rows were touched.
	changed := 0
	for i := start; i <= end; i++ {
		if i < 0 || i >= len(a.rows) || !a.rowVisible(i) {
			continue
		}
		if a.rows[i].Kind == RowFile {
			for j := range a.rows {
				if a.rows[j].Kind == RowCommand && a.rows[j].File == a.rows[i].File {
					a.rows[j].Selected = target
					changed++
				}
			}
			continue
		}
		a.rows[i].Selected = target
		changed++
	}
	a.rangeMode = false
	if changed == 0 {
		a.status = "Range selected (no commands)"
		return
	}
	if target {
		a.status = "Range selected"
	} else {
		a.status = "Range unselected"
	}
}
|
||||
|
||||
func (a *App) toggleFocusedSelection() {
|
||||
if len(a.rows) == 0 || a.idx >= len(a.rows) {
|
||||
return
|
||||
}
|
||||
r := a.rows[a.idx]
|
||||
if r.Kind == RowFile {
|
||||
target := !a.fileFullySelected(r.File)
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand && a.rows[i].File == r.File {
|
||||
a.rows[i].Selected = target
|
||||
}
|
||||
}
|
||||
if target {
|
||||
a.status = "Selected all commands for file"
|
||||
} else {
|
||||
a.status = "Cleared all commands for file"
|
||||
}
|
||||
return
|
||||
}
|
||||
a.rows[a.idx].Selected = !a.rows[a.idx].Selected
|
||||
}
|
||||
|
||||
func (a *App) fileFullySelected(file string) bool {
|
||||
seen := false
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind != RowCommand || a.rows[i].File != file {
|
||||
continue
|
||||
}
|
||||
seen = true
|
||||
if !a.rows[i].Selected {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return seen
|
||||
}
|
||||
|
||||
func (a *App) setAllSelected(selected bool) {
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand {
|
||||
a.rows[i].Selected = selected
|
||||
}
|
||||
}
|
||||
if selected {
|
||||
a.status = "Selected all commands"
|
||||
} else {
|
||||
a.status = "Cleared all selections"
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) selectedCommandIDs() []string {
|
||||
ids := make([]string, 0)
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand && a.rows[i].Selected {
|
||||
ids = append(ids, a.rows[i].ID)
|
||||
}
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
func (a *App) actionableCommandIDs() []string {
|
||||
ids := a.selectedCommandIDs()
|
||||
if len(ids) > 0 {
|
||||
return ids
|
||||
}
|
||||
if a.rangeMode {
|
||||
ids = a.rangeCommandIDs()
|
||||
if len(ids) > 0 {
|
||||
return ids
|
||||
}
|
||||
}
|
||||
focused := a.focusedRow()
|
||||
if focused != nil && focused.Kind == RowCommand {
|
||||
return []string{focused.ID}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rangeCommandIDs returns the de-duplicated command IDs covered by the
// active range (rangeStart..idx, either order), in first-appearance order.
// Hidden rows in the range are skipped; a file row contributes every
// command of that file. Returns nil when range mode is off.
func (a *App) rangeCommandIDs() []string {
	if !a.rangeMode || len(a.rows) == 0 {
		return nil
	}
	// Normalize so start <= end.
	start := a.rangeStart
	end := a.idx
	if start > end {
		start, end = end, start
	}
	ids := make([]string, 0)
	seen := make(map[string]struct{})
	for i := start; i <= end; i++ {
		if i < 0 || i >= len(a.rows) || !a.rowVisible(i) {
			continue
		}
		if a.rows[i].Kind == RowFile {
			// A file row fans out to every command of that file.
			for j := range a.rows {
				if a.rows[j].Kind != RowCommand || a.rows[j].File != a.rows[i].File {
					continue
				}
				if _, ok := seen[a.rows[j].ID]; ok {
					continue
				}
				seen[a.rows[j].ID] = struct{}{}
				ids = append(ids, a.rows[j].ID)
			}
			continue
		}
		if a.rows[i].Kind != RowCommand {
			continue
		}
		if _, ok := seen[a.rows[i].ID]; ok {
			continue
		}
		seen[a.rows[i].ID] = struct{}{}
		ids = append(ids, a.rows[i].ID)
	}
	return ids
}
|
||||
|
||||
func (a *App) toggleSelectedWatch() {
|
||||
selected := a.actionableCommandIDs()
|
||||
if len(selected) == 0 {
|
||||
a.status = "No command selected or focused"
|
||||
return
|
||||
}
|
||||
|
||||
allWatched := true
|
||||
selectedSet := make(map[string]struct{}, len(selected))
|
||||
for _, id := range selected {
|
||||
selectedSet[id] = struct{}{}
|
||||
}
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind != RowCommand {
|
||||
continue
|
||||
}
|
||||
if _, ok := selectedSet[a.rows[i].ID]; !ok {
|
||||
continue
|
||||
}
|
||||
if !a.rows[i].Watched {
|
||||
allWatched = false
|
||||
break
|
||||
}
|
||||
}
|
||||
target := !allWatched
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind != RowCommand {
|
||||
continue
|
||||
}
|
||||
if _, ok := selectedSet[a.rows[i].ID]; !ok {
|
||||
continue
|
||||
}
|
||||
a.rows[i].Watched = target
|
||||
}
|
||||
a.syncWatchState()
|
||||
}
|
||||
|
||||
func (a *App) syncWatchState() {
|
||||
if a.cb.OnWatchSync == nil {
|
||||
return
|
||||
}
|
||||
ids := make([]string, 0)
|
||||
for i := range a.rows {
|
||||
if a.rows[i].Kind == RowCommand && a.rows[i].Watched {
|
||||
ids = append(ids, a.rows[i].ID)
|
||||
}
|
||||
}
|
||||
if err := a.cb.OnWatchSync(ids); err != nil {
|
||||
a.status = "Watch sync failed: " + err.Error()
|
||||
return
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
a.status = "Watch cleared"
|
||||
} else {
|
||||
a.status = "Watching commands updated"
|
||||
}
|
||||
}
|
||||
69
tui/model_test.go
Normal file
69
tui/model_test.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package tui
|
||||
|
||||
import "testing"
|
||||
|
||||
// TestToggleSelectedWatchAffectsSelectedOnly verifies that toggling watch
// acts only on explicitly selected commands, and that a second toggle turns
// the watch back off, leaving unselected rows untouched throughout.
func TestToggleSelectedWatchAffectsSelectedOnly(t *testing.T) {
	a := &App{
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A", Selected: true},
			{ID: "c2", Kind: RowCommand, File: "cook.toml", Command: "B", Label: "B", Selected: false},
		},
	}

	a.toggleSelectedWatch()

	if !a.rows[1].Watched {
		t.Fatalf("expected selected command to become watched")
	}
	if a.rows[2].Watched {
		t.Fatalf("expected unselected command to remain unchanged")
	}

	// All targets are now watched, so a second toggle must unwatch them.
	a.toggleSelectedWatch()
	if a.rows[1].Watched {
		t.Fatalf("expected selected command watch to toggle off")
	}
}
|
||||
|
||||
// TestRangeSelectSpaceSelectsRange verifies that anchoring range mode at one
// row, moving the cursor, and applying selects every command row in the span.
func TestRangeSelectSpaceSelectsRange(t *testing.T) {
	a := &App{
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A"},
			{ID: "c2", Kind: RowCommand, File: "cook.toml", Command: "B", Label: "B"},
			{ID: "c3", Kind: RowCommand, File: "cook.toml", Command: "C", Label: "C"},
		},
		expanded: map[string]bool{"cook.toml": true},
	}

	// Anchor at row 1, extend to row 3, then apply the range.
	a.idx = 1
	a.toggleRangeMode()
	a.idx = 3
	a.applyRangeSelection()

	if !a.rows[1].Selected || !a.rows[2].Selected || !a.rows[3].Selected {
		t.Fatalf("expected rows 1..3 to be selected")
	}
}
|
||||
|
||||
// TestToggleSelectedWatchFallsBackToFocusedCommand verifies that with no
// explicit selection the watch toggle applies to the focused command only.
func TestToggleSelectedWatchFallsBackToFocusedCommand(t *testing.T) {
	a := &App{
		rows: []Row{
			{ID: "f", Kind: RowFile, File: "cook.toml", Label: "cook.toml"},
			{ID: "c1", Kind: RowCommand, File: "cook.toml", Command: "A", Label: "A"},
			{ID: "c2", Kind: RowCommand, File: "cook.toml", Command: "B", Label: "B"},
		},
		expanded: map[string]bool{"cook.toml": true},
		idx:      2, // focus sits on c2
	}

	a.toggleSelectedWatch()

	if a.rows[1].Watched {
		t.Fatalf("expected unfocused command to remain unchanged")
	}
	if !a.rows[2].Watched {
		t.Fatalf("expected focused command to become watched")
	}
}
|
||||
45
tui/styles.go
Normal file
45
tui/styles.go
Normal file
@@ -0,0 +1,45 @@
|
||||
package tui
|
||||
|
||||
import "github.com/gdamore/tcell/v2"
|
||||
|
||||
// Package-wide tcell styles: the single palette for the whole TUI.
var (
	styleHeader    = tcell.StyleDefault.Foreground(tcell.ColorWhite).Bold(true)     // top header line
	styleHeaderCol = tcell.StyleDefault.Foreground(tcell.ColorLightCyan).Bold(true) // pane titles / column headers
	styleDim       = tcell.StyleDefault.Foreground(tcell.ColorGray)                 // de-emphasized text
	styleNormal    = tcell.StyleDefault.Foreground(tcell.ColorWhite)                // default row text
	styleFile      = tcell.StyleDefault.Foreground(tcell.ColorLightCyan).Bold(true) // cook-file rows
	styleFooter    = tcell.StyleDefault.Foreground(tcell.ColorWhite)                // status footer
	styleSummary   = tcell.StyleDefault.Foreground(tcell.ColorWhite)                // run summary text
	styleGreen     = tcell.StyleDefault.Foreground(tcell.ColorLime)                 // [INFO] log tag
	styleYellow    = tcell.StyleDefault.Foreground(tcell.ColorYellow)               // [WARNING] log tag
	styleRed       = tcell.StyleDefault.Foreground(tcell.ColorRed)                  // [ERROR] log tag
	styleWatchBlue = tcell.StyleDefault.Foreground(tcell.ColorDeepSkyBlue).Bold(true) // watch indicator / [LUA] tag
	styleStatusG   = tcell.StyleDefault.Foreground(tcell.ColorLime).Bold(true)      // green status light
	styleStatusY   = tcell.StyleDefault.Foreground(tcell.ColorYellow).Bold(true)    // yellow status light
	styleStatusR   = tcell.StyleDefault.Foreground(tcell.ColorRed).Bold(true)       // red status light
	styleStatusNil = tcell.StyleDefault.Foreground(tcell.ColorGray)                 // no status yet
	styleFocusBg   = tcell.ColorDarkSlateGray                                       // background color for focused row/pane
)
|
||||
|
||||
func applyFocus(st tcell.Style, focused bool) tcell.Style {
|
||||
if !focused {
|
||||
return st
|
||||
}
|
||||
return st.Background(styleFocusBg)
|
||||
}
|
||||
|
||||
func styleForRow(r Row) tcell.Style {
|
||||
if r.Watched {
|
||||
return tcell.StyleDefault.Foreground(tcell.ColorDeepSkyBlue)
|
||||
}
|
||||
switch r.Status {
|
||||
case StatusGreen:
|
||||
return tcell.StyleDefault.Foreground(tcell.ColorLime)
|
||||
case StatusYellow:
|
||||
return tcell.StyleDefault.Foreground(tcell.ColorYellow)
|
||||
case StatusRed:
|
||||
return tcell.StyleDefault.Foreground(tcell.ColorRed)
|
||||
default:
|
||||
return styleNormal
|
||||
}
|
||||
}
|
||||
73
tui/types.go
Normal file
73
tui/types.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package tui
|
||||
|
||||
// RowKind distinguishes the two kinds of rows in the left-hand tree.
type RowKind int

const (
	RowFile    RowKind = iota // a cook file (tree parent)
	RowCommand                // a command inside a cook file (tree child)
)

// StatusLight is the traffic-light status shown for a command after a run.
type StatusLight int

const (
	StatusNone   StatusLight = iota // no run recorded yet
	StatusGreen                     // last run succeeded (see toTUISummaries for derivation)
	StatusYellow                    // last run completed without modifications or errors
	StatusRed                       // last run had errors
)

// Row is a single entry in the tree pane: a cook file or a command, together
// with its selection/watch flags and the per-run statistics shown in the UI.
type Row struct {
	ID        string      // stable identifier used by callbacks and watch sync
	Kind      RowKind     // file or command
	File      string      // cook file this row belongs to
	Command   string      // command name (commands only)
	Label     string      // text displayed in the tree
	Selected  bool        // marked via space/range selection
	Watched   bool        // included in the watch set
	WatchBusy bool        // a watch-triggered run is in flight (spinner shown)
	Status    StatusLight // traffic light from the last run
	HasStats  bool        // true once the stat fields below are populated
	Seen      int64       // files seen
	PredSkip  int64       // files skipped by predicate
	FilesMod  int64       // files modified
	Unchanged int64       // files left unchanged
	LuaErr    int64       // Lua errors
	ParseErr  int64       // parse errors
	OtherErr  int64       // other errors
	Edits     int64       // total edits applied
}

// CommandSummary is the per-command outcome of one run as reported to the TUI.
type CommandSummary struct {
	Name      string
	Seen      int64
	PredSkip  int64
	FilesMod  int64
	Unchanged int64
	LuaErr    int64
	ParseErr  int64
	OtherErr  int64
	Edits     int64
	Light     StatusLight // derived traffic light for the command
	Watched   bool        // command is in the watch set
	Running   bool        // command was part of the triggering run
}

// RunReport aggregates the outcome of one run across all commands.
type RunReport struct {
	Processed int64 // files processed
	Failed    int64 // files that failed
	Modified  int64 // total modifications
	ElapsedMs int64 // wall-clock duration in milliseconds
	Summaries []CommandSummary
}

// WatchReloadResult is what a watch tick hands back to the TUI: refreshed
// rows, an optional run report, and a human-readable status message.
type WatchReloadResult struct {
	Rows          []Row
	Report        RunReport
	TriggeredRuns int // number of watched commands rerun (0 = nothing changed)
	Message       string
}

// LogEvent is one log line routed to the TUI, optionally attributed to a command.
type LogEvent struct {
	CommandID string // row ID the line belongs to ("" = unattributed)
	Line      string
}
|
||||
384
tui/view.go
Normal file
384
tui/view.go
Normal file
@@ -0,0 +1,384 @@
|
||||
package tui
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/gdamore/tcell/v2"
|
||||
)
|
||||
|
||||
// spinnerFrames are the animation frames for the run/watch spinner.
var spinnerFrames = []string{"|", "/", "-", "\\"}

// column describes one column of the tree pane: header label and fixed width.
type column struct {
	name  string
	width int
}
|
||||
|
||||
// draw renders the full screen: header, key help, the two panes separated by
// a vertical rule, and the status footer.
func (a *App) draw(screen tcell.Screen) {
	screen.Clear()
	width, height := screen.Size()

	// Header: app name, optional run spinner + label, optional version.
	header := "chef tui"
	if a.runBusy {
		header += " run:" + spinnerFrames[a.spinnerFrame]
		if a.runLabel != "" {
			header += " " + a.runLabel
		}
	}
	if a.version != "" {
		header += " " + a.version
	}
	drawText(screen, 0, 0, styleHeader, trimToWidth(header, width))
	drawText(screen, 0, 1, styleDim, trimToWidth("keys: tab switch pane | j/k move | h/l collapse/expand | v range | space select | s/S run | f fmt | r reset | d dump | e/E escape | w watch | q quit", width))

	// Left pane gets a third of the screen, clamped so both panes keep a
	// minimum usable width of 40 columns.
	leftWidth := width / 3
	if leftWidth < 40 {
		leftWidth = 40
	}
	if leftWidth > width-40 {
		leftWidth = width - 40
	}
	rightWidth := width - leftWidth - 1 // -1 for the separator column

	top := 3
	footer := height - 1
	if footer <= top {
		footer = top + 1 // degenerate terminal: keep at least one body row
	}
	bodyHeight := footer - top

	a.drawLeftPane(screen, 0, top, leftWidth, bodyHeight)
	a.drawRightPane(screen, leftWidth+1, top, rightWidth, bodyHeight)

	// Vertical separator between the panes.
	for y := top; y < top+bodyHeight; y++ {
		screen.SetContent(leftWidth, y, '|', nil, styleDim)
	}

	drawText(screen, 0, footer, styleFooter, trimToWidth(a.status, width))
	screen.Show()
}
|
||||
|
||||
// drawLeftPane renders the file/command tree: a pane title (highlighted when
// this pane has focus), a column header, and a window of visible rows kept
// scrolled so the focused row stays on screen.
func (a *App) drawLeftPane(screen tcell.Screen, x int, y int, width int, height int) {
	if width <= 0 || height <= 0 {
		return
	}
	leftStyle := styleHeaderCol
	if a.pane == 0 {
		leftStyle = applyFocus(styleHeaderCol, true)
	}
	drawCell(screen, x, y, width, " Cook Files / Commands ", leftStyle)

	// Fixed-width columns; Name absorbs whatever space remains (min 20).
	cols := []column{
		{name: "S", width: 1},
		{name: "Name", width: max(20, width-1-1-7-1-5-1)},
		{name: "St", width: 7},
		{name: "W", width: 5},
	}
	drawColumnsHeader(screen, x, y+1, width, cols)

	visible := a.visibleRows()
	rowsHeight := height - 2 // minus pane title and column header
	if rowsHeight < 1 {
		return
	}

	// Locate the focused row within the visible list.
	focused := 0
	for i, idx := range visible {
		if idx == a.idx {
			focused = i
			break
		}
	}

	// Choose a scroll window that keeps the focused row on screen.
	start := 0
	if focused >= rowsHeight {
		start = focused - rowsHeight + 1
	}
	end := start + rowsHeight
	if end > len(visible) {
		end = len(visible)
	}

	for rowY, i := y+2, start; i < end; i, rowY = i+1, rowY+1 {
		a.drawTreeRow(screen, x, rowY, width, cols, visible[i] == a.idx, a.rows[visible[i]])
	}
}
|
||||
|
||||
// drawTreeRow renders one row of the tree pane: selection marker, name with
// expand/collapse indicator, status light, and watch indicator. Rows inside
// an active range selection get a highlighted background.
func (a *App) drawTreeRow(screen tcell.Screen, x int, y int, width int, cols []column, focused bool, row Row) {
	// Determine whether this row falls inside the pending v-mode range.
	inRange := false
	if a.rangeMode {
		start := a.rangeStart
		end := a.idx
		if start > end {
			start, end = end, start
		}
		for i := start; i <= end; i++ {
			if i >= 0 && i < len(a.rows) && a.rows[i].ID == row.ID {
				inRange = true
				break
			}
		}
	}

	// Paint the background first so every cell of the row is tinted.
	if focused {
		drawCell(screen, x, y, width, "", applyFocus(styleNormal, true))
	} else if inRange {
		drawCell(screen, x, y, width, "", styleDim.Background(tcell.ColorDarkSlateGray))
	}

	// Selection marker: "x" selected; for files "." means partially selected.
	sel := ""
	if row.Kind == RowCommand {
		if row.Selected {
			sel = "x"
		}
	} else {
		all := a.fileFullySelected(row.File)
		any := false
		for i := range a.rows {
			if a.rows[i].Kind == RowCommand && a.rows[i].File == row.File && a.rows[i].Selected {
				any = true
				break
			}
		}
		if all {
			sel = "x"
		} else if any {
			sel = "."
		}
	}
	// Name: commands are indented, files carry an expand/collapse arrow.
	name := row.Label
	if row.Kind == RowCommand {
		name = " " + name
	} else {
		if a.expanded[row.File] {
			name = "▾ " + name
		} else {
			name = "▸ " + name
		}
	}

	// Status light: a solid dot when the command has a known status.
	status := "·"
	if row.Kind == RowCommand {
		switch row.Status {
		case StatusGreen, StatusYellow, StatusRed:
			status = "●"
		}
	}

	// Watch indicator: dot when watched, spinner while a watched run is busy.
	watch := "-"
	if row.Watched {
		watch = "●"
		if row.WatchBusy {
			watch = spinnerFrames[a.spinnerFrame]
		}
	}

	base := styleNormal
	if row.Kind == RowFile {
		base = styleFile
	}
	if focused {
		base = applyFocus(base, true)
	} else if inRange {
		base = base.Background(tcell.ColorDarkSlateGray)
	}

	// Lay the cells out left to right, one separator column between each.
	cx := x
	drawCell(screen, cx, y, cols[0].width, sel, base)
	cx += cols[0].width + 1
	drawCell(screen, cx, y, cols[1].width, name, base)
	cx += cols[1].width + 1
	drawCell(screen, cx, y, cols[2].width, status, applyFocus(statusStyle(row), focused))
	cx += cols[2].width + 1
	drawCell(screen, cx, y, cols[3].width, watch, applyFocus(watchStyle(row), focused))
}
|
||||
|
||||
// drawRightPane renders the log pane: a title naming the focused row and a
// scrollback of log lines, wrapped to the pane width and anchored at the
// bottom (newest line last), honoring the current scroll offset.
func (a *App) drawRightPane(screen tcell.Screen, x int, y int, width int, height int) {
	if width <= 0 || height <= 0 {
		return
	}
	rightStyle := styleHeaderCol
	if a.pane == 1 {
		rightStyle = applyFocus(styleHeaderCol, true)
	}
	title := " Logs "
	if a.focusedRow() != nil {
		title = " Logs: " + a.focusedRow().Label + " "
	}
	drawCell(screen, x, y, width, title, rightStyle)

	lines := a.logLinesForFocus()
	linesHeight := height - 1
	if linesHeight < 1 {
		return
	}
	// Wrap newest-first until enough wrapped rows exist to fill the viewport
	// plus the scroll offset; older lines never need wrapping.
	reverseWrapped := make([]string, 0, linesHeight+a.logScroll+8)
	target := linesHeight + a.logScroll
	for li := len(lines) - 1; li >= 0; li-- {
		parts := wrapLine(lines[li], width)
		if len(parts) == 0 {
			reverseWrapped = append(reverseWrapped, "")
		} else {
			for pi := len(parts) - 1; pi >= 0; pi-- {
				reverseWrapped = append(reverseWrapped, parts[pi])
			}
		}
		if len(reverseWrapped) >= target {
			break
		}
	}

	// Clamp the scroll offset to what is actually available.
	maxScroll := len(reverseWrapped) - linesHeight
	if maxScroll < 0 {
		maxScroll = 0
	}
	if a.logScroll > maxScroll {
		a.logScroll = maxScroll
	}

	segStart := a.logScroll
	segEnd := segStart + linesHeight
	if segEnd > len(reverseWrapped) {
		segEnd = len(reverseWrapped)
	}

	// reverseWrapped is newest-first, so draw it back-to-front.
	rowY := y + 1
	for i := segEnd - 1; i >= segStart && rowY < y+height; i-- {
		drawLogLine(screen, x, rowY, width, reverseWrapped[i])
		rowY++
	}
}
|
||||
|
||||
func wrapLine(line string, width int) []string {
|
||||
if width <= 0 {
|
||||
return nil
|
||||
}
|
||||
if isTableLine(line) {
|
||||
return []string{trimToWidth(line, width)}
|
||||
}
|
||||
runes := []rune(line)
|
||||
if len(runes) == 0 {
|
||||
return []string{""}
|
||||
}
|
||||
out := make([]string, 0, (len(runes)/width)+1)
|
||||
for len(runes) > width {
|
||||
out = append(out, string(runes[:width]))
|
||||
runes = runes[width:]
|
||||
}
|
||||
out = append(out, string(runes))
|
||||
return out
|
||||
}
|
||||
|
||||
// isTableLine reports whether line looks like part of an ASCII table (border
// or row): it must start with '+' or '|' and contain at least three of
// either separator. Such lines are clipped rather than wrapped.
func isTableLine(line string) bool {
	if len(line) == 0 {
		return false
	}
	if line[0] != '+' && line[0] != '|' {
		return false
	}
	if strings.Count(line, "|") >= 3 {
		return true
	}
	return strings.Count(line, "+") >= 3
}
|
||||
|
||||
func drawLogLine(screen tcell.Screen, x int, y int, width int, line string) {
|
||||
drawCell(screen, x, y, width, line, styleNormal)
|
||||
tagStyle := styleNormal
|
||||
tag := ""
|
||||
if strings.Contains(line, "[LUA]") {
|
||||
tag = "[LUA]"
|
||||
tagStyle = styleWatchBlue
|
||||
} else if strings.Contains(line, "[INFO]") {
|
||||
tag = "[INFO]"
|
||||
tagStyle = styleGreen
|
||||
} else if strings.Contains(line, "[WARNING]") {
|
||||
tag = "[WARNING]"
|
||||
tagStyle = styleYellow
|
||||
} else if strings.Contains(line, "[ERROR]") {
|
||||
tag = "[ERROR]"
|
||||
tagStyle = styleRed
|
||||
}
|
||||
if tag == "" {
|
||||
return
|
||||
}
|
||||
start := strings.Index(line, tag)
|
||||
if start < 0 || start >= width {
|
||||
return
|
||||
}
|
||||
runes := []rune(tag)
|
||||
for i := 0; i < len(runes) && start+i < width; i++ {
|
||||
screen.SetContent(x+start+i, y, runes[i], nil, tagStyle)
|
||||
}
|
||||
}
|
||||
|
||||
func drawColumnsHeader(screen tcell.Screen, x int, y int, width int, cols []column) {
|
||||
cx := x
|
||||
for _, col := range cols {
|
||||
if cx >= x+width {
|
||||
return
|
||||
}
|
||||
drawCell(screen, cx, y, col.width, col.name, styleHeaderCol)
|
||||
cx += col.width + 1
|
||||
}
|
||||
}
|
||||
|
||||
func statusStyle(row Row) tcell.Style {
|
||||
s := styleStatusNil
|
||||
switch row.Status {
|
||||
case StatusGreen:
|
||||
s = styleStatusG
|
||||
case StatusYellow:
|
||||
s = styleStatusY
|
||||
case StatusRed:
|
||||
s = styleStatusR
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func watchStyle(row Row) tcell.Style {
|
||||
if row.Watched {
|
||||
return styleWatchBlue
|
||||
}
|
||||
return styleDim
|
||||
}
|
||||
|
||||
func drawCell(screen tcell.Screen, x int, y int, width int, text string, style tcell.Style) {
|
||||
if width <= 0 {
|
||||
return
|
||||
}
|
||||
runes := []rune(text)
|
||||
if len(runes) > width {
|
||||
runes = runes[:width]
|
||||
}
|
||||
for i := 0; i < width; i++ {
|
||||
ch := ' '
|
||||
if i < len(runes) {
|
||||
ch = runes[i]
|
||||
}
|
||||
screen.SetContent(x+i, y, ch, nil, style)
|
||||
}
|
||||
}
|
||||
|
||||
func drawText(screen tcell.Screen, x int, y int, style tcell.Style, text string) {
|
||||
for i, r := range text {
|
||||
screen.SetContent(x+i, y, r, nil, style)
|
||||
}
|
||||
}
|
||||
|
||||
// trimToWidth clips s to at most w screen columns (runes). Strings that
// already fit are returned unchanged; overlong strings are cut to w-1 runes
// (or exactly w when w <= 1).
// NOTE(review): the w-1 cut looks like it was meant to leave room for an
// ellipsis that is never appended — confirm intent before "fixing" it.
func trimToWidth(s string, w int) string {
	if w <= 0 {
		return ""
	}
	rs := []rune(s)
	switch {
	case len(rs) <= w:
		return s
	case w == 1:
		return string(rs[:1])
	default:
		return string(rs[:w-1])
	}
}
|
||||
|
||||
// max returns the larger of a and b. (Local helper that shadows the Go 1.21
// built-in of the same name within this package.)
func max(a int, b int) int {
	if b > a {
		return b
	}
	return a
}
|
||||
108
tui/watch.go
Normal file
108
tui/watch.go
Normal file
@@ -0,0 +1,108 @@
|
||||
package tui
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// fileStamp captures one observation of a watched file: whether it exists
// and, if so, the SHA-1 of its full contents. Two equal stamps mean the
// file was unchanged between polls.
type fileStamp struct {
	exists bool
	hash   string
}
|
||||
|
||||
// StartCookWatcher polls the given cook files for content changes (SHA-1 of
// the full file) every interval and signals on the returned event channel
// when any file differs from its previous stamp.
//
// It returns an event channel (coalesced: at most one pending event), an
// error channel (currently never written to; reserved for future use), and
// a stop function that is safe to call multiple times. Both channels are
// closed when the watcher goroutine exits.
func StartCookWatcher(files []string, interval time.Duration) (<-chan struct{}, <-chan error, func(), error) {
	if len(files) == 0 {
		return nil, nil, nil, fmt.Errorf("no cook files to watch")
	}
	if interval <= 0 {
		interval = 500 * time.Millisecond // default poll rate
	}

	// Normalize and deduplicate so the same file given via different
	// spellings is only hashed once per tick.
	normalized := make([]string, 0, len(files))
	seen := make(map[string]struct{}, len(files))
	for _, f := range files {
		nf, err := normalizePath(f)
		if err != nil {
			return nil, nil, nil, fmt.Errorf("normalize watch file %q: %w", f, err)
		}
		if _, ok := seen[nf]; ok {
			continue
		}
		seen[nf] = struct{}{}
		normalized = append(normalized, nf)
	}

	// Take a baseline stamp of each file before the first tick.
	stamps := make(map[string]fileStamp, len(normalized))
	for _, f := range normalized {
		stamps[f] = statStamp(f)
	}

	events := make(chan struct{}, 1) // buffer of 1 coalesces change bursts
	errs := make(chan error, 8)
	stopCh := make(chan struct{})
	var once sync.Once

	go func() {
		defer close(events)
		defer close(errs)

		ticker := time.NewTicker(interval)
		defer ticker.Stop()

		for {
			select {
			case <-stopCh:
				return
			case <-ticker.C:
				// Re-stamp every file; any difference (content change,
				// creation, deletion) counts as a change.
				changed := false
				for _, f := range normalized {
					next := statStamp(f)
					prev := stamps[f]
					if next != prev {
						stamps[f] = next
						changed = true
					}
				}
				if changed {
					// Non-blocking send: if an event is already pending,
					// the consumer will observe this change with it.
					select {
					case events <- struct{}{}:
					default:
					}
				}
			}
		}
	}()

	stop := func() {
		once.Do(func() { close(stopCh) })
	}

	return events, errs, stop, nil
}
|
||||
|
||||
func statStamp(path string) fileStamp {
|
||||
b, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return fileStamp{exists: false}
|
||||
}
|
||||
sum := sha1.Sum(b)
|
||||
return fileStamp{exists: true, hash: fmt.Sprintf("%x", sum[:])}
|
||||
}
|
||||
|
||||
// normalizePath converts path to a cleaned absolute form suitable for use as
// a map key. On Windows the result is lower-cased because paths there are
// case-insensitive.
func normalizePath(path string) (string, error) {
	abs, err := filepath.Abs(path)
	if err != nil {
		return "", err
	}
	abs = filepath.Clean(abs)
	if runtime.GOOS != "windows" {
		return abs, nil
	}
	return strings.ToLower(abs), nil
}
|
||||
34
tui/watch_test.go
Normal file
34
tui/watch_test.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package tui
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestStartCookWatcherPollsFileChanges verifies that the polling watcher
// emits an event after a watched file's contents change.
func TestStartCookWatcherPollsFileChanges(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, "cook.yml")
	if err := os.WriteFile(path, []byte("lua: 'a'\n"), 0644); err != nil {
		t.Fatalf("write initial file: %v", err)
	}

	events, errs, stop, err := StartCookWatcher([]string{path}, 100*time.Millisecond)
	if err != nil {
		t.Fatalf("start watcher: %v", err)
	}
	defer stop()

	// Change the file after the watcher took its baseline stamp.
	if err := os.WriteFile(path, []byte("lua: 'b'\n"), 0644); err != nil {
		t.Fatalf("write updated file: %v", err)
	}

	select {
	case <-events:
		// change detected — success
	case err := <-errs:
		t.Fatalf("watcher error: %v", err)
	case <-time.After(2 * time.Second):
		t.Fatalf("expected polling watcher event")
	}
}
|
||||
676
tui_cmd.go
Normal file
676
tui_cmd.go
Normal file
@@ -0,0 +1,676 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"cook/processor"
|
||||
"cook/tui"
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// tuiEntry is one runnable command loaded from a cook file, keyed by the
// row ID used in the TUI.
type tuiEntry struct {
	ID        string              // row ID (matches tui.Row.ID)
	File      string              // cook file the command came from
	Command   utils.ModifyCommand // the parsed command definition
	IsIsolate bool                // isolate flag from the cook file — NOTE(review): semantics not visible here, confirm
}
|
||||
|
||||
// tuiAdapter bridges the TUI to the cook pipeline: it loads cook files into
// rows, runs commands on demand, and routes log lines into the TUI.
type tuiAdapter struct {
	args         []string            // cook file arguments from the CLI
	parallel     int                 // files processed in parallel
	filter       string              // optional command filter
	db           utils.DB            // snapshot database
	logEvents    chan<- tui.LogEvent // sink for log lines shown in the TUI
	rows         []tui.Row           // current tree rows
	entries      map[string]tuiEntry // row ID -> loaded command
	fingerprints map[string]string   // row ID -> command fingerprint for change detection
	watched      map[string]bool     // row IDs currently in the watch set
	cookFiles    []string            // cook file paths loaded last reload
}
|
||||
|
||||
// ansiEscape matches ANSI terminal escape sequences so they can be stripped
// from captured log output before display.
var ansiEscape = regexp.MustCompile(`\x1b\[[0-9;?]*[ -/]*[@-~]`)

// commandField and commandFieldAlt match the `command=VALUE` (respectively
// `command:VALUE`) field loggers attach to lines, with optional quoting.
var commandField = regexp.MustCompile(`\bcommand=("[^"]+"|'[^']+'|[^\s]+)`)
var commandFieldAlt = regexp.MustCompile(`\bcommand:("[^"]+"|'[^']+'|[^\s]+)`)
|
||||
|
||||
// newTUICmd builds the `tui` cobra command: it wires the logger into the
// TUI's log pane, loads the cook files, starts the file watcher, and runs
// the interactive app against the adapter's callbacks.
func newTUICmd() *cobra.Command {
	var parallel int
	var filter string
	var debugLog bool
	cmd := &cobra.Command{
		Use:   "tui <cook_files...>",
		Short: "Open interactive TUI for selecting and running commands",
		Args:  cobra.MinimumNArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			// Pump logger output through a pipe into the TUI event channel,
			// one sanitized line at a time. Sends are non-blocking: lines
			// are dropped when the channel is full.
			logEvents := make(chan tui.LogEvent, 4096)
			logReader, logWriter := io.Pipe()
			defer logWriter.Close()
			go func() {
				scanner := bufio.NewScanner(logReader)
				for scanner.Scan() {
					line := sanitizeLogLine(scanner.Text())
					if line == "" {
						continue
					}
					commandID := parseCommandID(line)
					select {
					case logEvents <- tui.LogEvent{Line: line, CommandID: commandID}:
					default:
					}
				}
			}()

			// With --debug-log, tee everything to chef.log at TRACE level;
			// otherwise honor the global --loglevel flag.
			if debugLog {
				file, err := os.Create("chef.log")
				if err != nil {
					return fmt.Errorf("open chef.log: %w", err)
				}
				defer file.Close()
				logger.Default = logger.New(io.MultiWriter(logWriter, file), "", log.Lmicroseconds|log.Lshortfile)
				logger.SetLevel(logger.ParseLevel("TRACE"))
			} else {
				logger.Default = logger.New(logWriter, "", log.Lmicroseconds|log.Lshortfile)
				logLevel, err := cmd.Flags().GetString("loglevel")
				if err != nil {
					return err
				}
				logger.SetLevel(logger.ParseLevel(logLevel))
			}

			db, err := utils.GetDB()
			if err != nil {
				return fmt.Errorf("failed to get database: %w", err)
			}

			adapter := &tuiAdapter{
				args:         args,
				parallel:     parallel,
				filter:       filter,
				db:           db,
				logEvents:    logEvents,
				entries:      make(map[string]tuiEntry),
				fingerprints: make(map[string]string),
				watched:      make(map[string]bool),
			}

			if err := adapter.reloadRows(false); err != nil {
				return err
			}

			// Watching is best-effort: a failed watcher start only degrades
			// the experience, it does not abort the TUI.
			watchEvents, watchErrors, stopWatch, err := tui.StartCookWatcher(adapter.cookFiles, 500*time.Millisecond)
			watchEnabled := true
			initialMessage := fmt.Sprintf("Loaded %d entries from %d cook file(s)", len(adapter.entries), len(adapter.cookFiles))
			if err != nil {
				watchEnabled = false
				initialMessage = "Watcher disabled: " + err.Error()
			}
			if stopWatch != nil {
				defer stopWatch()
			}

			app := tui.New(tui.Inputs{
				Rows:           adapter.rows,
				CookFiles:      adapter.cookFiles,
				Version:        VersionString(),
				WatchEvents:    watchEvents,
				WatchErrors:    watchErrors,
				LogEvents:      logEvents,
				WatchEnabled:   watchEnabled,
				InitialMessage: initialMessage,
			}, tui.Callbacks{
				RunSelected: adapter.runSelected,
				RunAll:      adapter.runAll,
				FormatFile:  adapter.runFmt,
				ResetAll:    adapter.runResetAll,
				DumpAll:     adapter.runDumpAll,
				Escape:      adapter.runEscape,
				OnWatchSync: adapter.syncWatched,
				OnWatchTick: adapter.onWatchTick,
			})

			if err := app.Run(); err != nil {
				return fmt.Errorf("tui run failed: %w", err)
			}
			return nil
		},
	}
	cmd.Flags().IntVarP(&parallel, "parallel", "P", 100, "Number of files to process in parallel")
	cmd.Flags().StringVarP(&filter, "filter", "f", "", "Filter commands before loading")
	cmd.Flags().BoolVarP(&debugLog, "debug-log", "d", false, "Tee logs to chef.log for debugging")
	return cmd
}
|
||||
|
||||
func (a *tuiAdapter) runEscape(minimize bool) (string, error) {
|
||||
inLen, outLen, err := runEscapeClipboard(minimize)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf("Escape done (min=%t): in=%d out=%d", minimize, inLen, outLen), nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) runFmt(path string) (string, error) {
|
||||
flog := logger.WithPrefix(path)
|
||||
if err := formatLuaBlocksInTOML(path, flog); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return "Formatted " + path, nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) runResetAll() (string, error) {
|
||||
if err := resetAllFiles(a.db); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return "Reset all files from snapshots", nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) runDumpAll() (string, error) {
|
||||
if err := removeAllFiles(a.db); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return "Cleared all snapshots", nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) syncWatched(ids []string) error {
|
||||
a.watched = make(map[string]bool, len(ids))
|
||||
for _, id := range ids {
|
||||
a.watched[id] = true
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) emit(line string, commandID string) {
|
||||
if a.logEvents == nil {
|
||||
return
|
||||
}
|
||||
line = sanitizeLogLine(line)
|
||||
if line == "" {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case a.logEvents <- tui.LogEvent{Line: line, CommandID: commandID}:
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
func sanitizeLogLine(line string) string {
|
||||
line = ansiEscape.ReplaceAllString(line, "")
|
||||
return strings.TrimRight(line, "\r")
|
||||
}
|
||||
|
||||
func parseCommandID(line string) string {
|
||||
clean := ansiEscape.ReplaceAllString(line, "")
|
||||
m := commandField.FindStringSubmatch(clean)
|
||||
if len(m) < 2 {
|
||||
m = commandFieldAlt.FindStringSubmatch(clean)
|
||||
}
|
||||
if len(m) < 2 {
|
||||
return ""
|
||||
}
|
||||
return strings.Trim(m[1], `"'`)
|
||||
}
|
||||
|
||||
// onWatchTick handles a watcher event: it reloads the cook files (preserving
// selection/watch state), determines which *watched* commands changed by
// comparing fingerprints before and after, and reruns exactly those.
// Returns the refreshed rows plus a run report when anything was executed.
func (a *tuiAdapter) onWatchTick() (tui.WatchReloadResult, error) {
	// Snapshot the pre-reload fingerprints for comparison.
	old := make(map[string]string, len(a.fingerprints))
	for k, v := range a.fingerprints {
		old[k] = v
	}

	if err := a.reloadRows(true); err != nil {
		return tui.WatchReloadResult{}, err
	}

	// A watched command is rerun when it is new or its fingerprint changed;
	// commands that vanished from the cook files are skipped.
	watchedIDs := make([]string, 0)
	for id := range a.watched {
		newDump, ok := a.fingerprints[id]
		if !ok {
			continue
		}
		oldDump, hadOld := old[id]
		if !hadOld || oldDump != newDump {
			watchedIDs = append(watchedIDs, id)
		}
	}
	if len(watchedIDs) == 0 {
		return tui.WatchReloadResult{
			Rows:    a.rows,
			Message: "Watch tick: no watched command changed",
		}, nil
	}

	// Deterministic order keeps runs and log output stable across ticks.
	sort.Strings(watchedIDs)
	report, err := a.runByIDs(watchedIDs)
	if err != nil {
		return tui.WatchReloadResult{}, err
	}
	markWatchedAndRunning(report.Summaries, a.watched, watchedIDs)
	return tui.WatchReloadResult{
		Rows:          a.rows,
		Report:        report,
		TriggeredRuns: len(watchedIDs),
		Message:       fmt.Sprintf("Watch event: reran %d watched command(s)", len(watchedIDs)),
	}, nil
}
|
||||
|
||||
func (a *tuiAdapter) runSelected(ids []string) (tui.RunReport, error) {
|
||||
report, err := a.runByIDs(ids)
|
||||
if err != nil {
|
||||
return tui.RunReport{}, err
|
||||
}
|
||||
markWatchedAndRunning(report.Summaries, a.watched, nil)
|
||||
return report, nil
|
||||
}
|
||||
|
||||
func (a *tuiAdapter) runAll() (tui.RunReport, error) {
|
||||
ids := make([]string, 0, len(a.entries))
|
||||
for id := range a.entries {
|
||||
ids = append(ids, id)
|
||||
}
|
||||
sort.Strings(ids)
|
||||
report, err := a.runByIDs(ids)
|
||||
if err != nil {
|
||||
return tui.RunReport{}, err
|
||||
}
|
||||
markWatchedAndRunning(report.Summaries, a.watched, nil)
|
||||
return report, nil
|
||||
}
|
||||
|
||||
func markWatchedAndRunning(summaries []tui.CommandSummary, watched map[string]bool, running []string) {
|
||||
runningByID := make(map[string]bool)
|
||||
for _, id := range running {
|
||||
runningByID[id] = true
|
||||
}
|
||||
for i := range summaries {
|
||||
summaries[i].Watched = watched[summaries[i].Name]
|
||||
summaries[i].Running = runningByID[summaries[i].Name]
|
||||
}
|
||||
}
|
||||
|
||||
// runByIDs executes the given row IDs through the normal cook pipeline.
//
// Each command gets a temporary runtime name (__tui_N) so per-command stats
// and log lines can be mapped back to row IDs, even when display names
// collide. All logger output is captured into a buffer during the run and
// replayed into the TUI log pane afterwards, attributed line by line.
func (a *tuiAdapter) runByIDs(ids []string) (tui.RunReport, error) {
	if len(ids) == 0 {
		return tui.RunReport{}, nil
	}
	// Deduplicate and drop IDs that no longer have a loaded entry.
	uniqueIDs := make([]string, 0, len(ids))
	seen := make(map[string]struct{}, len(ids))
	for _, id := range ids {
		if _, ok := seen[id]; ok {
			continue
		}
		seen[id] = struct{}{}
		if _, ok := a.entries[id]; ok {
			uniqueIDs = append(uniqueIDs, id)
		}
	}

	if len(uniqueIDs) == 0 {
		return tui.RunReport{}, nil
	}

	// Swap the global logger for a capture buffer; restore on exit.
	originalLogger := logger.Default
	originalLevel := logger.GetLevel()
	defer func() {
		logger.Default = originalLogger
		logger.SetLevel(originalLevel)
	}()

	start := time.Now()
	// Assign unique runtime names so results can be attributed per row.
	runtimeNameToRowID := make(map[string]string, len(uniqueIDs))
	runtimeNameToDisplay := make(map[string]string, len(uniqueIDs))
	commands := make([]utils.ModifyCommand, 0, len(uniqueIDs))
	for i, id := range uniqueIDs {
		entry := a.entries[id]
		cmd := entry.Command
		runtimeName := fmt.Sprintf("__tui_%d", i+1)
		cmd.Name = runtimeName
		commands = append(commands, cmd)
		runtimeNameToRowID[runtimeName] = id
		displayName := entry.Command.Name
		if displayName == "" {
			displayName = "<unnamed>"
		}
		runtimeNameToDisplay[runtimeName] = displayName
	}

	buffer := bytes.NewBuffer(nil)
	logger.Default = logger.New(buffer, "", log.Lmicroseconds|log.Lshortfile)
	logger.SetLevel(originalLevel)

	// Standard pipeline: expand globs, associate files, reset, process.
	resetRuntimeStats()
	globs := utils.AggregateGlobs(commands)
	files, err := utils.ExpandGlobs(globs)
	if err != nil {
		return tui.RunReport{}, fmt.Errorf("failed to expand globs: %w", err)
	}
	associations, err := utils.AssociateFilesWithCommands(files, commands)
	if err != nil {
		return tui.RunReport{}, fmt.Errorf("failed to associate files with commands: %w", err)
	}
	if err := utils.ResetWhereNecessary(associations, a.db); err != nil {
		return tui.RunReport{}, fmt.Errorf("failed pre-run reset: %w", err)
	}

	commandLoggers := buildCommandLoggers(commands)
	processAssociations(a.parallel, associations, a.db, commandLoggers)
	logRunSummary(time.Since(start))

	// Summaries come back keyed by runtime name; remap to row IDs for the
	// TUI and to display names for the human-readable table.
	rows := commandSummaryRows()
	commandSummaries := toTUISummaries(rows)
	for i := range commandSummaries {
		if rowID, ok := runtimeNameToRowID[commandSummaries[i].Name]; ok {
			commandSummaries[i].Name = rowID
		}
	}

	displayRows := make([]utils.CommandSummaryRow, len(rows))
	copy(displayRows, rows)
	for i := range displayRows {
		if displayName, ok := runtimeNameToDisplay[displayRows[i].Name]; ok {
			displayRows[i].Name = displayName
		}
	}
	utils.RenderCommandSummaryTable(buffer, displayRows)

	processed := atomic.LoadInt64(&stats.ProcessedFiles)
	failed := atomic.LoadInt64(&stats.FailedFiles)
	modified := atomic.LoadInt64(&stats.TotalModifications)

	// Replay captured log output into the TUI, attributing each line: an
	// explicit command= field wins, then the last command seen, then the
	// only command when exactly one ran; otherwise fan the line out to all.
	lastCommandID := ""
	for _, line := range strings.Split(buffer.String(), "\n") {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		runtimeName := parseCommandID(line)
		commandID := runtimeNameToRowID[runtimeName]
		if commandID != "" {
			lastCommandID = commandID
		} else if lastCommandID != "" {
			commandID = lastCommandID
		} else if len(uniqueIDs) == 1 {
			commandID = uniqueIDs[0]
		} else {
			for _, id := range uniqueIDs {
				a.emit(line, id)
			}
			continue
		}
		a.emit(line, commandID)
	}

	return tui.RunReport{
		Processed: processed,
		Failed:    failed,
		Modified:  modified,
		ElapsedMs: time.Since(start).Milliseconds(),
		Summaries: commandSummaries,
	}, nil
}
|
||||
|
||||
func toTUISummaries(rows []utils.CommandSummaryRow) []tui.CommandSummary {
|
||||
out := make([]tui.CommandSummary, 0, len(rows))
|
||||
for _, r := range rows {
|
||||
light := tui.StatusYellow
|
||||
hasErrors := (r.LuaErr + r.ParseErr + r.OtherErr) > 0
|
||||
if hasErrors {
|
||||
light = tui.StatusRed
|
||||
} else if r.FilesModified > 0 {
|
||||
light = tui.StatusGreen
|
||||
}
|
||||
out = append(out, tui.CommandSummary{
|
||||
Name: r.Name,
|
||||
Seen: r.Seen,
|
||||
PredSkip: r.PredSkip,
|
||||
FilesMod: r.FilesModified,
|
||||
Unchanged: r.FilesUnchanged,
|
||||
LuaErr: r.LuaErr,
|
||||
ParseErr: r.ParseErr,
|
||||
OtherErr: r.OtherErr,
|
||||
Edits: r.Edits,
|
||||
Light: light,
|
||||
})
|
||||
}
|
||||
sort.Slice(out, func(i, j int) bool { return out[i].Name < out[j].Name })
|
||||
return out
|
||||
}
|
||||
|
||||
// reloadRows rebuilds the adapter's row list, entry map, and fingerprints from
// the cook files currently on disk.
//
// When preserveSelection is true, per-command UI state (selection, watch flag,
// status light, and accumulated stats) is carried over from the old rows:
// first by exact row ID, then — for rows whose ID changed — by replaying old
// states that share the same (file, command) key in order.
func (a *tuiAdapter) reloadRows(preserveSelection bool) error {
	// commandRowState captures everything about a command row that should
	// survive a reload.
	type commandRowState struct {
		selected  bool
		watched   bool
		status    tui.StatusLight
		hasStats  bool
		seen      int64
		predSkip  int64
		filesMod  int64
		unchanged int64
		luaErr    int64
		parseErr  int64
		otherErr  int64
		edits     int64
	}
	oldByID := make(map[string]commandRowState)
	oldByKey := make(map[string][]commandRowState)
	if preserveSelection {
		// Snapshot every existing command row, indexed both by row ID and by
		// the looser (file, command) key for fallback matching below.
		for i := range a.rows {
			if a.rows[i].Kind != tui.RowCommand {
				continue
			}
			state := commandRowState{
				selected:  a.rows[i].Selected,
				watched:   a.rows[i].Watched,
				status:    a.rows[i].Status,
				hasStats:  a.rows[i].HasStats,
				seen:      a.rows[i].Seen,
				predSkip:  a.rows[i].PredSkip,
				filesMod:  a.rows[i].FilesMod,
				unchanged: a.rows[i].Unchanged,
				luaErr:    a.rows[i].LuaErr,
				parseErr:  a.rows[i].ParseErr,
				otherErr:  a.rows[i].OtherErr,
				edits:     a.rows[i].Edits,
			}
			oldByID[a.rows[i].ID] = state
			k := rowStateKey(a.rows[i].File, a.rows[i].Command)
			oldByKey[k] = append(oldByKey[k], state)
		}
	}

	// Reload commands (and any shared variables) from the configured args.
	commands, vars, err := utils.LoadCommands(a.args)
	if err != nil {
		return fmt.Errorf("failed to load commands: %w", err)
	}
	if len(vars) > 0 {
		processor.SetVariables(vars)
	}
	if a.filter != "" {
		commands = utils.FilterCommands(commands, a.filter)
	}

	cookFiles, err := collectCookFilesFromArgs(a.args)
	if err != nil {
		return fmt.Errorf("failed to resolve cook files: %w", err)
	}

	rows, entries, fps := buildRowsAndEntriesFromCommands(commands, cookFiles)
	if preserveSelection {
		// Restore saved state onto the freshly built rows. Rows not matched by
		// ID consume old states sharing their (file, command) key in order, so
		// duplicate-named commands keep a stable pairing across reloads.
		replayByKeyIndex := make(map[string]int)
		for i := range rows {
			if rows[i].Kind != tui.RowCommand {
				continue
			}
			state, ok := oldByID[rows[i].ID]
			if !ok {
				k := rowStateKey(rows[i].File, rows[i].Command)
				idx := replayByKeyIndex[k]
				if list := oldByKey[k]; idx < len(list) {
					state = list[idx]
					replayByKeyIndex[k] = idx + 1
					ok = true
				}
			}
			if !ok {
				continue
			}
			rows[i].Selected = state.selected
			rows[i].Watched = state.watched
			rows[i].Status = state.status
			rows[i].HasStats = state.hasStats
			rows[i].Seen = state.seen
			rows[i].PredSkip = state.predSkip
			rows[i].FilesMod = state.filesMod
			rows[i].Unchanged = state.unchanged
			rows[i].LuaErr = state.luaErr
			rows[i].ParseErr = state.parseErr
			rows[i].OtherErr = state.otherErr
			rows[i].Edits = state.edits
		}
	}

	// Rebuild the watched-ID set. With preserved selection it is derived from
	// the restored rows; otherwise the previous watch set is kept for any row
	// IDs that still exist, and the rows are updated to match.
	newWatched := make(map[string]bool)
	if preserveSelection {
		for i := range rows {
			if rows[i].Kind == tui.RowCommand && rows[i].Watched {
				newWatched[rows[i].ID] = true
			}
		}
	} else {
		for id := range a.watched {
			if _, ok := entries[id]; ok {
				newWatched[id] = true
			}
		}
		for i := range rows {
			if rows[i].Kind == tui.RowCommand {
				rows[i].Watched = newWatched[rows[i].ID]
			}
		}
	}

	// Commit the rebuilt state onto the adapter.
	a.rows = rows
	a.entries = entries
	a.fingerprints = fps
	a.watched = newWatched

	a.cookFiles = cookFiles
	return nil
}
|
||||
|
||||
// rowStateKey builds the composite lookup key used to match command rows
// across reloads. The ASCII unit-separator byte keeps the file and command
// parts unambiguous even if either contains ordinary punctuation.
func rowStateKey(file string, command string) string {
	const sep = "\x1f"
	return file + sep + command
}
|
||||
|
||||
func buildRowsAndEntriesFromCommands(commands []utils.ModifyCommand, cookFiles []string) ([]tui.Row, map[string]tuiEntry, map[string]string) {
|
||||
commandsBySource := make(map[string][]utils.ModifyCommand)
|
||||
for _, cmd := range commands {
|
||||
source := cmd.SourceFile
|
||||
if source == "" {
|
||||
source = cmd.SourceDir
|
||||
}
|
||||
commandsBySource[source] = append(commandsBySource[source], cmd)
|
||||
}
|
||||
|
||||
order := append([]string(nil), cookFiles...)
|
||||
if len(order) == 0 {
|
||||
for source := range commandsBySource {
|
||||
order = append(order, source)
|
||||
}
|
||||
sort.Strings(order)
|
||||
}
|
||||
|
||||
labelBySource := sourceLabels(order)
|
||||
|
||||
rows := make([]tui.Row, 0)
|
||||
entries := make(map[string]tuiEntry)
|
||||
fps := make(map[string]string)
|
||||
|
||||
for _, source := range order {
|
||||
cmds := commandsBySource[source]
|
||||
if len(cmds) == 0 {
|
||||
continue
|
||||
}
|
||||
rows = append(rows, tui.Row{ID: "file::" + source, Kind: tui.RowFile, File: source, Label: labelBySource[source]})
|
||||
counts := make(map[string]int)
|
||||
for _, cmd := range cmds {
|
||||
counts[cmd.Name]++
|
||||
rowID := fmt.Sprintf("%s::%s#%d", source, cmd.Name, counts[cmd.Name])
|
||||
label := cmd.Name
|
||||
if label == "" {
|
||||
label = "<unnamed>"
|
||||
}
|
||||
if cmd.Isolate {
|
||||
label += " [isolate]"
|
||||
}
|
||||
rows = append(rows, tui.Row{ID: rowID, Kind: tui.RowCommand, File: source, Command: cmd.Name, Label: label})
|
||||
entries[rowID] = tuiEntry{ID: rowID, File: source, Command: cmd, IsIsolate: cmd.Isolate}
|
||||
fps[rowID] = commandFingerprint(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
return rows, entries, fps
|
||||
}
|
||||
|
||||
// sourceLabels maps each file path to a short display label: the base name
// when it is unique across all files, otherwise the full path.
func sourceLabels(files []string) map[string]string {
	byBase := make(map[string]int)
	for _, file := range files {
		byBase[filepath.Base(file)]++
	}
	labels := make(map[string]string, len(files))
	for _, file := range files {
		if base := filepath.Base(file); byBase[base] == 1 {
			labels[file] = base
		} else {
			labels[file] = file
		}
	}
	return labels
}
|
||||
|
||||
func commandFingerprint(cmd utils.ModifyCommand) string {
|
||||
b, err := yaml.Marshal(cmd)
|
||||
if err != nil {
|
||||
return fmt.Sprintf("marshal-error:%s:%s:%s", cmd.Name, cmd.Regex, cmd.Lua)
|
||||
}
|
||||
return string(b)
|
||||
}
|
||||
|
||||
func resetRuntimeStats() {
|
||||
stats = GlobalStats{ModificationsPerCommand: sync.Map{}, CommandStats: sync.Map{}}
|
||||
}
|
||||
|
||||
func collectCookFilesFromArgs(args []string) ([]string, error) {
|
||||
out := make([]string, 0)
|
||||
seen := make(map[string]struct{})
|
||||
for _, arg := range args {
|
||||
static, pattern := utils.SplitPattern(arg)
|
||||
matches, err := doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to resolve cook arg %q: %w", arg, err)
|
||||
}
|
||||
for _, m := range matches {
|
||||
p := utils.ResolvePath(filepath.Join(static, m))
|
||||
if _, exists := seen[p]; exists {
|
||||
continue
|
||||
}
|
||||
seen[p] = struct{}{}
|
||||
out = append(out, p)
|
||||
}
|
||||
}
|
||||
sort.Strings(out)
|
||||
return out, nil
|
||||
}
|
||||
146
utils/db.go
Normal file
146
utils/db.go
Normal file
@@ -0,0 +1,146 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
gormlogger "gorm.io/gorm/logger"
|
||||
)
|
||||
|
||||
// DB is the storage abstraction used to persist original file snapshots so
// modified files can later be reset to their baseline content.
type DB interface {
	// DB exposes the underlying GORM handle for callers needing direct access.
	DB() *gorm.DB
	// Raw runs a raw SQL query against the underlying database.
	Raw(sql string, args ...any) *gorm.DB
	// SaveFile stores a snapshot of fileData for filePath.
	SaveFile(filePath string, fileData []byte) error
	// GetFile returns the stored snapshot bytes for filePath.
	GetFile(filePath string) ([]byte, error)
	// GetAllFiles returns every stored snapshot.
	GetAllFiles() ([]FileSnapshot, error)
}

// FileSnapshot is the GORM model for one stored file baseline. Date and
// FilePath form the composite primary key; FileData holds the raw bytes.
type FileSnapshot struct {
	Date     time.Time `gorm:"primaryKey"`
	FilePath string    `gorm:"primaryKey"`
	FileData []byte    `gorm:"type:blob"`
}

// DBWrapper implements DB on top of a *gorm.DB connection.
type DBWrapper struct {
	db *gorm.DB
}

// globalDB holds the most recently opened wrapper.
// NOTE(review): it is assigned in GetDB but never read back, so it does not
// currently act as a cache — confirm whether memoization was intended.
var globalDB *DBWrapper
|
||||
|
||||
func GetDB() (DB, error) {
|
||||
getlogger := logger.WithPrefix("GetDB")
|
||||
getlogger.Debug("Attempting to get database connection")
|
||||
var err error
|
||||
|
||||
dbFile := filepath.Join("data.sqlite")
|
||||
getlogger.Debug("Opening database file: %q", dbFile)
|
||||
getlogger.Trace("Database configuration: PrepareStmt=true, GORM logger=Silent")
|
||||
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
|
||||
// SkipDefaultTransaction: true,
|
||||
PrepareStmt: true,
|
||||
Logger: gormlogger.Default.LogMode(gormlogger.Silent),
|
||||
})
|
||||
if err != nil {
|
||||
getlogger.Error("Failed to open database file %q: %v", dbFile, err)
|
||||
return nil, err
|
||||
}
|
||||
getlogger.Debug("Database opened successfully, running auto migration for FileSnapshot model")
|
||||
if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
|
||||
getlogger.Error("Auto migration failed for FileSnapshot model: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getlogger.Info("Database initialized and migrated successfully")
|
||||
|
||||
globalDB = &DBWrapper{db: db}
|
||||
getlogger.Debug("Database wrapper initialized and cached globally")
|
||||
return globalDB, nil
|
||||
}
|
||||
|
||||
// Just a wrapper
|
||||
func (db *DBWrapper) Raw(sql string, args ...any) *gorm.DB {
|
||||
rawLogger := logger.WithPrefix("Raw").WithField("sql", sql)
|
||||
rawLogger.Debug("Executing raw SQL query with args: %v", args)
|
||||
return db.db.Raw(sql, args...)
|
||||
}
|
||||
|
||||
func (db *DBWrapper) DB() *gorm.DB {
|
||||
logger.WithPrefix("DB").Debug("Returning GORM DB instance")
|
||||
return db.db
|
||||
}
|
||||
|
||||
func (db *DBWrapper) FileExists(filePath string) (bool, error) {
|
||||
fileExistsLogger := logger.WithPrefix("FileExists").WithField("filePath", filePath)
|
||||
fileExistsLogger.Debug("Checking if file exists in database")
|
||||
var count int64
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).Count(&count).Error
|
||||
if err != nil {
|
||||
fileExistsLogger.Error("Error checking if file exists: %v", err)
|
||||
return false, err
|
||||
}
|
||||
fileExistsLogger.Debug("File exists: %t", count > 0)
|
||||
return count > 0, err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
|
||||
saveFileLogger := logger.WithPrefix("SaveFile").WithField("filePath", filePath).WithField("dataSize", len(fileData))
|
||||
saveFileLogger.Debug("Attempting to save file to database")
|
||||
saveFileLogger.Trace("File data length: %d", len(fileData))
|
||||
|
||||
exists, err := db.FileExists(filePath)
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Error checking if file exists: %v", err)
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
saveFileLogger.Debug("File already exists in database, skipping save to avoid overwriting original snapshot")
|
||||
return nil
|
||||
}
|
||||
saveFileLogger.Debug("Creating new file snapshot in database")
|
||||
err = db.db.Create(&FileSnapshot{
|
||||
Date: time.Now(),
|
||||
FilePath: filePath,
|
||||
FileData: fileData,
|
||||
}).Error
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Failed to create file snapshot: %v", err)
|
||||
} else {
|
||||
saveFileLogger.Info("File successfully saved to database")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetFile(filePath string) ([]byte, error) {
|
||||
getFileLogger := logger.WithPrefix("GetFile").WithField("filePath", filePath)
|
||||
getFileLogger.Debug("Getting file from database")
|
||||
var fileSnapshot FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).First(&fileSnapshot).Error
|
||||
if err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
getFileLogger.Debug("File not found in database: %v", err)
|
||||
} else {
|
||||
getFileLogger.Warning("Failed to get file from database: %v", err)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
getFileLogger.Debug("File found in database")
|
||||
getFileLogger.Trace("Retrieved file data length: %d", len(fileSnapshot.FileData))
|
||||
return fileSnapshot.FileData, nil
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetAllFiles() ([]FileSnapshot, error) {
|
||||
getAllFilesLogger := logger.WithPrefix("GetAllFiles")
|
||||
getAllFilesLogger.Debug("Getting all files from database")
|
||||
var fileSnapshots []FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Find(&fileSnapshots).Error
|
||||
if err != nil {
|
||||
getAllFilesLogger.Error("Failed to get all files from database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getAllFilesLogger.Debug("Found %d files in database", len(fileSnapshots))
|
||||
getAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
|
||||
return fileSnapshots, nil
|
||||
}
|
||||
62
utils/db_test.go
Normal file
62
utils/db_test.go
Normal file
@@ -0,0 +1,62 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func TestDBWrapperRoundTrip(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "db-roundtrip-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
db, err := GetDB()
|
||||
require.NoError(t, err)
|
||||
sqlDB, err := db.DB().DB()
|
||||
require.NoError(t, err)
|
||||
defer sqlDB.Close()
|
||||
|
||||
path := filepath.Join(tmpDir, "a.txt")
|
||||
data := []byte("hello")
|
||||
require.NoError(t, db.SaveFile(path, data))
|
||||
|
||||
got, err := db.GetFile(path)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, data, got)
|
||||
|
||||
all, err := db.GetAllFiles()
|
||||
require.NoError(t, err)
|
||||
assert.NotEmpty(t, all)
|
||||
|
||||
exists, err := db.(*DBWrapper).FileExists(path)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists)
|
||||
}
|
||||
|
||||
func TestGetFileNotFound(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "db-notfound-*")
|
||||
require.NoError(t, err)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
defer os.Chdir(origDir)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
|
||||
db, err := GetDB()
|
||||
require.NoError(t, err)
|
||||
sqlDB, err := db.DB().DB()
|
||||
require.NoError(t, err)
|
||||
defer sqlDB.Close()
|
||||
|
||||
_, err = db.GetFile("missing.txt")
|
||||
assert.Error(t, err)
|
||||
assert.True(t, errors.Is(err, gorm.ErrRecordNotFound))
|
||||
}
|
||||
79
utils/file.go
Normal file
79
utils/file.go
Normal file
@@ -0,0 +1,79 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||
func LimitString(s string, maxLen int) string {
|
||||
limitStringLogger := logger.WithPrefix("LimitString").WithField("originalLength", len(s)).WithField("maxLength", maxLen)
|
||||
limitStringLogger.Debug("Limiting string length")
|
||||
s = strings.ReplaceAll(s, "\n", "\\n")
|
||||
if len(s) <= maxLen {
|
||||
limitStringLogger.Trace("String length (%d) is within max length (%d), no truncation", len(s), maxLen)
|
||||
return s
|
||||
}
|
||||
limited := s[:maxLen-3] + "..."
|
||||
limitStringLogger.Trace("String truncated from %d to %d characters: %q", len(s), len(limited), limited)
|
||||
return limited
|
||||
}
|
||||
|
||||
// ResetWhereNecessary restores files to their stored baseline snapshot before
// a run. A file is considered "dirty" (needing a reset) if any of its
// associated commands — regular or isolate — does not have NoReset set.
// Missing snapshots are seeded from the current on-disk content (best effort)
// and all per-file failures are logged and skipped rather than aborting;
// the function therefore always returns nil today.
func ResetWhereNecessary(associations map[string]FileCommandAssociation, db DB) error {
	resetWhereNecessaryLogger := logger.WithPrefix("ResetWhereNecessary")
	resetWhereNecessaryLogger.Debug("Starting reset where necessary operation")
	resetWhereNecessaryLogger.Trace("File-command associations input: %v", associations)
	// Collect the set of files that at least one command wants reset.
	dirtyFiles := make(map[string]struct{})
	for _, association := range associations {
		resetWhereNecessaryLogger.Debug("Processing association for file: %q", association.File)
		for _, command := range association.Commands {
			resetWhereNecessaryLogger.Debug("Checking command %q for reset requirement", command.Name)
			resetWhereNecessaryLogger.Trace("Command details: %v", command)
			if !command.NoReset {
				resetWhereNecessaryLogger.Debug("Command %q requires reset for file %q, marking as dirty", command.Name, association.File)
				dirtyFiles[association.File] = struct{}{}
			}
		}
		for _, command := range association.IsolateCommands {
			resetWhereNecessaryLogger.Debug("Checking isolate command %q for reset requirement", command.Name)
			resetWhereNecessaryLogger.Trace("Isolate command details: %v", command)
			if !command.NoReset {
				resetWhereNecessaryLogger.Debug("Isolate command %q requires reset for file %q, marking as dirty", command.Name, association.File)
				dirtyFiles[association.File] = struct{}{}
			}
		}
	}
	resetWhereNecessaryLogger.Debug("Identified %d files that need to be reset", len(dirtyFiles))
	resetWhereNecessaryLogger.Trace("Dirty files identified: %v", dirtyFiles)

	for file := range dirtyFiles {
		resetWhereNecessaryLogger.Debug("Resetting file %q", file)
		fileData, err := db.GetFile(file)
		if err != nil {
			resetWhereNecessaryLogger.Warning("Failed to get original content for file %q from database: %v", file, err)
			// Seed the snapshot from current disk content if missing, then use it as fallback
			currentData, readErr := os.ReadFile(file)
			if readErr != nil {
				resetWhereNecessaryLogger.Warning("Additionally failed to read current file content for %q: %v", file, readErr)
				continue
			}
			// Best-effort attempt to save baseline; ignore errors to avoid blocking reset
			if saveErr := db.SaveFile(file, currentData); saveErr != nil {
				resetWhereNecessaryLogger.Warning("Failed to seed baseline snapshot for %q: %v", file, saveErr)
			}
			fileData = currentData
		}
		resetWhereNecessaryLogger.Trace("Retrieved original file data length for %q: %d", file, len(fileData))
		resetWhereNecessaryLogger.Debug("Writing original content back to file %q", file)
		err = os.WriteFile(file, fileData, 0644)
		if err != nil {
			resetWhereNecessaryLogger.Warning("Failed to write original content back to file %q: %v", file, err)
			continue
		}
		resetWhereNecessaryLogger.Debug("Successfully reset file %q", file)
	}
	resetWhereNecessaryLogger.Debug("Finished reset where necessary operation")
	return nil
}
|
||||
165
utils/file_test.go
Normal file
165
utils/file_test.go
Normal file
@@ -0,0 +1,165 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestLimitString(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
maxLen int
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Short string",
|
||||
input: "hello",
|
||||
maxLen: 10,
|
||||
expected: "hello",
|
||||
},
|
||||
{
|
||||
name: "Exact length",
|
||||
input: "hello",
|
||||
maxLen: 5,
|
||||
expected: "hello",
|
||||
},
|
||||
{
|
||||
name: "Too long",
|
||||
input: "hello world",
|
||||
maxLen: 8,
|
||||
expected: "hello...",
|
||||
},
|
||||
{
|
||||
name: "With newlines",
|
||||
input: "hello\nworld",
|
||||
maxLen: 20,
|
||||
expected: "hello\\nworld",
|
||||
},
|
||||
{
|
||||
name: "With newlines truncated",
|
||||
input: "hello\nworld\nfoo\nbar",
|
||||
maxLen: 15,
|
||||
expected: "hello\\nworld...",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := LimitString(tt.input, tt.maxLen)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResetWhereNecessary(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "reset-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create test files
|
||||
file1 := filepath.Join(tmpDir, "file1.txt")
|
||||
file2 := filepath.Join(tmpDir, "file2.txt")
|
||||
file3 := filepath.Join(tmpDir, "file3.txt")
|
||||
|
||||
err = os.WriteFile(file1, []byte("original1"), 0644)
|
||||
assert.NoError(t, err)
|
||||
err = os.WriteFile(file2, []byte("original2"), 0644)
|
||||
assert.NoError(t, err)
|
||||
err = os.WriteFile(file3, []byte("original3"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Modify files
|
||||
err = os.WriteFile(file1, []byte("modified1"), 0644)
|
||||
assert.NoError(t, err)
|
||||
err = os.WriteFile(file2, []byte("modified2"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create mock DB
|
||||
db, err := GetDB()
|
||||
assert.NoError(t, err)
|
||||
err = db.SaveFile(file1, []byte("original1"))
|
||||
assert.NoError(t, err)
|
||||
err = db.SaveFile(file2, []byte("original2"))
|
||||
assert.NoError(t, err)
|
||||
// file3 not in DB
|
||||
|
||||
// Create associations with reset commands
|
||||
associations := map[string]FileCommandAssociation{
|
||||
file1: {
|
||||
File: file1,
|
||||
Commands: []ModifyCommand{
|
||||
{Name: "cmd1", NoReset: false},
|
||||
},
|
||||
},
|
||||
file2: {
|
||||
File: file2,
|
||||
IsolateCommands: []ModifyCommand{
|
||||
{Name: "cmd2", NoReset: false},
|
||||
},
|
||||
},
|
||||
file3: {
|
||||
File: file3,
|
||||
Commands: []ModifyCommand{
|
||||
{Name: "cmd3", NoReset: true}, // No reset
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Run reset
|
||||
err = ResetWhereNecessary(associations, db)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify file1 was reset
|
||||
data, _ := os.ReadFile(file1)
|
||||
assert.Equal(t, "original1", string(data))
|
||||
|
||||
// Verify file2 was reset
|
||||
data, _ = os.ReadFile(file2)
|
||||
assert.Equal(t, "original2", string(data))
|
||||
|
||||
// Verify file3 was NOT reset
|
||||
data, _ = os.ReadFile(file3)
|
||||
assert.Equal(t, "original3", string(data))
|
||||
}
|
||||
|
||||
func TestResetWhereNecessaryMissingFromDB(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "reset-missing-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a test file that's been modified
|
||||
file1 := filepath.Join(tmpDir, "file1.txt")
|
||||
err = os.WriteFile(file1, []byte("modified_content"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create DB but DON'T save file to it
|
||||
db, err := GetDB()
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create associations with reset command
|
||||
associations := map[string]FileCommandAssociation{
|
||||
file1: {
|
||||
File: file1,
|
||||
Commands: []ModifyCommand{
|
||||
{Name: "cmd1", NoReset: false},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Run reset - should use current disk content as fallback
|
||||
err = ResetWhereNecessary(associations, db)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Verify file was "reset" to current content (saved to DB for next time)
|
||||
data, _ := os.ReadFile(file1)
|
||||
assert.Equal(t, "modified_content", string(data))
|
||||
|
||||
// Verify it was saved to DB
|
||||
savedData, err := db.GetFile(file1)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "modified_content", string(savedData))
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"flag"
|
||||
)
|
||||
|
||||
var (
|
||||
// Deprecated
|
||||
GitFlag = flag.Bool("git", false, "Use git to manage files")
|
||||
// Deprecated
|
||||
ResetFlag = flag.Bool("reset", false, "Reset files to their original state")
|
||||
LogLevel = flag.String("loglevel", "INFO", "Set log level: ERROR, WARNING, INFO, DEBUG, TRACE")
|
||||
Cookfile = flag.String("cook", "**/cook.yml", "Path to cook config files, can be globbed")
|
||||
ParallelFiles = flag.Int("P", 100, "Number of files to process in parallel")
|
||||
)
|
||||
97
utils/git.go
97
utils/git.go
@@ -1,97 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"modify/logger"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
"github.com/go-git/go-git/v5"
|
||||
)
|
||||
|
||||
var (
|
||||
Repo *git.Repository
|
||||
Worktree *git.Worktree
|
||||
)
|
||||
|
||||
func SetupGit() error {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get current working directory: %w", err)
|
||||
}
|
||||
logger.Debug("Current working directory obtained: %s", cwd)
|
||||
|
||||
logger.Debug("Attempting to open git repository at %s", cwd)
|
||||
Repo, err = git.PlainOpen(cwd)
|
||||
if err != nil {
|
||||
logger.Debug("No existing git repository found at %s, attempting to initialize a new git repository.", cwd)
|
||||
Repo, err = git.PlainInit(cwd, false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to initialize a new git repository at %s: %w", cwd, err)
|
||||
}
|
||||
logger.Info("Successfully initialized a new git repository at %s", cwd)
|
||||
} else {
|
||||
logger.Info("Successfully opened existing git repository at %s", cwd)
|
||||
}
|
||||
|
||||
logger.Debug("Attempting to obtain worktree for repository at %s", cwd)
|
||||
Worktree, err = Repo.Worktree()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to obtain worktree for repository at %s: %w", cwd, err)
|
||||
}
|
||||
logger.Debug("Successfully obtained worktree for repository at %s", cwd)
|
||||
return nil
|
||||
}
|
||||
|
||||
func CleanupGitFiles(files []string) error {
|
||||
for _, file := range files {
|
||||
logger.Debug("Checking git status for file: %s", file)
|
||||
status, err := Worktree.Status()
|
||||
if err != nil {
|
||||
logger.Error("Error getting worktree status: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error getting worktree status: %v\n", err)
|
||||
return fmt.Errorf("error getting worktree status: %w", err)
|
||||
}
|
||||
if status.IsUntracked(file) {
|
||||
logger.Info("Detected untracked file: %s. Adding to git index.", file)
|
||||
_, err = Worktree.Add(file)
|
||||
if err != nil {
|
||||
logger.Error("Error adding file to git: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error adding file to git: %v\n", err)
|
||||
return fmt.Errorf("error adding file to git: %w", err)
|
||||
}
|
||||
|
||||
filename := filepath.Base(file)
|
||||
logger.Info("File %s added successfully. Committing with message: 'Track %s'", filename, filename)
|
||||
_, err = Worktree.Commit("Track "+filename, &git.CommitOptions{
|
||||
Author: &object.Signature{
|
||||
Name: "Big Chef",
|
||||
Email: "bigchef@bigchef.com",
|
||||
When: time.Now(),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
logger.Error("Error committing file: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error committing file: %v\n", err)
|
||||
return fmt.Errorf("error committing file: %w", err)
|
||||
}
|
||||
logger.Info("Successfully committed file: %s", filename)
|
||||
} else {
|
||||
logger.Info("File %s is already tracked. Restoring it to the working tree.", file)
|
||||
err := Worktree.Restore(&git.RestoreOptions{
|
||||
Files: []string{file},
|
||||
Staged: true,
|
||||
Worktree: true,
|
||||
})
|
||||
if err != nil {
|
||||
logger.Error("Error restoring file: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error restoring file: %v\n", err)
|
||||
return fmt.Errorf("error restoring file: %w", err)
|
||||
}
|
||||
logger.Info("File %s restored successfully", file)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -2,207 +2,555 @@ package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"modify/logger"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
type ModifyCommand struct {
|
||||
Name string `yaml:"name"`
|
||||
Regex string `yaml:"regex"`
|
||||
Lua string `yaml:"lua"`
|
||||
Files []string `yaml:"files"`
|
||||
Git bool `yaml:"git"`
|
||||
Reset bool `yaml:"reset"`
|
||||
LogLevel string `yaml:"loglevel"`
|
||||
Name string `yaml:"name,omitempty" toml:"name,omitempty"`
|
||||
Regex string `yaml:"regex,omitempty" toml:"regex,omitempty"`
|
||||
Regexes []string `yaml:"regexes,omitempty" toml:"regexes,omitempty"`
|
||||
RegexPred string `yaml:"regex_pred,omitempty" toml:"regex_pred,omitempty"`
|
||||
RegexPreds []string `yaml:"regex_preds,omitempty" toml:"regex_preds,omitempty"`
|
||||
Lua string `yaml:"lua,omitempty" toml:"lua,omitempty"`
|
||||
Files []string `yaml:"files,omitempty" toml:"files,omitempty"`
|
||||
NoReset bool `yaml:"noreset,omitempty" toml:"noreset,omitempty"`
|
||||
LogLevel string `yaml:"loglevel,omitempty" toml:"loglevel,omitempty"`
|
||||
Isolate bool `yaml:"isolate,omitempty" toml:"isolate,omitempty"`
|
||||
NoDedup bool `yaml:"nodedup,omitempty" toml:"nodedup,omitempty"`
|
||||
Disabled bool `yaml:"disable,omitempty" toml:"disable,omitempty"`
|
||||
JSON bool `yaml:"json,omitempty" toml:"json,omitempty"`
|
||||
XML bool `yaml:"xml,omitempty" toml:"xml,omitempty"`
|
||||
Raw bool `yaml:"raw,omitempty" toml:"raw,omitempty"`
|
||||
SourceDir string `yaml:"-" toml:"-"` // Directory of the config file that loaded this command
|
||||
SourceFile string `yaml:"-" toml:"-"` // Absolute path of the config file that loaded this command
|
||||
}
|
||||
|
||||
type CookFile []ModifyCommand
|
||||
|
||||
func (c *ModifyCommand) Validate() error {
|
||||
if c.Regex == "" {
|
||||
return fmt.Errorf("pattern is required")
|
||||
validateLogger := logger.WithPrefix("Validate").WithField("commandName", c.Name)
|
||||
validateLogger.Debug("Validating command")
|
||||
|
||||
// For structured modes, regex patterns are not required
|
||||
if !c.JSON && !c.XML {
|
||||
if c.Regex == "" && len(c.Regexes) == 0 {
|
||||
validateLogger.Error("Validation failed: Regex pattern is required for non-structured mode")
|
||||
return fmt.Errorf("pattern is required for non-structured mode")
|
||||
}
|
||||
}
|
||||
|
||||
if c.Lua == "" {
|
||||
validateLogger.Error("Validation failed: Lua expression is required")
|
||||
return fmt.Errorf("lua expression is required")
|
||||
}
|
||||
if len(c.Files) == 0 {
|
||||
validateLogger.Error("Validation failed: At least one file is required")
|
||||
return fmt.Errorf("at least one file is required")
|
||||
}
|
||||
preds := make([]string, 0, len(c.RegexPreds)+1)
|
||||
if c.RegexPred != "" {
|
||||
preds = append(preds, c.RegexPred)
|
||||
}
|
||||
preds = append(preds, c.RegexPreds...)
|
||||
for _, pred := range preds {
|
||||
if _, err := regexp.Compile(pred); err != nil {
|
||||
validateLogger.Error("Validation failed: invalid regex_pred %q: %v", pred, err)
|
||||
return fmt.Errorf("invalid regex_pred %q: %w", pred, err)
|
||||
}
|
||||
}
|
||||
if c.LogLevel == "" {
|
||||
validateLogger.Debug("LogLevel not specified, defaulting to INFO")
|
||||
c.LogLevel = "INFO"
|
||||
}
|
||||
validateLogger.Debug("Command validated successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[string][]ModifyCommand, error) {
|
||||
// Ehh.. Not much better... Guess this wasn't the big deal
|
||||
var matchesMemoTable map[string]bool = make(map[string]bool)
|
||||
var globMemoTable map[string][]string = make(map[string][]string)
|
||||
|
||||
func Matches(path string, glob string) (bool, error) {
|
||||
matchesLogger := logger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
|
||||
matchesLogger.Debug("Checking if path matches glob")
|
||||
key := fmt.Sprintf("%s:%s", path, glob)
|
||||
if matches, ok := matchesMemoTable[key]; ok {
|
||||
matchesLogger.Debug("Found match in memo table: %t", matches)
|
||||
return matches, nil
|
||||
}
|
||||
matches, err := doublestar.Match(glob, path)
|
||||
if err != nil {
|
||||
matchesLogger.Error("Failed to match glob: %v", err)
|
||||
return false, fmt.Errorf("failed to match glob %s with file %s: %w", glob, path, err)
|
||||
}
|
||||
matchesMemoTable[key] = matches
|
||||
matchesLogger.Debug("Match result: %t, storing in memo table", matches)
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func SplitPattern(pattern string) (string, string) {
|
||||
splitPatternLogger := logger.WithPrefix("SplitPattern").WithField("pattern", pattern)
|
||||
splitPatternLogger.Debug("Splitting pattern")
|
||||
splitPatternLogger.Trace("Original pattern: %q", pattern)
|
||||
|
||||
// Split the pattern first to separate static and wildcard parts
|
||||
static, remainingPattern := doublestar.SplitPattern(pattern)
|
||||
splitPatternLogger.Trace("After split: static=%q, pattern=%q", static, remainingPattern)
|
||||
|
||||
// Normalize to forward slashes but DON'T resolve relative to CWD
|
||||
// Paths should already be resolved by the caller (AggregateGlobs, etc.)
|
||||
static = filepath.ToSlash(static)
|
||||
splitPatternLogger.Trace("Normalized static part: %q", static)
|
||||
|
||||
splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, remainingPattern)
|
||||
return static, remainingPattern
|
||||
}
|
||||
|
||||
type FileCommandAssociation struct {
|
||||
File string
|
||||
IsolateCommands []ModifyCommand
|
||||
Commands []ModifyCommand
|
||||
}
|
||||
|
||||
func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[string]FileCommandAssociation, error) {
|
||||
associateFilesLogger := logger.WithPrefix("AssociateFilesWithCommands")
|
||||
associateFilesLogger.Debug("Associating files with commands")
|
||||
associateFilesLogger.Trace("Input files: %v", files)
|
||||
associateFilesLogger.Trace("Input commands: %v", commands)
|
||||
associationCount := 0
|
||||
fileCommands := make(map[string][]ModifyCommand)
|
||||
fileCommands := make(map[string]FileCommandAssociation)
|
||||
|
||||
for _, file := range files {
|
||||
// Use centralized path resolution internally but keep original file as key
|
||||
resolvedFile := ResolvePath(file)
|
||||
associateFilesLogger.Debug("Processing file: %q (resolved: %q)", file, resolvedFile)
|
||||
fileCommands[file] = FileCommandAssociation{
|
||||
File: resolvedFile,
|
||||
IsolateCommands: []ModifyCommand{},
|
||||
Commands: []ModifyCommand{},
|
||||
}
|
||||
for _, command := range commands {
|
||||
associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
|
||||
for _, glob := range command.Files {
|
||||
// TODO: Maybe memoize this function call
|
||||
matches, err := doublestar.Match(glob, file)
|
||||
// Resolve glob relative to SourceDir if it's a relative path
|
||||
var resolvedGlob string
|
||||
if !filepath.IsAbs(glob) && command.SourceDir != "" {
|
||||
resolvedGlob = filepath.Join(command.SourceDir, glob)
|
||||
associateFilesLogger.Trace("Joined relative glob %q to %q using SourceDir %q", glob, resolvedGlob, command.SourceDir)
|
||||
} else {
|
||||
resolvedGlob = glob
|
||||
}
|
||||
|
||||
// Make absolute and normalize
|
||||
resolvedGlob = ResolvePath(resolvedGlob)
|
||||
associateFilesLogger.Trace("Final resolved glob: %q", resolvedGlob)
|
||||
|
||||
// SplitPattern just splits, doesn't resolve
|
||||
static, pattern := SplitPattern(resolvedGlob)
|
||||
associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", resolvedGlob, static, pattern)
|
||||
|
||||
// Use resolved file for matching (already normalized to forward slashes by ResolvePath)
|
||||
absFile := resolvedFile
|
||||
associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)
|
||||
|
||||
// Only match if the file is under the static root
|
||||
if !(strings.HasPrefix(absFile, static+"/") || absFile == static) {
|
||||
associateFilesLogger.Trace("Skipping glob %q for file %q because file is outside static root %q", glob, file, static)
|
||||
continue
|
||||
}
|
||||
|
||||
patternFile := strings.TrimPrefix(absFile, static+`/`)
|
||||
associateFilesLogger.Trace("Pattern-relative path used for match: %q", patternFile)
|
||||
matches, err := Matches(patternFile, pattern)
|
||||
if err != nil {
|
||||
logger.Trace("Failed to match glob %s with file %s: %v", glob, file, err)
|
||||
associateFilesLogger.Warning("Failed to match glob %q with file %q: %v", glob, file, err)
|
||||
continue
|
||||
}
|
||||
if matches {
|
||||
logger.Debug("Found match for file %q and command %q", file, command.Regex)
|
||||
fileCommands[file] = append(fileCommands[file], command)
|
||||
associateFilesLogger.Debug("File %q matches glob %q. Associating with command %q", file, glob, command.Name)
|
||||
association := fileCommands[file]
|
||||
|
||||
if command.Isolate {
|
||||
associateFilesLogger.Debug("Command %q is an isolate command, adding to isolate list", command.Name)
|
||||
association.IsolateCommands = append(association.IsolateCommands, command)
|
||||
} else {
|
||||
associateFilesLogger.Debug("Command %q is a regular command, adding to regular list", command.Name)
|
||||
association.Commands = append(association.Commands, command)
|
||||
}
|
||||
fileCommands[file] = association
|
||||
associationCount++
|
||||
} else {
|
||||
associateFilesLogger.Trace("File %q did not match glob %q (pattern=%q, rel=%q)", file, glob, pattern, patternFile)
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.Debug("Found %d commands for file %q", len(fileCommands[file]), file)
|
||||
if len(fileCommands[file]) == 0 {
|
||||
logger.Info("No commands found for file %q", file)
|
||||
}
|
||||
currentFileCommands := fileCommands[file]
|
||||
associateFilesLogger.Debug("Finished processing file %q. Found %d regular commands and %d isolate commands", file, len(currentFileCommands.Commands), len(currentFileCommands.IsolateCommands))
|
||||
associateFilesLogger.Trace("Commands for file %q: %v", file, currentFileCommands.Commands)
|
||||
associateFilesLogger.Trace("Isolate commands for file %q: %v", file, currentFileCommands.IsolateCommands)
|
||||
}
|
||||
logger.Info("Found %d associations between %d files and %d commands", associationCount, len(files), len(commands))
|
||||
associateFilesLogger.Info("Completed association. Found %d total associations for %d files and %d commands", associationCount, len(files), len(commands))
|
||||
return fileCommands, nil
|
||||
}
|
||||
|
||||
func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
|
||||
logger.Info("Aggregating globs for %d commands", len(commands))
|
||||
aggregateGlobsLogger := logger.WithPrefix("AggregateGlobs")
|
||||
aggregateGlobsLogger.Debug("Aggregating glob patterns from commands")
|
||||
aggregateGlobsLogger.Trace("Input commands for aggregation: %v", commands)
|
||||
globs := make(map[string]struct{})
|
||||
for _, command := range commands {
|
||||
aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
|
||||
aggregateGlobsLogger.Trace("Command SourceDir: %q", command.SourceDir)
|
||||
for _, glob := range command.Files {
|
||||
globs[glob] = struct{}{}
|
||||
// If the glob is relative and we have a SourceDir, resolve relative to SourceDir
|
||||
var resolvedGlob string
|
||||
if !filepath.IsAbs(glob) && command.SourceDir != "" {
|
||||
// Relative path - resolve relative to the TOML file's directory
|
||||
resolvedGlob = filepath.Join(command.SourceDir, glob)
|
||||
aggregateGlobsLogger.Trace("Joined relative glob %q to %q using SourceDir %q", glob, resolvedGlob, command.SourceDir)
|
||||
} else {
|
||||
// Absolute path or no SourceDir - use as-is
|
||||
resolvedGlob = glob
|
||||
}
|
||||
|
||||
// Make absolute and normalize (ResolvePath handles both)
|
||||
resolvedGlob = ResolvePath(resolvedGlob)
|
||||
aggregateGlobsLogger.Trace("Final resolved glob: %q", resolvedGlob)
|
||||
|
||||
globs[resolvedGlob] = struct{}{}
|
||||
}
|
||||
}
|
||||
logger.Info("Found %d unique globs", len(globs))
|
||||
aggregateGlobsLogger.Debug("Finished aggregating globs. Found %d unique glob patterns", len(globs))
|
||||
aggregateGlobsLogger.Trace("Aggregated unique globs: %v", globs)
|
||||
return globs
|
||||
}
|
||||
|
||||
func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
|
||||
func ExpandGlobs(patterns map[string]struct{}) ([]string, error) {
|
||||
expandGlobsLogger := logger.WithPrefix("ExpandGLobs")
|
||||
expandGlobsLogger.Debug("Expanding glob patterns to actual files")
|
||||
expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
|
||||
var files []string
|
||||
filesMap := make(map[string]bool)
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
expandGlobsLogger.Error("Failed to get current working directory: %v", err)
|
||||
return nil, fmt.Errorf("failed to get current working directory: %w", err)
|
||||
}
|
||||
expandGlobsLogger.Debug("Current working directory: %q", cwd)
|
||||
|
||||
logger.Debug("Expanding patterns from directory: %s", cwd)
|
||||
for pattern := range patterns {
|
||||
logger.Trace("Processing pattern: %s", pattern)
|
||||
matches, _ := doublestar.Glob(os.DirFS(cwd), pattern)
|
||||
logger.Debug("Found %d matches for pattern %s", len(matches), pattern)
|
||||
for _, m := range matches {
|
||||
info, err := os.Stat(m)
|
||||
expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
|
||||
static, pattern := SplitPattern(pattern)
|
||||
key := static + "|" + pattern
|
||||
matches, ok := globMemoTable[key]
|
||||
if !ok {
|
||||
var err error
|
||||
matches, err = doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
logger.Warning("Error getting file info for %s: %v", m, err)
|
||||
expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
|
||||
continue
|
||||
}
|
||||
if !info.IsDir() && !filesMap[m] {
|
||||
logger.Trace("Adding file to process list: %s", m)
|
||||
filesMap[m], files = true, append(files, m)
|
||||
globMemoTable[key] = matches
|
||||
}
|
||||
expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
|
||||
expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
|
||||
for _, m := range matches {
|
||||
// Resolve the full path
|
||||
fullPath := ResolvePath(filepath.Join(static, m))
|
||||
info, err := os.Stat(fullPath)
|
||||
if err != nil {
|
||||
expandGlobsLogger.Warning("Error getting file info for %q: %v", fullPath, err)
|
||||
continue
|
||||
}
|
||||
if !info.IsDir() && !filesMap[fullPath] {
|
||||
expandGlobsLogger.Trace("Adding unique file to list: %q", fullPath)
|
||||
filesMap[fullPath], files = true, append(files, fullPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(files) > 0 {
|
||||
logger.Debug("Found %d files to process: %v", len(files), files)
|
||||
expandGlobsLogger.Debug("Finished expanding globs. Found %d unique files to process", len(files))
|
||||
expandGlobsLogger.Trace("Unique files to process: %v", files)
|
||||
} else {
|
||||
expandGlobsLogger.Warning("No files found after expanding glob patterns.")
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func LoadCommands(args []string) ([]ModifyCommand, error) {
|
||||
func LoadCommands(args []string) ([]ModifyCommand, map[string]interface{}, error) {
|
||||
loadCommandsLogger := logger.WithPrefix("LoadCommands")
|
||||
loadCommandsLogger.Debug("Loading commands from arguments (cook files or direct patterns)")
|
||||
loadCommandsLogger.Trace("Input arguments: %v", args)
|
||||
commands := []ModifyCommand{}
|
||||
variables := make(map[string]interface{})
|
||||
|
||||
logger.Info("Loading commands from cook files: %s", *Cookfile)
|
||||
newcommands, err := LoadCommandsFromCookFiles(*Cookfile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load commands from cook files: %w", err)
|
||||
}
|
||||
logger.Info("Successfully loaded %d commands from cook files", len(newcommands))
|
||||
commands = append(commands, newcommands...)
|
||||
logger.Info("Now total commands: %d", len(commands))
|
||||
for _, arg := range args {
|
||||
loadCommandsLogger.Debug("Processing argument for commands: %q", arg)
|
||||
var newCommands []ModifyCommand
|
||||
var newVariables map[string]interface{}
|
||||
var err error
|
||||
|
||||
logger.Info("Loading commands from arguments: %v", args)
|
||||
newcommands, err = LoadCommandFromArgs(args)
|
||||
if err != nil {
|
||||
if len(commands) == 0 {
|
||||
return nil, fmt.Errorf("failed to load commands from args: %w", err)
|
||||
// Check file extension to determine format
|
||||
if strings.HasSuffix(arg, ".toml") {
|
||||
loadCommandsLogger.Debug("Loading TOML commands from %q", arg)
|
||||
newCommands, newVariables, err = LoadCommandsFromTomlFiles(arg)
|
||||
if err != nil {
|
||||
loadCommandsLogger.Error("Failed to load TOML commands from argument %q: %v", arg, err)
|
||||
return nil, nil, fmt.Errorf("failed to load commands from TOML files: %w", err)
|
||||
}
|
||||
} else {
|
||||
// Default to YAML for .yml, .yaml, or any other extension
|
||||
loadCommandsLogger.Debug("Loading YAML commands from %q", arg)
|
||||
newCommands, newVariables, err = LoadCommandsFromCookFiles(arg)
|
||||
if err != nil {
|
||||
loadCommandsLogger.Error("Failed to load YAML commands from argument %q: %v", arg, err)
|
||||
return nil, nil, fmt.Errorf("failed to load commands from cook files: %w", err)
|
||||
}
|
||||
}
|
||||
for k, v := range newVariables {
|
||||
variables[k] = v
|
||||
}
|
||||
logger.Warning("Failed to load commands from args: %v", err)
|
||||
}
|
||||
logger.Info("Successfully loaded %d commands from args", len(newcommands))
|
||||
commands = append(commands, newcommands...)
|
||||
logger.Info("Now total commands: %d", len(commands))
|
||||
|
||||
return commands, nil
|
||||
loadCommandsLogger.Debug("Successfully loaded %d commands from %q", len(newCommands), arg)
|
||||
for _, cmd := range newCommands {
|
||||
if cmd.Disabled {
|
||||
loadCommandsLogger.Debug("Skipping disabled command: %q", cmd.Name)
|
||||
continue
|
||||
}
|
||||
commands = append(commands, cmd)
|
||||
loadCommandsLogger.Trace("Added command %q. Current total commands: %d", cmd.Name, len(commands))
|
||||
}
|
||||
}
|
||||
|
||||
loadCommandsLogger.Info("Finished loading commands. Total %d commands and %d variables loaded", len(commands), len(variables))
|
||||
return commands, variables, nil
|
||||
}
|
||||
|
||||
func LoadCommandFromArgs(args []string) ([]ModifyCommand, error) {
|
||||
// Cannot reset without git, right?
|
||||
if *ResetFlag {
|
||||
*GitFlag = true
|
||||
}
|
||||
|
||||
if len(args) < 3 {
|
||||
return nil, fmt.Errorf("at least %d arguments are required", 3)
|
||||
}
|
||||
|
||||
command := ModifyCommand{
|
||||
Regex: args[0],
|
||||
Lua: args[1],
|
||||
Files: args[2:],
|
||||
Git: *GitFlag,
|
||||
Reset: *ResetFlag,
|
||||
LogLevel: *LogLevel,
|
||||
}
|
||||
|
||||
if err := command.Validate(); err != nil {
|
||||
return nil, fmt.Errorf("invalid command: %w", err)
|
||||
}
|
||||
|
||||
return []ModifyCommand{command}, nil
|
||||
}
|
||||
|
||||
func LoadCommandsFromCookFiles(s string) ([]ModifyCommand, error) {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get current working directory: %w", err)
|
||||
}
|
||||
|
||||
func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, map[string]interface{}, error) {
|
||||
loadCookFilesLogger := logger.WithPrefix("LoadCommandsFromCookFiles").WithField("pattern", pattern)
|
||||
loadCookFilesLogger.Debug("Loading commands from cook files based on pattern")
|
||||
loadCookFilesLogger.Trace("Input pattern: %q", pattern)
|
||||
static, pattern := SplitPattern(pattern)
|
||||
commands := []ModifyCommand{}
|
||||
cookFiles, err := doublestar.Glob(os.DirFS(cwd), *Cookfile)
|
||||
variables := make(map[string]interface{})
|
||||
cookFiles, err := doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to glob cook files: %w", err)
|
||||
loadCookFilesLogger.Error("Failed to glob cook files for pattern %q: %v", pattern, err)
|
||||
return nil, nil, fmt.Errorf("failed to glob cook files: %w", err)
|
||||
}
|
||||
loadCookFilesLogger.Debug("Found %d cook files for pattern %q", len(cookFiles), pattern)
|
||||
loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)
|
||||
|
||||
for _, cookFile := range cookFiles {
|
||||
// Use centralized path resolution
|
||||
cookFile = ResolvePath(filepath.Join(static, cookFile))
|
||||
loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)
|
||||
|
||||
cookFileData, err := os.ReadFile(cookFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read cook file: %w", err)
|
||||
loadCookFilesLogger.Error("Failed to read cook file %q: %v", cookFile, err)
|
||||
return nil, nil, fmt.Errorf("failed to read cook file: %w", err)
|
||||
}
|
||||
newcommands, err := LoadCommandsFromCookFile(cookFileData)
|
||||
loadCookFilesLogger.Trace("Read %d bytes from cook file %q", len(cookFileData), cookFile)
|
||||
newCommands, newVariables, err := LoadCommandsFromCookFile(cookFileData)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load commands from cook file: %w", err)
|
||||
loadCookFilesLogger.Error("Failed to load commands from cook file data for %q: %v", cookFile, err)
|
||||
return nil, nil, fmt.Errorf("failed to load commands from cook file: %w", err)
|
||||
}
|
||||
commands = append(commands, newcommands...)
|
||||
// Set source directory for each command
|
||||
sourceDir := filepath.Dir(cookFile)
|
||||
for i := range newCommands {
|
||||
newCommands[i].SourceDir = sourceDir
|
||||
newCommands[i].SourceFile = cookFile
|
||||
}
|
||||
commands = append(commands, newCommands...)
|
||||
for k, v := range newVariables {
|
||||
variables[k] = v
|
||||
}
|
||||
loadCookFilesLogger.Debug("Added %d commands and %d variables from cook file %q. Total commands now: %d", len(newCommands), len(newVariables), cookFile, len(commands))
|
||||
}
|
||||
|
||||
return commands, nil
|
||||
loadCookFilesLogger.Debug("Finished loading commands from cook files. Total %d commands and %d variables", len(commands), len(variables))
|
||||
return commands, variables, nil
|
||||
}
|
||||
|
||||
func LoadCommandsFromCookFile(cookFileData []byte) ([]ModifyCommand, error) {
|
||||
commands := []ModifyCommand{}
|
||||
err := yaml.Unmarshal(cookFileData, &commands)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
|
||||
func LoadCommandsFromCookFile(cookFileData []byte) ([]ModifyCommand, map[string]interface{}, error) {
|
||||
loadCommandLogger := logger.WithPrefix("LoadCommandsFromCookFile")
|
||||
loadCommandLogger.Debug("Unmarshaling commands from cook file data")
|
||||
loadCommandLogger.Trace("Cook file data length: %d", len(cookFileData))
|
||||
|
||||
var rootNode yaml.Node
|
||||
if err := yaml.Unmarshal(cookFileData, &rootNode); err != nil {
|
||||
loadCommandLogger.Error("Failed to parse cook file root: %v", err)
|
||||
return nil, nil, fmt.Errorf("failed to parse cook file: %w", err)
|
||||
}
|
||||
|
||||
var cookFile struct {
|
||||
Variables map[string]interface{} `yaml:"variables,omitempty"`
|
||||
Commands []ModifyCommand `yaml:"commands"`
|
||||
}
|
||||
|
||||
if len(rootNode.Content) == 0 {
|
||||
loadCommandLogger.Debug("Cook file is empty")
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
rootKind := rootNode.Content[0].Kind
|
||||
switch rootKind {
|
||||
case yaml.MappingNode:
|
||||
err := yaml.Unmarshal(cookFileData, &cookFile)
|
||||
if err != nil {
|
||||
loadCommandLogger.Error("Failed to unmarshal cook file mapping: %v", err)
|
||||
return nil, nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
|
||||
}
|
||||
loadCommandLogger.Debug("Successfully unmarshaled %d commands and %d variables from mapping", len(cookFile.Commands), len(cookFile.Variables))
|
||||
loadCommandLogger.Trace("Unmarshaled commands: %v", cookFile.Commands)
|
||||
loadCommandLogger.Trace("Unmarshaled variables: %v", cookFile.Variables)
|
||||
return cookFile.Commands, cookFile.Variables, nil
|
||||
case yaml.SequenceNode:
|
||||
var commands []ModifyCommand
|
||||
err := yaml.Unmarshal(cookFileData, &commands)
|
||||
if err != nil {
|
||||
loadCommandLogger.Error("Failed to unmarshal cook file sequence: %v", err)
|
||||
return nil, nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
|
||||
}
|
||||
loadCommandLogger.Debug("Successfully unmarshaled %d commands from sequence", len(commands))
|
||||
loadCommandLogger.Trace("Unmarshaled commands: %v", commands)
|
||||
return commands, nil, nil
|
||||
default:
|
||||
loadCommandLogger.Error("Unsupported cook file root kind: %d", rootKind)
|
||||
return nil, nil, fmt.Errorf("unsupported cook file root type")
|
||||
}
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// CountGlobsBeforeDedup counts the total number of glob patterns across all commands before deduplication
|
||||
func CountGlobsBeforeDedup(commands []ModifyCommand) int {
|
||||
countGlobsLogger := logger.WithPrefix("CountGlobsBeforeDedup")
|
||||
countGlobsLogger.Debug("Counting glob patterns before deduplication")
|
||||
count := 0
|
||||
for _, cmd := range commands {
|
||||
countGlobsLogger.Trace("Processing command %q, adding %d globs", cmd.Name, len(cmd.Files))
|
||||
count += len(cmd.Files)
|
||||
}
|
||||
countGlobsLogger.Debug("Total glob patterns before deduplication: %d", count)
|
||||
return count
|
||||
}
|
||||
|
||||
func FilterCommands(commands []ModifyCommand, filter string) []ModifyCommand {
|
||||
filterCommandsLogger := logger.WithPrefix("FilterCommands").WithField("filter", filter)
|
||||
filterCommandsLogger.Debug("Filtering commands")
|
||||
filterCommandsLogger.Trace("Input commands: %v", commands)
|
||||
filteredCommands := []ModifyCommand{}
|
||||
filters := strings.Split(filter, ",")
|
||||
filterCommandsLogger.Trace("Split filters: %v", filters)
|
||||
for _, cmd := range commands {
|
||||
filterCommandsLogger.Debug("Checking command %q against filters", cmd.Name)
|
||||
for _, f := range filters {
|
||||
if strings.Contains(cmd.Name, f) {
|
||||
filterCommandsLogger.Debug("Command %q matches filter %q, adding to filtered list", cmd.Name, f)
|
||||
filteredCommands = append(filteredCommands, cmd)
|
||||
break // Command matches, no need to check other filters
|
||||
}
|
||||
}
|
||||
}
|
||||
filterCommandsLogger.Debug("Finished filtering commands. Found %d filtered commands", len(filteredCommands))
|
||||
filterCommandsLogger.Trace("Filtered commands: %v", filteredCommands)
|
||||
return filteredCommands
|
||||
}
|
||||
|
||||
func LoadCommandsFromTomlFiles(pattern string) ([]ModifyCommand, map[string]interface{}, error) {
|
||||
loadTomlFilesLogger := logger.WithPrefix("LoadCommandsFromTomlFiles").WithField("pattern", pattern)
|
||||
loadTomlFilesLogger.Debug("Loading commands from TOML files based on pattern")
|
||||
loadTomlFilesLogger.Trace("Input pattern: %q", pattern)
|
||||
static, pattern := SplitPattern(pattern)
|
||||
commands := []ModifyCommand{}
|
||||
variables := make(map[string]interface{})
|
||||
tomlFiles, err := doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
loadTomlFilesLogger.Error("Failed to glob TOML files for pattern %q: %v", pattern, err)
|
||||
return nil, nil, fmt.Errorf("failed to glob TOML files: %w", err)
|
||||
}
|
||||
loadTomlFilesLogger.Debug("Found %d TOML files for pattern %q", len(tomlFiles), pattern)
|
||||
loadTomlFilesLogger.Trace("TOML files found: %v", tomlFiles)
|
||||
|
||||
for _, tomlFile := range tomlFiles {
|
||||
// Use centralized path resolution
|
||||
tomlFile = ResolvePath(filepath.Join(static, tomlFile))
|
||||
loadTomlFilesLogger.Debug("Loading commands from individual TOML file: %q", tomlFile)
|
||||
|
||||
tomlFileData, err := os.ReadFile(tomlFile)
|
||||
if err != nil {
|
||||
loadTomlFilesLogger.Error("Failed to read TOML file %q: %v", tomlFile, err)
|
||||
return nil, nil, fmt.Errorf("failed to read TOML file: %w", err)
|
||||
}
|
||||
loadTomlFilesLogger.Trace("Read %d bytes from TOML file %q", len(tomlFileData), tomlFile)
|
||||
newCommands, newVariables, err := LoadCommandsFromTomlFile(tomlFileData)
|
||||
if err != nil {
|
||||
loadTomlFilesLogger.Error("Failed to load commands from TOML file data for %q: %v", tomlFile, err)
|
||||
return nil, nil, fmt.Errorf("failed to load commands from TOML file: %w", err)
|
||||
}
|
||||
// Set source directory for each command
|
||||
sourceDir := filepath.Dir(tomlFile)
|
||||
for i := range newCommands {
|
||||
newCommands[i].SourceDir = sourceDir
|
||||
newCommands[i].SourceFile = tomlFile
|
||||
}
|
||||
commands = append(commands, newCommands...)
|
||||
for k, v := range newVariables {
|
||||
variables[k] = v
|
||||
}
|
||||
loadTomlFilesLogger.Debug("Added %d commands and %d variables from TOML file %q. Total commands now: %d", len(newCommands), len(newVariables), tomlFile, len(commands))
|
||||
}
|
||||
|
||||
loadTomlFilesLogger.Debug("Finished loading commands from TOML files. Total %d commands and %d variables", len(commands), len(variables))
|
||||
return commands, variables, nil
|
||||
}
|
||||
|
||||
func LoadCommandsFromTomlFile(tomlFileData []byte) ([]ModifyCommand, map[string]interface{}, error) {
|
||||
loadTomlCommandLogger := logger.WithPrefix("LoadCommandsFromTomlFile")
|
||||
loadTomlCommandLogger.Debug("Unmarshaling commands from TOML file data")
|
||||
loadTomlCommandLogger.Trace("TOML file data length: %d", len(tomlFileData))
|
||||
|
||||
// TOML structure for commands array and top-level variables
|
||||
var tomlData struct {
|
||||
Variables map[string]interface{} `toml:"variables,omitempty"`
|
||||
Commands []ModifyCommand `toml:"commands"`
|
||||
// Also support direct array without wrapper
|
||||
DirectCommands []ModifyCommand `toml:"-"`
|
||||
}
|
||||
|
||||
// First try to parse as wrapped structure
|
||||
err := toml.Unmarshal(tomlFileData, &tomlData)
|
||||
if err != nil {
|
||||
loadTomlCommandLogger.Error("Failed to unmarshal TOML file data: %v", err)
|
||||
return nil, nil, fmt.Errorf("failed to unmarshal TOML file: %w", err)
|
||||
}
|
||||
|
||||
var commands []ModifyCommand
|
||||
variables := make(map[string]interface{})
|
||||
|
||||
// Extract top-level variables
|
||||
if len(tomlData.Variables) > 0 {
|
||||
loadTomlCommandLogger.Debug("Found %d top-level variables", len(tomlData.Variables))
|
||||
for k, v := range tomlData.Variables {
|
||||
variables[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// Use commands from wrapped structure
|
||||
commands = tomlData.Commands
|
||||
loadTomlCommandLogger.Debug("Successfully unmarshaled %d commands and %d variables", len(commands), len(variables))
|
||||
loadTomlCommandLogger.Trace("Unmarshaled commands: %v", commands)
|
||||
loadTomlCommandLogger.Trace("Unmarshaled variables: %v", variables)
|
||||
return commands, variables, nil
|
||||
}
|
||||
|
||||
296
utils/modifycommand_helpers_test.go
Normal file
296
utils/modifycommand_helpers_test.go
Normal file
@@ -0,0 +1,296 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAggregateGlobsWithDuplicates(t *testing.T) {
|
||||
commands := []ModifyCommand{
|
||||
{Files: []string{"*.txt", "*.md"}},
|
||||
{Files: []string{"*.txt", "*.go"}}, // *.txt is duplicate
|
||||
{Files: []string{"test/**/*.xml"}},
|
||||
}
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
|
||||
// Should deduplicate
|
||||
assert.Equal(t, 4, len(globs))
|
||||
// AggregateGlobs resolves paths, which uses forward slashes internally
|
||||
assert.Contains(t, globs, ResolvePath("*.txt"))
|
||||
assert.Contains(t, globs, ResolvePath("*.md"))
|
||||
assert.Contains(t, globs, ResolvePath("*.go"))
|
||||
assert.Contains(t, globs, ResolvePath("test/**/*.xml"))
|
||||
}
|
||||
|
||||
func TestExpandGlobsWithActualFiles(t *testing.T) {
|
||||
// Create temp dir with test files
|
||||
tmpDir, err := os.MkdirTemp("", "glob-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create test files
|
||||
testFile1 := filepath.Join(tmpDir, "test1.txt")
|
||||
testFile2 := filepath.Join(tmpDir, "test2.txt")
|
||||
testFile3 := filepath.Join(tmpDir, "test.md")
|
||||
|
||||
os.WriteFile(testFile1, []byte("test"), 0644)
|
||||
os.WriteFile(testFile2, []byte("test"), 0644)
|
||||
os.WriteFile(testFile3, []byte("test"), 0644)
|
||||
|
||||
// Change to temp directory so glob pattern can find files
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// Test expanding globs using ResolvePath to normalize the pattern
|
||||
globs := map[string]struct{}{
|
||||
ResolvePath("*.txt"): {},
|
||||
}
|
||||
|
||||
files, err := ExpandGlobs(globs)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 2, len(files))
|
||||
}
|
||||
|
||||
func TestSplitPatternWithTilde(t *testing.T) {
|
||||
pattern := "~/test/*.txt"
|
||||
static, pat := SplitPattern(pattern)
|
||||
|
||||
// Should expand ~
|
||||
assert.NotEqual(t, "~", static)
|
||||
assert.Contains(t, pat, "*.txt")
|
||||
}
|
||||
|
||||
func TestLoadCommandsWithDisabled(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "disabled-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
yamlContent := `
|
||||
variables:
|
||||
test: "value"
|
||||
|
||||
commands:
|
||||
- name: "enabled_cmd"
|
||||
regex: "test"
|
||||
lua: "v1 * 2"
|
||||
files: ["*.txt"]
|
||||
- name: "disabled_cmd"
|
||||
regex: "test2"
|
||||
lua: "v1 * 3"
|
||||
files: ["*.txt"]
|
||||
disable: true
|
||||
`
|
||||
|
||||
yamlFile := filepath.Join(tmpDir, "test.yml")
|
||||
err = os.WriteFile(yamlFile, []byte(yamlContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Change to temp directory so LoadCommands can find the file with a simple pattern
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
commands, variables, err := LoadCommands([]string{"test.yml"})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Should only load enabled command
|
||||
assert.Equal(t, 1, len(commands))
|
||||
assert.Equal(t, "enabled_cmd", commands[0].Name)
|
||||
|
||||
// Should still load variables
|
||||
assert.Equal(t, 1, len(variables))
|
||||
}
|
||||
|
||||
func TestFilterCommandsByName(t *testing.T) {
|
||||
commands := []ModifyCommand{
|
||||
{Name: "test_multiply"},
|
||||
{Name: "test_divide"},
|
||||
{Name: "other_command"},
|
||||
{Name: "test_add"},
|
||||
}
|
||||
|
||||
// Filter by "test"
|
||||
filtered := FilterCommands(commands, "test")
|
||||
assert.Equal(t, 3, len(filtered))
|
||||
|
||||
// Filter by multiple
|
||||
filtered = FilterCommands(commands, "multiply,divide")
|
||||
assert.Equal(t, 2, len(filtered))
|
||||
}
|
||||
|
||||
func TestCountGlobsBeforeDedup(t *testing.T) {
|
||||
commands := []ModifyCommand{
|
||||
{Files: []string{"*.txt", "*.md", "*.go"}},
|
||||
{Files: []string{"*.xml"}},
|
||||
{Files: []string{"test/**/*.txt", "data/**/*.json"}},
|
||||
}
|
||||
|
||||
count := CountGlobsBeforeDedup(commands)
|
||||
assert.Equal(t, 6, count)
|
||||
}
|
||||
|
||||
func TestMatchesWithMemoization(t *testing.T) {
|
||||
path := "test/file.txt"
|
||||
glob := "**/*.txt"
|
||||
|
||||
// First call
|
||||
matches1, err1 := Matches(path, glob)
|
||||
assert.NoError(t, err1)
|
||||
assert.True(t, matches1)
|
||||
|
||||
// Second call should use memo
|
||||
matches2, err2 := Matches(path, glob)
|
||||
assert.NoError(t, err2)
|
||||
assert.Equal(t, matches1, matches2)
|
||||
}
|
||||
|
||||
func TestValidateCommand(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
cmd ModifyCommand
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Valid command",
|
||||
cmd: ModifyCommand{
|
||||
Regex: "test",
|
||||
Lua: "v1 * 2",
|
||||
Files: []string{"*.txt"},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Valid JSON mode without regex",
|
||||
cmd: ModifyCommand{
|
||||
JSON: true,
|
||||
Lua: "data.value = data.value * 2; modified = true",
|
||||
Files: []string{"*.json"},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Valid XML mode without regex",
|
||||
cmd: ModifyCommand{
|
||||
XML: true,
|
||||
Lua: `data.attr.version = "2" ; return true`,
|
||||
Files: []string{"*.xml"},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Missing regex in non-structured mode",
|
||||
cmd: ModifyCommand{
|
||||
Lua: "v1 * 2",
|
||||
Files: []string{"*.txt"},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "Missing Lua",
|
||||
cmd: ModifyCommand{
|
||||
Regex: "test",
|
||||
Files: []string{"*.txt"},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "Missing files",
|
||||
cmd: ModifyCommand{
|
||||
Regex: "test",
|
||||
Lua: "v1 * 2",
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "Invalid regex_pred",
|
||||
cmd: ModifyCommand{
|
||||
Regex: "test",
|
||||
RegexPred: "(",
|
||||
Lua: "v1 * 2",
|
||||
Files: []string{"*.txt"},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := tt.cmd.Validate()
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadCommandsFromTOMLWithVariables(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "toml-vars-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
tomlContent := `[variables]
|
||||
multiplier = 3
|
||||
prefix = "PREFIX_"
|
||||
|
||||
[[commands]]
|
||||
name = "test_cmd"
|
||||
regex = "value = !num"
|
||||
lua = "v1 * multiplier"
|
||||
files = ["*.txt"]
|
||||
`
|
||||
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Change to temp directory so glob pattern can find the file
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
commands, variables, err := LoadCommandsFromTomlFiles("test.toml")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 1, len(commands))
|
||||
assert.Equal(t, 2, len(variables))
|
||||
assert.Equal(t, int64(3), variables["multiplier"])
|
||||
assert.Equal(t, "PREFIX_", variables["prefix"])
|
||||
}
|
||||
|
||||
func TestLoadCommandsWithTOMLExtension(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "toml-ext-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
tomlContent := `
|
||||
[variables]
|
||||
test_var = "value"
|
||||
|
||||
[[commands]]
|
||||
name = "TestCmd"
|
||||
regex = "test"
|
||||
lua = "return true"
|
||||
files = ["*.txt"]
|
||||
`
|
||||
tomlFile := filepath.Join(tmpDir, "test.toml")
|
||||
err = os.WriteFile(tomlFile, []byte(tomlContent), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
os.Chdir(tmpDir)
|
||||
|
||||
// This should trigger the .toml suffix check in LoadCommands
|
||||
commands, variables, err := LoadCommands([]string{"test.toml"})
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 1)
|
||||
assert.Equal(t, "TestCmd", commands[0].Name)
|
||||
assert.Len(t, variables, 1)
|
||||
assert.Equal(t, "value", variables["test_var"])
|
||||
}
|
||||
303
utils/modifycommand_path_resolution_test.go
Normal file
303
utils/modifycommand_path_resolution_test.go
Normal file
@@ -0,0 +1,303 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAggregateGlobsWithSourceDir(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "aggregate-globs-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create a subdirectory structure
|
||||
subDir := filepath.Join(tmpDir, "subdir")
|
||||
err = os.MkdirAll(subDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test files
|
||||
testFile := filepath.Join(subDir, "test.xml")
|
||||
err = os.WriteFile(testFile, []byte("<test/>"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test1",
|
||||
Files: []string{"subdir/*.xml"},
|
||||
SourceDir: tmpDir,
|
||||
},
|
||||
{
|
||||
Name: "test2",
|
||||
Files: []string{"*.txt"},
|
||||
SourceDir: tmpDir,
|
||||
},
|
||||
}
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
|
||||
// Both should be resolved relative to tmpDir
|
||||
expectedSubdir := ResolvePath(filepath.Join(tmpDir, "subdir/*.xml"))
|
||||
expectedTxt := ResolvePath(filepath.Join(tmpDir, "*.txt"))
|
||||
|
||||
assert.Contains(t, globs, expectedSubdir, "Should contain resolved subdir glob")
|
||||
assert.Contains(t, globs, expectedTxt, "Should contain resolved txt glob")
|
||||
assert.Len(t, globs, 2, "Should have 2 unique globs")
|
||||
}
|
||||
|
||||
func TestAggregateGlobsWithAbsolutePaths(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "aggregate-globs-abs-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
absPath := ResolvePath(tmpDir)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test1",
|
||||
Files: []string{absPath + "/*.xml"},
|
||||
SourceDir: tmpDir, // SourceDir should be ignored for absolute paths
|
||||
},
|
||||
}
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
|
||||
// Absolute path should be used as-is (after ResolvePath normalization)
|
||||
expected := ResolvePath(absPath + "/*.xml")
|
||||
assert.Contains(t, globs, expected, "Should contain absolute path glob")
|
||||
assert.Len(t, globs, 1, "Should have 1 glob")
|
||||
}
|
||||
|
||||
func TestAggregateGlobsWithoutSourceDir(t *testing.T) {
|
||||
cwd, err := os.Getwd()
|
||||
assert.NoError(t, err)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test1",
|
||||
Files: []string{"*.xml"},
|
||||
SourceDir: "", // No SourceDir
|
||||
},
|
||||
}
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
|
||||
// Without SourceDir, should resolve relative to CWD
|
||||
expected := ResolvePath(filepath.Join(cwd, "*.xml"))
|
||||
assert.Contains(t, globs, expected, "Should resolve relative to CWD when SourceDir is empty")
|
||||
}
|
||||
|
||||
func TestAggregateGlobsConsistentRegardlessOfCWD(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "aggregate-globs-cwd-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create test structure
|
||||
testFile := filepath.Join(tmpDir, "test.xml")
|
||||
err = os.WriteFile(testFile, []byte("<test/>"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test",
|
||||
Files: []string{"test.xml"},
|
||||
SourceDir: tmpDir,
|
||||
},
|
||||
}
|
||||
|
||||
// Get original CWD
|
||||
originalCwd, err := os.Getwd()
|
||||
assert.NoError(t, err)
|
||||
defer os.Chdir(originalCwd)
|
||||
|
||||
// Test from original directory
|
||||
globs1 := AggregateGlobs(commands)
|
||||
expected1 := ResolvePath(filepath.Join(tmpDir, "test.xml"))
|
||||
|
||||
// Change to tmpDir
|
||||
err = os.Chdir(tmpDir)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test from tmpDir - should produce same result
|
||||
globs2 := AggregateGlobs(commands)
|
||||
expected2 := ResolvePath(filepath.Join(tmpDir, "test.xml"))
|
||||
|
||||
// Both should resolve to the same absolute path
|
||||
assert.Equal(t, expected1, expected2, "Paths should be identical regardless of CWD")
|
||||
assert.Contains(t, globs1, expected1, "First run should contain expected path")
|
||||
assert.Contains(t, globs2, expected2, "Second run should contain expected path")
|
||||
assert.Equal(t, globs1, globs2, "Globs should be identical regardless of CWD")
|
||||
}
|
||||
|
||||
func TestAssociateFilesWithCommandsSourceDir(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "associate-files-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
// Create test structure
|
||||
subDir := filepath.Join(tmpDir, "data")
|
||||
err = os.MkdirAll(subDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
|
||||
testFile := filepath.Join(subDir, "test.xml")
|
||||
err = os.WriteFile(testFile, []byte("<test/>"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test",
|
||||
Regex: "pattern",
|
||||
Lua: "expr",
|
||||
Files: []string{"data/test.xml"},
|
||||
SourceDir: tmpDir,
|
||||
},
|
||||
}
|
||||
|
||||
files := []string{testFile}
|
||||
associations, err := AssociateFilesWithCommands(files, commands)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// File should be associated with command
|
||||
assert.Contains(t, associations, testFile, "File should be in associations")
|
||||
association := associations[testFile]
|
||||
assert.Len(t, association.Commands, 1, "Should have 1 command")
|
||||
assert.Equal(t, "test", association.Commands[0].Name, "Command name should match")
|
||||
}
|
||||
|
||||
func TestAssociateFilesWithCommandsAbsolutePath(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "associate-files-abs-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.xml")
|
||||
err = os.WriteFile(testFile, []byte("<test/>"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
absPath := ResolvePath(testFile)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test",
|
||||
Regex: "pattern",
|
||||
Lua: "expr",
|
||||
Files: []string{absPath},
|
||||
SourceDir: tmpDir, // Should be ignored for absolute paths
|
||||
},
|
||||
}
|
||||
|
||||
files := []string{testFile}
|
||||
associations, err := AssociateFilesWithCommands(files, commands)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// File should be associated
|
||||
assert.Contains(t, associations, testFile, "File should be in associations")
|
||||
association := associations[testFile]
|
||||
assert.Len(t, association.Commands, 1, "Should have 1 command")
|
||||
}
|
||||
|
||||
func TestAssociateFilesWithCommandsNoSourceDir(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "associate-files-no-sourcedir-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
testFile := filepath.Join(tmpDir, "test.xml")
|
||||
err = os.WriteFile(testFile, []byte("<test/>"), 0644)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Use absolute path since we have no SourceDir
|
||||
absPath := ResolvePath(testFile)
|
||||
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test",
|
||||
Regex: "pattern",
|
||||
Lua: "expr",
|
||||
Files: []string{absPath},
|
||||
SourceDir: "", // No SourceDir
|
||||
},
|
||||
}
|
||||
|
||||
files := []string{testFile}
|
||||
associations, err := AssociateFilesWithCommands(files, commands)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// File should be associated
|
||||
assert.Contains(t, associations, testFile, "File should be in associations")
|
||||
association := associations[testFile]
|
||||
assert.Len(t, association.Commands, 1, "Should have 1 command")
|
||||
}
|
||||
|
||||
func TestAggregateGlobsResolvesPathsRelativeToSourceDir(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "sourcedir-condition-test-*")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
glob string
|
||||
sourceDir string
|
||||
shouldResolve bool
|
||||
}{
|
||||
{
|
||||
name: "Relative path with SourceDir",
|
||||
glob: "test.xml",
|
||||
sourceDir: tmpDir,
|
||||
shouldResolve: true,
|
||||
},
|
||||
{
|
||||
name: "Absolute path with SourceDir",
|
||||
glob: ResolvePath(filepath.Join(tmpDir, "test.xml")),
|
||||
sourceDir: tmpDir,
|
||||
shouldResolve: false, // Should use absolute path as-is
|
||||
},
|
||||
{
|
||||
name: "Relative path without SourceDir",
|
||||
glob: "test.xml",
|
||||
sourceDir: "",
|
||||
shouldResolve: false, // Should resolve relative to CWD
|
||||
},
|
||||
{
|
||||
name: "Absolute path without SourceDir",
|
||||
glob: ResolvePath(filepath.Join(tmpDir, "test.xml")),
|
||||
sourceDir: "",
|
||||
shouldResolve: false, // Should use absolute path as-is
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
commands := []ModifyCommand{
|
||||
{
|
||||
Name: "test",
|
||||
Files: []string{tt.glob},
|
||||
SourceDir: tt.sourceDir,
|
||||
},
|
||||
}
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
assert.Len(t, globs, 1, "Should have 1 glob")
|
||||
|
||||
var resolvedGlob string
|
||||
for g := range globs {
|
||||
resolvedGlob = g
|
||||
}
|
||||
|
||||
if tt.shouldResolve {
|
||||
// Should be resolved relative to SourceDir
|
||||
expected := ResolvePath(filepath.Join(tt.sourceDir, tt.glob))
|
||||
assert.Equal(t, expected, resolvedGlob, "Should resolve relative to SourceDir")
|
||||
} else if filepath.IsAbs(tt.glob) {
|
||||
// Absolute path should be normalized but not changed
|
||||
expected := ResolvePath(tt.glob)
|
||||
assert.Equal(t, expected, resolvedGlob, "Absolute path should be normalized")
|
||||
} else {
|
||||
// Relative path without SourceDir should resolve to CWD
|
||||
cwd, _ := os.Getwd()
|
||||
expected := ResolvePath(filepath.Join(cwd, tt.glob))
|
||||
assert.Equal(t, expected, resolvedGlob, "Should resolve relative to CWD")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestModifyCommandValidate(t *testing.T) {
|
||||
@@ -71,17 +72,12 @@ func TestModifyCommandValidate(t *testing.T) {
|
||||
err := tc.command.Validate()
|
||||
|
||||
if tc.shouldError {
|
||||
if err == nil {
|
||||
t.Errorf("Expected an error for command %+v but got none", tc.command)
|
||||
}
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
// Check that default log level is set
|
||||
if tc.command.LogLevel == "" {
|
||||
t.Errorf("Default log level not set")
|
||||
t.Fatal("default log level not set")
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -89,12 +85,7 @@ func TestModifyCommandValidate(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
// Create a temporary directory structure for testing
|
||||
tmpDir, err := os.MkdirTemp("", "associate-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
// Create some test files
|
||||
testFiles := []string{
|
||||
@@ -104,10 +95,8 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
}
|
||||
|
||||
// Create the directory structure
|
||||
err = os.MkdirAll(filepath.Join(tmpDir, "subdir"), 0755)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create subdirectories: %v", err)
|
||||
}
|
||||
err := os.MkdirAll(filepath.Join(tmpDir, "subdir"), 0755)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create the files
|
||||
for _, file := range testFiles {
|
||||
@@ -120,10 +109,12 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// Change to the temporary directory to use relative paths
|
||||
origDir, _ := os.Getwd()
|
||||
os.Chdir(tmpDir)
|
||||
defer os.Chdir(origDir)
|
||||
origDir, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, os.Chdir(tmpDir))
|
||||
defer func() {
|
||||
require.NoError(t, os.Chdir(origDir))
|
||||
}()
|
||||
|
||||
// Define commands with different globs
|
||||
commands := []ModifyCommand{
|
||||
@@ -152,27 +143,17 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
}
|
||||
|
||||
associations, err := AssociateFilesWithCommands(relFiles, commands)
|
||||
if err != nil {
|
||||
t.Fatalf("AssociateFilesWithCommands failed: %v", err)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
// The associations expected depends on the implementation
|
||||
// Let's check the actual associations and verify they make sense
|
||||
for file, cmds := range associations {
|
||||
t.Logf("File %s is associated with %d commands", file, len(cmds))
|
||||
for i, cmd := range cmds {
|
||||
t.Logf(" Command %d: Pattern=%s, Files=%v", i, cmd.Regex, cmd.Files)
|
||||
}
|
||||
|
||||
// Specific validation based on our file types
|
||||
for file, assoc := range associations {
|
||||
switch file {
|
||||
case "file1.xml":
|
||||
if len(cmds) < 1 {
|
||||
t.Errorf("Expected at least 1 command for file1.xml, got %d", len(cmds))
|
||||
if len(assoc.Commands) < 1 {
|
||||
t.Errorf("Expected at least 1 command for file1.xml, got %d", len(assoc.Commands))
|
||||
}
|
||||
// Verify at least one command with *.xml pattern
|
||||
hasXmlGlob := false
|
||||
for _, cmd := range cmds {
|
||||
for _, cmd := range assoc.Commands {
|
||||
for _, glob := range cmd.Files {
|
||||
if glob == "*.xml" {
|
||||
hasXmlGlob = true
|
||||
@@ -187,12 +168,12 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
t.Errorf("Expected command with *.xml glob for file1.xml")
|
||||
}
|
||||
case "file2.txt":
|
||||
if len(cmds) < 1 {
|
||||
t.Errorf("Expected at least 1 command for file2.txt, got %d", len(cmds))
|
||||
if len(assoc.Commands) < 1 {
|
||||
t.Errorf("Expected at least 1 command for file2.txt, got %d", len(assoc.Commands))
|
||||
}
|
||||
// Verify at least one command with *.txt pattern
|
||||
hasTxtGlob := false
|
||||
for _, cmd := range cmds {
|
||||
for _, cmd := range assoc.Commands {
|
||||
for _, glob := range cmd.Files {
|
||||
if glob == "*.txt" {
|
||||
hasTxtGlob = true
|
||||
@@ -207,12 +188,12 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
t.Errorf("Expected command with *.txt glob for file2.txt")
|
||||
}
|
||||
case "subdir/file3.xml":
|
||||
if len(cmds) < 1 {
|
||||
t.Errorf("Expected at least 1 command for subdir/file3.xml, got %d", len(cmds))
|
||||
if len(assoc.Commands) < 1 {
|
||||
t.Errorf("Expected at least 1 command for subdir/file3.xml, got %d", len(assoc.Commands))
|
||||
}
|
||||
// Should match both *.xml and subdir/* patterns
|
||||
matches := 0
|
||||
for _, cmd := range cmds {
|
||||
for _, cmd := range assoc.Commands {
|
||||
for _, glob := range cmd.Files {
|
||||
if glob == "*.xml" || glob == "subdir/*" {
|
||||
matches++
|
||||
@@ -220,9 +201,7 @@ func TestAssociateFilesWithCommands(t *testing.T) {
|
||||
}
|
||||
}
|
||||
}
|
||||
if matches < 1 {
|
||||
t.Errorf("Expected subdir/file3.xml to match at least one pattern (*.xml or subdir/*)")
|
||||
}
|
||||
assert.GreaterOrEqual(t, matches, 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -248,11 +227,19 @@ func TestAggregateGlobs(t *testing.T) {
|
||||
|
||||
globs := AggregateGlobs(commands)
|
||||
|
||||
// Now we properly resolve only the static part of globs
|
||||
// *.xml has no static part (current dir), so it becomes resolved_dir/*.xml
|
||||
// *.txt has no static part (current dir), so it becomes resolved_dir/*.txt
|
||||
// *.json has no static part (current dir), so it becomes resolved_dir/*.json
|
||||
// subdir/*.xml has static "subdir", so it becomes resolved_dir/subdir/*.xml
|
||||
cwd, _ := os.Getwd()
|
||||
resolvedCwd := ResolvePath(cwd)
|
||||
|
||||
expected := map[string]struct{}{
|
||||
"*.xml": {},
|
||||
"*.txt": {},
|
||||
"*.json": {},
|
||||
"subdir/*.xml": {},
|
||||
resolvedCwd + "/*.xml": {},
|
||||
resolvedCwd + "/*.txt": {},
|
||||
resolvedCwd + "/*.json": {},
|
||||
resolvedCwd + "/subdir/*.xml": {},
|
||||
}
|
||||
|
||||
if len(globs) != len(expected) {
|
||||
@@ -266,142 +253,21 @@ func TestAggregateGlobs(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadCommandFromArgs(t *testing.T) {
|
||||
// Save original flags
|
||||
origGitFlag := *GitFlag
|
||||
origResetFlag := *ResetFlag
|
||||
origLogLevel := *LogLevel
|
||||
|
||||
// Restore original flags after test
|
||||
defer func() {
|
||||
*GitFlag = origGitFlag
|
||||
*ResetFlag = origResetFlag
|
||||
*LogLevel = origLogLevel
|
||||
}()
|
||||
|
||||
// Test cases
|
||||
tests := []struct {
|
||||
name string
|
||||
args []string
|
||||
gitFlag bool
|
||||
resetFlag bool
|
||||
logLevel string
|
||||
shouldError bool
|
||||
}{
|
||||
{
|
||||
name: "Valid command",
|
||||
args: []string{"pattern", "expr", "file1", "file2"},
|
||||
gitFlag: false,
|
||||
resetFlag: false,
|
||||
logLevel: "INFO",
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "Not enough args",
|
||||
args: []string{"pattern", "expr"},
|
||||
gitFlag: false,
|
||||
resetFlag: false,
|
||||
logLevel: "INFO",
|
||||
shouldError: true,
|
||||
},
|
||||
{
|
||||
name: "With git flag",
|
||||
args: []string{"pattern", "expr", "file1"},
|
||||
gitFlag: true,
|
||||
resetFlag: false,
|
||||
logLevel: "INFO",
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "With reset flag (forces git flag)",
|
||||
args: []string{"pattern", "expr", "file1"},
|
||||
gitFlag: false,
|
||||
resetFlag: true,
|
||||
logLevel: "INFO",
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "With custom log level",
|
||||
args: []string{"pattern", "expr", "file1"},
|
||||
gitFlag: false,
|
||||
resetFlag: false,
|
||||
logLevel: "DEBUG",
|
||||
shouldError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tests {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
// Set flags for this test case
|
||||
*GitFlag = tc.gitFlag
|
||||
*ResetFlag = tc.resetFlag
|
||||
*LogLevel = tc.logLevel
|
||||
|
||||
commands, err := LoadCommandFromArgs(tc.args)
|
||||
|
||||
if tc.shouldError {
|
||||
if err == nil {
|
||||
t.Errorf("Expected an error but got none")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(commands) != 1 {
|
||||
t.Errorf("Expected 1 command, got %d", len(commands))
|
||||
return
|
||||
}
|
||||
|
||||
cmd := commands[0]
|
||||
|
||||
// Check command properties
|
||||
if cmd.Regex != tc.args[0] {
|
||||
t.Errorf("Expected pattern %q, got %q", tc.args[0], cmd.Regex)
|
||||
}
|
||||
|
||||
if cmd.Lua != tc.args[1] {
|
||||
t.Errorf("Expected LuaExpr %q, got %q", tc.args[1], cmd.Lua)
|
||||
}
|
||||
|
||||
if len(cmd.Files) != len(tc.args)-2 {
|
||||
t.Errorf("Expected %d files, got %d", len(tc.args)-2, len(cmd.Files))
|
||||
}
|
||||
|
||||
// When reset is true, git should be true regardless of what was set
|
||||
expectedGit := tc.gitFlag || tc.resetFlag
|
||||
if cmd.Git != expectedGit {
|
||||
t.Errorf("Expected Git flag %v, got %v", expectedGit, cmd.Git)
|
||||
}
|
||||
|
||||
if cmd.Reset != tc.resetFlag {
|
||||
t.Errorf("Expected Reset flag %v, got %v", tc.resetFlag, cmd.Reset)
|
||||
}
|
||||
|
||||
if cmd.LogLevel != tc.logLevel {
|
||||
t.Errorf("Expected LogLevel %q, got %q", tc.logLevel, cmd.LogLevel)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Successfully unmarshal valid YAML data into ModifyCommand slice
|
||||
func TestLoadCommandsFromCookFileSuccess(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`
|
||||
- name: command1
|
||||
pattern: "*.txt"
|
||||
lua: replace
|
||||
- name: command2
|
||||
pattern: "*.go"
|
||||
lua: delete
|
||||
commands:
|
||||
- name: command1
|
||||
regex: "*.txt"
|
||||
lua: replace
|
||||
- name: command2
|
||||
regex: "*.go"
|
||||
lua: delete
|
||||
`)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -419,17 +285,18 @@ func TestLoadCommandsFromCookFileWithComments(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`
|
||||
# This is a comment
|
||||
- name: command1
|
||||
pattern: "*.txt"
|
||||
lua: replace
|
||||
# Another comment
|
||||
- name: command2
|
||||
pattern: "*.go"
|
||||
lua: delete
|
||||
commands:
|
||||
- name: command1
|
||||
regex: "*.txt"
|
||||
lua: replace
|
||||
# Another comment
|
||||
- name: command2
|
||||
regex: "*.go"
|
||||
lua: delete
|
||||
`)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -445,10 +312,10 @@ func TestLoadCommandsFromCookFileWithComments(t *testing.T) {
|
||||
// Handle different YAML formatting styles (flow vs block)
|
||||
func TestLoadCommandsFromCookFileWithFlowStyle(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`[ { name: command1, pattern: "*.txt", lua: replace }, { name: command2, pattern: "*.go", lua: delete } ]`)
|
||||
yamlData := []byte(`commands: [ { name: command1, regex: "*.txt", lua: replace }, { name: command2, regex: "*.go", lua: delete } ]`)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -461,6 +328,32 @@ func TestLoadCommandsFromCookFileWithFlowStyle(t *testing.T) {
|
||||
assert.Equal(t, "delete", commands[1].Lua)
|
||||
}
|
||||
|
||||
func TestLoadCommandsFromCookFileRootSequence(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`
|
||||
- name: command1
|
||||
regex: "*.txt"
|
||||
lua: replace
|
||||
- name: command2
|
||||
regex: "*.go"
|
||||
lua: delete
|
||||
`)
|
||||
|
||||
// Act
|
||||
commands, variables, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 2)
|
||||
assert.Nil(t, variables)
|
||||
assert.Equal(t, "command1", commands[0].Name)
|
||||
assert.Equal(t, "*.txt", commands[0].Regex)
|
||||
assert.Equal(t, "replace", commands[0].Lua)
|
||||
assert.Equal(t, "command2", commands[1].Name)
|
||||
assert.Equal(t, "*.go", commands[1].Regex)
|
||||
assert.Equal(t, "delete", commands[1].Lua)
|
||||
}
|
||||
|
||||
// Handle nil or empty cookFileData (should return error)
|
||||
func TestLoadCommandsFromCookFileNilOrEmptyData(t *testing.T) {
|
||||
// Arrange
|
||||
@@ -468,8 +361,8 @@ func TestLoadCommandsFromCookFileNilOrEmptyData(t *testing.T) {
|
||||
emptyData := []byte{}
|
||||
|
||||
// Act
|
||||
commandsNil, errNil := LoadCommandsFromCookFile(nilData)
|
||||
commandsEmpty, errEmpty := LoadCommandsFromCookFile(emptyData)
|
||||
commandsNil, _, errNil := LoadCommandsFromCookFile(nilData)
|
||||
commandsEmpty, _, errEmpty := LoadCommandsFromCookFile(emptyData)
|
||||
|
||||
// Assert
|
||||
assert.Nil(t, errNil)
|
||||
@@ -484,7 +377,7 @@ func TestLoadCommandsFromCookFileEmptyData(t *testing.T) {
|
||||
yamlData := []byte(``)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -495,19 +388,20 @@ func TestLoadCommandsFromCookFileEmptyData(t *testing.T) {
|
||||
func TestLoadCommandsFromCookFileWithMultipleEntries(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`
|
||||
- name: command1
|
||||
pattern: "*.txt"
|
||||
lua: replace
|
||||
- name: command2
|
||||
pattern: "*.go"
|
||||
lua: delete
|
||||
- name: command3
|
||||
pattern: "*.md"
|
||||
lua: append
|
||||
commands:
|
||||
- name: command1
|
||||
regex: "*.txt"
|
||||
lua: replace
|
||||
- name: command2
|
||||
regex: "*.go"
|
||||
lua: delete
|
||||
- name: command3
|
||||
regex: "*.md"
|
||||
lua: append
|
||||
`)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -523,29 +417,30 @@ func TestLoadCommandsFromCookFileWithMultipleEntries(t *testing.T) {
|
||||
assert.Equal(t, "append", commands[2].Lua)
|
||||
}
|
||||
|
||||
func TestLoadCommandsFromCookFileLegitExample(t *testing.T) {
|
||||
func TestLoadCommandsFromCookFileValidExample(t *testing.T) {
|
||||
// Arrange
|
||||
yamlData := []byte(`
|
||||
- name: crewlayabout
|
||||
pattern: '<Talent identifier="crewlayabout">!anyvalue="(?<repairspeedpenalty>!num)"!anyvalue="(?<skillpenalty>!num)"!anyvalue="(?<repairspeedbonus>!num)"!anyvalue="(?<skillbonus>!num)"!anydistance="(?<distance>!num)"!anySkillBonus!anyvalue="(?<skillpenaltyv>!num)"!anyvalue="(?<skillpenaltyv1>!num)"!anyvalue="(?<skillpenaltyv2>!num)"!anyvalue="(?<skillpenaltyv3>!num)"!anyvalue="(?<skillpenaltyv4>!num)"!anyvalue="(?<repairspeedpenaltyv>!num)'
|
||||
lua: |
|
||||
repairspeedpenalty=round(repairspeedpenalty/2, 2)
|
||||
skillpenalty=round(skillpenalty/2, 0)
|
||||
repairspeedbonus=round(repairspeedbonus*2, 2)
|
||||
skillbonus=round(skillbonus*2, 0)
|
||||
distance=round(distance*2, 0)
|
||||
skillpenaltyv=skillpenalty
|
||||
skillpenaltyv1=skillpenalty
|
||||
skillpenaltyv2=skillpenalty
|
||||
skillpenaltyv3=skillpenalty
|
||||
skillpenaltyv4=skillpenalty
|
||||
repairspeedpenaltyv=round(-repairspeedpenalty/100, 2)
|
||||
files:
|
||||
- '**/TalentsAssistant.xml'
|
||||
commands:
|
||||
- name: crewlayabout
|
||||
pattern: '<Talent identifier="crewlayabout">!anyvalue="(?<repairspeedpenalty>!num)"!anyvalue="(?<skillpenalty>!num)"!anyvalue="(?<repairspeedbonus>!num)"!anyvalue="(?<skillbonus>!num)"!anydistance="(?<distance>!num)"!anySkillBonus!anyvalue="(?<skillpenaltyv>!num)"!anyvalue="(?<skillpenaltyv1>!num)"!anyvalue="(?<skillpenaltyv2>!num)"!anyvalue="(?<skillpenaltyv3>!num)"!anyvalue="(?<skillpenaltyv4>!num)"!anyvalue="(?<repairspeedpenaltyv>!num)'
|
||||
lua: |
|
||||
repairspeedpenalty=round(repairspeedpenalty/2, 2)
|
||||
skillpenalty=round(skillpenalty/2, 0)
|
||||
repairspeedbonus=round(repairspeedbonus*2, 2)
|
||||
skillbonus=round(skillbonus*2, 0)
|
||||
distance=round(distance*2, 0)
|
||||
skillpenaltyv=skillpenalty
|
||||
skillpenaltyv1=skillpenalty
|
||||
skillpenaltyv2=skillpenalty
|
||||
skillpenaltyv3=skillpenalty
|
||||
skillpenaltyv4=skillpenalty
|
||||
repairspeedpenaltyv=round(-repairspeedpenalty/100, 2)
|
||||
files:
|
||||
- '**/TalentsAssistant.xml'
|
||||
`)
|
||||
|
||||
// Act
|
||||
commands, err := LoadCommandsFromCookFile(yamlData)
|
||||
commands, _, err := LoadCommandsFromCookFile(yamlData)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
@@ -553,157 +448,6 @@ func TestLoadCommandsFromCookFileLegitExample(t *testing.T) {
|
||||
assert.Equal(t, "crewlayabout", commands[0].Name)
|
||||
}
|
||||
|
||||
// Valid command with minimum 3 arguments returns a ModifyCommand slice with correct values
|
||||
func TestLoadCommandFromArgsWithValidArguments(t *testing.T) {
|
||||
// Setup
|
||||
oldGitFlag := GitFlag
|
||||
oldResetFlag := ResetFlag
|
||||
oldLogLevel := LogLevel
|
||||
|
||||
gitValue := true
|
||||
resetValue := false
|
||||
logLevelValue := "info"
|
||||
|
||||
GitFlag = &gitValue
|
||||
ResetFlag = &resetValue
|
||||
LogLevel = &logLevelValue
|
||||
|
||||
defer func() {
|
||||
GitFlag = oldGitFlag
|
||||
ResetFlag = oldResetFlag
|
||||
LogLevel = oldLogLevel
|
||||
}()
|
||||
|
||||
args := []string{"*.go", "return x", "file1.go", "file2.go"}
|
||||
|
||||
// Execute
|
||||
commands, err := LoadCommandFromArgs(args)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 1)
|
||||
assert.Equal(t, "*.go", commands[0].Regex)
|
||||
assert.Equal(t, "return x", commands[0].Lua)
|
||||
assert.Equal(t, []string{"file1.go", "file2.go"}, commands[0].Files)
|
||||
assert.Equal(t, true, commands[0].Git)
|
||||
assert.Equal(t, false, commands[0].Reset)
|
||||
assert.Equal(t, "info", commands[0].LogLevel)
|
||||
}
|
||||
|
||||
// Less than 3 arguments returns an error with appropriate message
|
||||
func TestLoadCommandFromArgsWithInsufficientArguments(t *testing.T) {
|
||||
// Setup
|
||||
oldGitFlag := GitFlag
|
||||
oldResetFlag := ResetFlag
|
||||
oldLogLevel := LogLevel
|
||||
|
||||
gitValue := false
|
||||
resetValue := false
|
||||
logLevelValue := "info"
|
||||
|
||||
GitFlag = &gitValue
|
||||
ResetFlag = &resetValue
|
||||
LogLevel = &logLevelValue
|
||||
|
||||
defer func() {
|
||||
GitFlag = oldGitFlag
|
||||
ResetFlag = oldResetFlag
|
||||
LogLevel = oldLogLevel
|
||||
}()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
args []string
|
||||
}{
|
||||
{"empty args", []string{}},
|
||||
{"one arg", []string{"*.go"}},
|
||||
{"two args", []string{"*.go", "return x"}},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
// Execute
|
||||
commands, err := LoadCommandFromArgs(tc.args)
|
||||
|
||||
// Assert
|
||||
assert.Error(t, err)
|
||||
assert.Nil(t, commands)
|
||||
assert.Contains(t, err.Error(), "at least 3 arguments are required")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern, Lua, and Files fields are correctly populated from args
|
||||
func TestLoadCommandFromArgsPopulatesFieldsCorrectly(t *testing.T) {
|
||||
// Setup
|
||||
oldGitFlag := GitFlag
|
||||
oldResetFlag := ResetFlag
|
||||
oldLogLevel := LogLevel
|
||||
|
||||
gitValue := false
|
||||
resetValue := false
|
||||
logLevelValue := "debug"
|
||||
|
||||
GitFlag = &gitValue
|
||||
ResetFlag = &resetValue
|
||||
LogLevel = &logLevelValue
|
||||
|
||||
defer func() {
|
||||
GitFlag = oldGitFlag
|
||||
ResetFlag = oldResetFlag
|
||||
LogLevel = oldLogLevel
|
||||
}()
|
||||
|
||||
args := []string{"*.txt", "print('Hello')", "file1.txt", "file2.txt"}
|
||||
|
||||
// Execute
|
||||
commands, err := LoadCommandFromArgs(args)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 1)
|
||||
assert.Equal(t, "*.txt", commands[0].Regex)
|
||||
assert.Equal(t, "print('Hello')", commands[0].Lua)
|
||||
assert.Equal(t, []string{"file1.txt", "file2.txt"}, commands[0].Files)
|
||||
assert.Equal(t, false, commands[0].Git)
|
||||
assert.Equal(t, false, commands[0].Reset)
|
||||
assert.Equal(t, "debug", commands[0].LogLevel)
|
||||
}
|
||||
|
||||
// Git flag is set to true when ResetFlag is true
|
||||
func TestLoadCommandFromArgsSetsGitFlagWhenResetFlagIsTrue(t *testing.T) {
|
||||
// Setup
|
||||
oldGitFlag := GitFlag
|
||||
oldResetFlag := ResetFlag
|
||||
oldLogLevel := LogLevel
|
||||
|
||||
gitValue := false
|
||||
resetValue := true
|
||||
logLevelValue := "info"
|
||||
|
||||
GitFlag = &gitValue
|
||||
ResetFlag = &resetValue
|
||||
LogLevel = &logLevelValue
|
||||
|
||||
defer func() {
|
||||
GitFlag = oldGitFlag
|
||||
ResetFlag = oldResetFlag
|
||||
LogLevel = oldLogLevel
|
||||
}()
|
||||
|
||||
args := []string{"*.go", "return x", "file1.go", "file2.go"}
|
||||
|
||||
// Execute
|
||||
commands, err := LoadCommandFromArgs(args)
|
||||
|
||||
// Assert
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, commands, 1)
|
||||
assert.Equal(t, true, commands[0].Git)
|
||||
}
|
||||
|
||||
// TODO: Figure out how to mock shit
|
||||
// Can't be asked doing that right now...
|
||||
// Successfully loads commands from multiple YAML files in the current directory
|
||||
// func TestLoadCommandsFromCookFilesSuccessfully(t *testing.T) {
|
||||
// // Setup test directory with mock YAML files
|
||||
@@ -803,7 +547,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
}
|
||||
|
||||
// Execute function
|
||||
commands, err := LoadCommandsFromCookFiles("")
|
||||
commands, _, err := LoadCommandsFromCookFiles("")
|
||||
|
||||
// Assertions
|
||||
if err != nil {
|
||||
@@ -869,7 +613,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err != nil {
|
||||
@@ -953,7 +697,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err != nil {
|
||||
@@ -965,6 +709,58 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
// }
|
||||
|
||||
func TestExpandGlobsMemoization(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "expand-globs-memo-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
err = os.WriteFile(filepath.Join(tmpDir, "test1.go"), []byte("test"), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
err = os.WriteFile(filepath.Join(tmpDir, "test2.go"), []byte("test"), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
|
||||
origDir, _ := os.Getwd()
|
||||
os.Chdir(tmpDir)
|
||||
defer os.Chdir(origDir)
|
||||
|
||||
cwd, _ := os.Getwd()
|
||||
resolvedCwd := ResolvePath(cwd)
|
||||
pattern1 := resolvedCwd + "/*.go"
|
||||
patterns := map[string]struct{}{pattern1: {}}
|
||||
|
||||
globMemoTable = make(map[string][]string)
|
||||
|
||||
files1, err := ExpandGlobs(patterns)
|
||||
if err != nil {
|
||||
t.Fatalf("ExpandGlobs failed: %v", err)
|
||||
}
|
||||
if len(files1) != 2 {
|
||||
t.Fatalf("Expected 2 files, got %d", len(files1))
|
||||
}
|
||||
|
||||
if len(globMemoTable) != 1 {
|
||||
t.Fatalf("Expected 1 entry in memo table, got %d", len(globMemoTable))
|
||||
}
|
||||
|
||||
files2, err := ExpandGlobs(patterns)
|
||||
if err != nil {
|
||||
t.Fatalf("ExpandGlobs failed: %v", err)
|
||||
}
|
||||
if len(files2) != 2 {
|
||||
t.Fatalf("Expected 2 files, got %d", len(files2))
|
||||
}
|
||||
|
||||
if len(globMemoTable) != 1 {
|
||||
t.Fatalf("Expected memo table to still have 1 entry, got %d", len(globMemoTable))
|
||||
}
|
||||
}
|
||||
|
||||
// LoadCommandsFromCookFile returns an error for a malformed YAML file
|
||||
// func TestLoadCommandsFromCookFilesMalformedYAML(t *testing.T) {
|
||||
// // Setup test directory with mock YAML files
|
||||
@@ -1060,7 +856,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err == nil {
|
||||
@@ -1127,7 +923,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err != nil {
|
||||
@@ -1197,7 +993,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err != nil {
|
||||
@@ -1255,7 +1051,7 @@ func TestLoadCommandsFromCookFilesNoYamlFiles(t *testing.T) {
|
||||
// }
|
||||
//
|
||||
// // Execute function
|
||||
// commands, err := LoadCommandsFromCookFiles("")
|
||||
// commands, _, err := LoadCommandsFromCookFiles("")
|
||||
//
|
||||
// // Assertions
|
||||
// if err != nil {
|
||||
|
||||
76
utils/path.go
Normal file
76
utils/path.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// ResolvePath resolves a path to an absolute path, handling ~ expansion and cleaning
|
||||
func ResolvePath(path string) string {
|
||||
resolvelogger := logger.WithPrefix("ResolvePath").WithField("inputPath", path)
|
||||
resolvelogger.Trace("Resolving path: %q", path)
|
||||
|
||||
// Handle empty path
|
||||
if path == "" {
|
||||
resolvelogger.Trace("Empty path, returning empty string")
|
||||
return ""
|
||||
}
|
||||
|
||||
// Check if path is absolute
|
||||
if filepath.IsAbs(path) {
|
||||
resolvelogger.Trace("Path is already absolute: %q", path)
|
||||
cleaned := filepath.ToSlash(filepath.Clean(path))
|
||||
resolvelogger.Trace("Cleaned absolute path: %q", cleaned)
|
||||
return cleaned
|
||||
}
|
||||
|
||||
// Handle ~ expansion
|
||||
if strings.HasPrefix(path, "~") {
|
||||
homeDir, _ := os.UserHomeDir()
|
||||
if strings.HasPrefix(path, "~/") || strings.HasPrefix(path, "~\\") {
|
||||
path = filepath.Join(homeDir, path[2:])
|
||||
} else if path == "~" {
|
||||
path = homeDir
|
||||
} else {
|
||||
// ~something (like ~~), treat first ~ as home expansion, rest as literal
|
||||
path = homeDir + path[1:]
|
||||
}
|
||||
resolvelogger.Trace("Expanded ~ to home directory: %q", path)
|
||||
}
|
||||
|
||||
// Make absolute if not already
|
||||
if !filepath.IsAbs(path) {
|
||||
absPath, err := filepath.Abs(path)
|
||||
if err != nil {
|
||||
resolvelogger.Error("Failed to get absolute path: %v", err)
|
||||
return filepath.ToSlash(filepath.Clean(path))
|
||||
}
|
||||
resolvelogger.Trace("Made path absolute: %q -> %q", path, absPath)
|
||||
path = absPath
|
||||
}
|
||||
|
||||
// Clean the path and normalize to forward slashes for consistency
|
||||
cleaned := filepath.ToSlash(filepath.Clean(path))
|
||||
resolvelogger.Trace("Final cleaned path: %q", cleaned)
|
||||
return cleaned
|
||||
}
|
||||
|
||||
// GetRelativePath returns the relative path from base to target
|
||||
func GetRelativePath(base, target string) (string, error) {
|
||||
getRelativelogger := logger.WithPrefix("GetRelativePath")
|
||||
getRelativelogger.Debug("Getting relative path from %q to %q", base, target)
|
||||
|
||||
relPath, err := filepath.Rel(base, target)
|
||||
if err != nil {
|
||||
getRelativelogger.Error("Failed to get relative path: %v", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Use forward slashes for consistency
|
||||
relPath = filepath.ToSlash(relPath)
|
||||
getRelativelogger.Debug("Relative path: %q", relPath)
|
||||
return relPath, nil
|
||||
}
|
||||
386
utils/path_test.go
Normal file
386
utils/path_test.go
Normal file
@@ -0,0 +1,386 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestResolvePath(t *testing.T) {
|
||||
// Save original working directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
|
||||
// Create a temporary directory for testing
|
||||
tmpDir, err := os.MkdirTemp("", "path_test")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string
|
||||
setup func() // Optional setup function
|
||||
}{
|
||||
{
|
||||
name: "Empty path",
|
||||
input: "",
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "Already absolute path",
|
||||
input: func() string {
|
||||
if runtime.GOOS == "windows" {
|
||||
return "C:/absolute/path/file.txt"
|
||||
}
|
||||
return "/absolute/path/file.txt"
|
||||
}(),
|
||||
expected: func() string {
|
||||
if runtime.GOOS == "windows" {
|
||||
return "C:/absolute/path/file.txt"
|
||||
}
|
||||
return "/absolute/path/file.txt"
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Relative path",
|
||||
input: "relative/file.txt",
|
||||
expected: func() string {
|
||||
abs, _ := filepath.Abs("relative/file.txt")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Tilde expansion - home only",
|
||||
input: "~",
|
||||
expected: func() string {
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
return strings.ReplaceAll(filepath.Clean(home), "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Tilde expansion - with subpath",
|
||||
input: "~/Documents/file.txt",
|
||||
expected: func() string {
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
expected := filepath.Join(home, "Documents", "file.txt")
|
||||
return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Path normalization - double slashes",
|
||||
input: "path//to//file.txt",
|
||||
expected: func() string {
|
||||
abs, _ := filepath.Abs("path/to/file.txt")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Path normalization - . and ..",
|
||||
input: "path/./to/../file.txt",
|
||||
expected: func() string {
|
||||
abs, _ := filepath.Abs("path/file.txt")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Windows backslash normalization",
|
||||
input: "path\\to\\file.txt",
|
||||
expected: func() string {
|
||||
abs, _ := filepath.Abs("path/to/file.txt")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Mixed separators with tilde",
|
||||
input: "~/Documents\\file.txt",
|
||||
expected: func() string {
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
expected := filepath.Join(home, "Documents", "file.txt")
|
||||
return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Relative path from current directory",
|
||||
input: "./file.txt",
|
||||
expected: func() string {
|
||||
abs, _ := filepath.Abs("file.txt")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if tt.setup != nil {
|
||||
tt.setup()
|
||||
}
|
||||
|
||||
result := ResolvePath(tt.input)
|
||||
assert.Equal(t, tt.expected, result, "ResolvePath(%q) = %q, want %q", tt.input, result, tt.expected)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolvePathWithWorkingDirectoryChange(t *testing.T) {
|
||||
// Save original working directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
|
||||
// Create temporary directories
|
||||
tmpDir, err := os.MkdirTemp("", "path_test")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
subDir := filepath.Join(tmpDir, "subdir")
|
||||
err = os.MkdirAll(subDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Change to subdirectory
|
||||
err = os.Chdir(subDir)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Test relative path resolution from new working directory
|
||||
result := ResolvePath("../test.txt")
|
||||
expected := filepath.Join(tmpDir, "test.txt")
|
||||
expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||
|
||||
assert.Equal(t, expected, result)
|
||||
}
|
||||
|
||||
func TestResolvePathComplexTilde(t *testing.T) {
|
||||
// Test complex tilde patterns
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
|
||||
if home == "" {
|
||||
t.Skip("Cannot determine home directory for tilde expansion tests")
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
input string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
input: "~",
|
||||
expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
|
||||
},
|
||||
{
|
||||
input: "~/",
|
||||
expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
|
||||
},
|
||||
{
|
||||
input: "~~",
|
||||
expected: func() string {
|
||||
// ~~ should be treated as ~ followed by ~ (tilde expansion)
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
if home != "" {
|
||||
// First ~ gets expanded, second ~ remains
|
||||
return strings.ReplaceAll(filepath.Clean(home+"~"), "\\", "/")
|
||||
}
|
||||
abs, _ := filepath.Abs("~~")
|
||||
return strings.ReplaceAll(abs, "\\", "/")
|
||||
}(),
|
||||
},
|
||||
{
|
||||
input: func() string {
|
||||
if runtime.GOOS == "windows" {
|
||||
return "C:/not/tilde/path"
|
||||
}
|
||||
return "/not/tilde/path"
|
||||
}(),
|
||||
expected: func() string {
|
||||
if runtime.GOOS == "windows" {
|
||||
return "C:/not/tilde/path"
|
||||
}
|
||||
return "/not/tilde/path"
|
||||
}(),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run("Complex tilde: "+tt.input, func(t *testing.T) {
|
||||
result := ResolvePath(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
func TestGetRelativePath(t *testing.T) {
|
||||
// Create temporary directories for testing
|
||||
tmpDir, err := os.MkdirTemp("", "relative_path_test")
|
||||
assert.NoError(t, err)
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
baseDir := filepath.Join(tmpDir, "base")
|
||||
targetDir := filepath.Join(tmpDir, "target")
|
||||
subDir := filepath.Join(targetDir, "subdir")
|
||||
|
||||
err = os.MkdirAll(baseDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
err = os.MkdirAll(subDir, 0755)
|
||||
assert.NoError(t, err)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
base string
|
||||
target string
|
||||
expected string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "Target is subdirectory of base",
|
||||
base: baseDir,
|
||||
target: filepath.Join(baseDir, "subdir"),
|
||||
expected: "subdir",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Target is parent of base",
|
||||
base: filepath.Join(baseDir, "subdir"),
|
||||
target: baseDir,
|
||||
expected: "..",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Target is sibling directory",
|
||||
base: baseDir,
|
||||
target: targetDir,
|
||||
expected: "../target",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "Same directory",
|
||||
base: baseDir,
|
||||
target: baseDir,
|
||||
expected: ".",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "With tilde expansion",
|
||||
base: baseDir,
|
||||
target: filepath.Join(baseDir, "file.txt"),
|
||||
expected: "file.txt",
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := GetRelativePath(tt.base, tt.target)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolvePathRegression(t *testing.T) {
|
||||
// This test specifically addresses the original bug:
|
||||
// "~ is NOT BEING FUCKING RESOLVED"
|
||||
|
||||
home := os.Getenv("HOME")
|
||||
if home == "" && runtime.GOOS == "windows" {
|
||||
home = os.Getenv("USERPROFILE")
|
||||
}
|
||||
|
||||
if home == "" {
|
||||
t.Skip("Cannot determine home directory for regression test")
|
||||
}
|
||||
|
||||
// Test the exact pattern from the bug report
|
||||
testPath := "~/Seafile/activitywatch/sync.yml"
|
||||
result := ResolvePath(testPath)
|
||||
expected := filepath.Join(home, "Seafile", "activitywatch", "sync.yml")
|
||||
expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
|
||||
|
||||
assert.Equal(t, expected, result, "Tilde expansion bug not fixed!")
|
||||
assert.NotContains(t, result, "~", "Tilde still present in resolved path!")
|
||||
// Convert both to forward slashes for comparison
|
||||
homeForwardSlash := strings.ReplaceAll(home, "\\", "/")
|
||||
assert.Contains(t, result, homeForwardSlash, "Home directory not found in resolved path!")
|
||||
}
|
||||
|
||||
func TestResolvePathEdgeCases(t *testing.T) {
|
||||
// Save original working directory
|
||||
origDir, _ := os.Getwd()
|
||||
defer os.Chdir(origDir)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
setup func()
|
||||
shouldPanic bool
|
||||
}{
|
||||
{
|
||||
name: "Just dot",
|
||||
input: ".",
|
||||
},
|
||||
{
|
||||
name: "Just double dot",
|
||||
input: "..",
|
||||
},
|
||||
{
|
||||
name: "Triple dot",
|
||||
input: "...",
|
||||
},
|
||||
{
|
||||
name: "Multiple leading dots",
|
||||
input: "./.././../file.txt",
|
||||
},
|
||||
{
|
||||
name: "Path with spaces",
|
||||
input: "path with spaces/file.txt",
|
||||
},
|
||||
{
|
||||
name: "Very long relative path",
|
||||
input: strings.Repeat("../", 10) + "file.txt",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if tt.setup != nil {
|
||||
tt.setup()
|
||||
}
|
||||
|
||||
if tt.shouldPanic {
|
||||
assert.Panics(t, func() {
|
||||
ResolvePath(tt.input)
|
||||
})
|
||||
} else {
|
||||
// Should not panic
|
||||
assert.NotPanics(t, func() {
|
||||
ResolvePath(tt.input)
|
||||
})
|
||||
// Result should be a valid absolute path
|
||||
result := ResolvePath(tt.input)
|
||||
if tt.input != "" {
|
||||
assert.True(t, filepath.IsAbs(result) || result == "", "Result should be absolute or empty")
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,10 @@ package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"modify/logger"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
type ReplaceCommand struct {
|
||||
@@ -13,45 +15,76 @@ type ReplaceCommand struct {
|
||||
}
|
||||
|
||||
func ExecuteModifications(modifications []ReplaceCommand, fileData string) (string, int) {
|
||||
return executeModificationsWithLogger(logger.WithPrefix("ExecuteModifications"), modifications, fileData)
|
||||
}
|
||||
|
||||
func ExecuteModificationsForFile(file string, modifications []ReplaceCommand, fileData string) (string, int) {
|
||||
filePrefix := file
|
||||
if filePrefix == "" {
|
||||
filePrefix = "<unknown-file>"
|
||||
} else {
|
||||
filePrefix = filepath.Clean(filePrefix)
|
||||
}
|
||||
return executeModificationsWithLogger(logger.WithPrefix("ExecuteModifications").WithField("file", fmt.Sprintf("%q", filePrefix)), modifications, fileData)
|
||||
}
|
||||
|
||||
func executeModificationsWithLogger(executeModificationsLogger *logger.Logger, modifications []ReplaceCommand, fileData string) (string, int) {
|
||||
executeModificationsLogger.Debug("Executing a batch of text modifications")
|
||||
executeModificationsLogger.Trace("Number of modifications: %d, Original file data length: %d", len(modifications), len(fileData))
|
||||
var err error
|
||||
|
||||
sort.Slice(modifications, func(i, j int) bool {
|
||||
return modifications[i].From > modifications[j].From
|
||||
})
|
||||
logger.Trace("Preparing to apply %d replacement commands in reverse order", len(modifications))
|
||||
executeModificationsLogger.Debug("Modifications sorted in reverse order for safe replacement")
|
||||
executeModificationsLogger.Trace("Sorted modifications: %v", modifications)
|
||||
|
||||
executed := 0
|
||||
for _, modification := range modifications {
|
||||
for idx, modification := range modifications {
|
||||
executeModificationsLogger.Debug("Applying modification %d/%d", idx+1, len(modifications))
|
||||
executeModificationsLogger.Trace("Current modification details: From=%d, To=%d, With=%q", modification.From, modification.To, modification.With)
|
||||
fileData, err = modification.Execute(fileData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to execute replacement: %v", err)
|
||||
executeModificationsLogger.Error("Failed to execute replacement for modification %+v: %v", modification, err)
|
||||
continue
|
||||
}
|
||||
executed++
|
||||
executeModificationsLogger.Trace("File data length after modification: %d", len(fileData))
|
||||
}
|
||||
logger.Info("Successfully applied %d text replacements", executed)
|
||||
executeModificationsLogger.Info("Successfully applied %d text replacements", executed)
|
||||
return fileData, executed
|
||||
}
|
||||
|
||||
func (m *ReplaceCommand) Execute(fileDataStr string) (string, error) {
|
||||
executeLogger := logger.WithPrefix("Execute").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With))
|
||||
executeLogger.Debug("Attempting to execute single replacement")
|
||||
err := m.Validate(len(fileDataStr))
|
||||
if err != nil {
|
||||
executeLogger.Error("Failed to validate modification: %v", err)
|
||||
return fileDataStr, fmt.Errorf("failed to validate modification: %v", err)
|
||||
}
|
||||
|
||||
logger.Trace("Replace pos %d-%d with %q", m.From, m.To, m.With)
|
||||
return fileDataStr[:m.From] + m.With + fileDataStr[m.To:], nil
|
||||
executeLogger.Trace("Applying replacement: fileDataStr[:%d] + %q + fileDataStr[%d:]", m.From, m.With, m.To)
|
||||
result := fileDataStr[:m.From] + m.With + fileDataStr[m.To:]
|
||||
executeLogger.Trace("Replacement executed. Result length: %d", len(result))
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (m *ReplaceCommand) Validate(maxsize int) error {
|
||||
validateLogger := logger.WithPrefix("Validate").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With)).WithField("maxSize", maxsize)
|
||||
validateLogger.Debug("Validating replacement command against max size")
|
||||
if m.To < m.From {
|
||||
validateLogger.Error("Validation failed: 'To' (%d) is less than 'From' (%d)", m.To, m.From)
|
||||
return fmt.Errorf("command to is less than from: %v", m)
|
||||
}
|
||||
if m.From > maxsize || m.To > maxsize {
|
||||
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is greater than max size (%d)", m.From, m.To, maxsize)
|
||||
return fmt.Errorf("command from or to is greater than replacement length: %v", m)
|
||||
}
|
||||
if m.From < 0 || m.To < 0 {
|
||||
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is less than 0", m.From, m.To)
|
||||
return fmt.Errorf("command from or to is less than 0: %v", m)
|
||||
}
|
||||
validateLogger.Debug("Modification command validated successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -182,7 +182,7 @@ func TestReverseOrderExecution(t *testing.T) {
|
||||
}
|
||||
|
||||
// Replace text in the middle of a string with new content
|
||||
func TestReplaceCommandExecute_ReplacesTextInMiddle(t *testing.T) {
|
||||
func TestReplaceCommandExecuteReplacesTextInMiddle(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 6,
|
||||
@@ -201,7 +201,7 @@ func TestReplaceCommandExecute_ReplacesTextInMiddle(t *testing.T) {
|
||||
}
|
||||
|
||||
// Replace with empty string (deletion)
|
||||
func TestReplaceCommandExecute_DeletesText(t *testing.T) {
|
||||
func TestReplaceCommandExecuteDeletesText(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 6,
|
||||
@@ -220,7 +220,7 @@ func TestReplaceCommandExecute_DeletesText(t *testing.T) {
|
||||
}
|
||||
|
||||
// Replace with longer string than original segment
|
||||
func TestReplaceCommandExecute_WithLongerString(t *testing.T) {
|
||||
func TestReplaceCommandExecuteHandlesLongerReplacement(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 6,
|
||||
@@ -239,7 +239,7 @@ func TestReplaceCommandExecute_WithLongerString(t *testing.T) {
|
||||
}
|
||||
|
||||
// From and To values are the same (zero-length replacement)
|
||||
func TestReplaceCommandExecute_ZeroLengthReplacement(t *testing.T) {
|
||||
func TestReplaceCommandExecuteInsertsAtZeroLengthRange(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 5,
|
||||
@@ -258,7 +258,7 @@ func TestReplaceCommandExecute_ZeroLengthReplacement(t *testing.T) {
|
||||
}
|
||||
|
||||
// From value is greater than To value
|
||||
func TestReplaceCommandExecute_FromGreaterThanTo(t *testing.T) {
|
||||
func TestReplaceCommandExecuteRejectsFromGreaterThanTo(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 10,
|
||||
@@ -277,7 +277,7 @@ func TestReplaceCommandExecute_FromGreaterThanTo(t *testing.T) {
|
||||
}
|
||||
|
||||
// From or To values exceed string length
|
||||
func TestReplaceCommandExecute_FromOrToExceedsLength(t *testing.T) {
|
||||
func TestReplaceCommandExecuteRejectsOutOfBoundsRange(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: 5,
|
||||
@@ -296,7 +296,7 @@ func TestReplaceCommandExecute_FromOrToExceedsLength(t *testing.T) {
|
||||
}
|
||||
|
||||
// From or To values are negative
|
||||
func TestReplaceCommandExecute_NegativeFromOrTo(t *testing.T) {
|
||||
func TestReplaceCommandExecuteRejectsNegativeRange(t *testing.T) {
|
||||
// Arrange
|
||||
cmd := &ReplaceCommand{
|
||||
From: -1,
|
||||
|
||||
73
utils/summarytable.go
Normal file
73
utils/summarytable.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"sort"
|
||||
|
||||
"github.com/jedib0t/go-pretty/v6/table"
|
||||
"github.com/jedib0t/go-pretty/v6/text"
|
||||
)
|
||||
|
||||
type CommandSummaryRow struct {
|
||||
Name string
|
||||
Seen int64
|
||||
PredSkip int64
|
||||
FilesModified int64
|
||||
FilesUnchanged int64
|
||||
LuaErr int64
|
||||
ParseErr int64
|
||||
OtherErr int64
|
||||
Edits int64
|
||||
HasErrors bool
|
||||
}
|
||||
|
||||
func RenderCommandSummaryTable(out io.Writer, rows []CommandSummaryRow) {
|
||||
if len(rows) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
sort.Slice(rows, func(i, j int) bool {
|
||||
return rows[i].Name < rows[j].Name
|
||||
})
|
||||
|
||||
tw := table.NewWriter()
|
||||
tw.SetOutputMirror(out)
|
||||
tw.AppendHeader(table.Row{"Command", "Seen", "PredSkip", "FilesMod", "Unchanged", "LuaErr", "ParseErr", "OtherErr", "Edits"})
|
||||
|
||||
for _, row := range rows {
|
||||
style := text.Colors{}
|
||||
switch {
|
||||
case row.HasErrors:
|
||||
style = text.Colors{text.FgYellow}
|
||||
case row.FilesModified == 0:
|
||||
style = text.Colors{text.FgRed}
|
||||
}
|
||||
|
||||
cmdName := row.Name
|
||||
seen := fmt.Sprintf("%d", row.Seen)
|
||||
predSkip := fmt.Sprintf("%d", row.PredSkip)
|
||||
filesMod := fmt.Sprintf("%d", row.FilesModified)
|
||||
unchanged := fmt.Sprintf("%d", row.FilesUnchanged)
|
||||
luaErr := fmt.Sprintf("%d", row.LuaErr)
|
||||
parseErr := fmt.Sprintf("%d", row.ParseErr)
|
||||
otherErr := fmt.Sprintf("%d", row.OtherErr)
|
||||
edits := fmt.Sprintf("%d", row.Edits)
|
||||
|
||||
if len(style) > 0 {
|
||||
cmdName = style.Sprint(cmdName)
|
||||
seen = style.Sprint(seen)
|
||||
predSkip = style.Sprint(predSkip)
|
||||
filesMod = style.Sprint(filesMod)
|
||||
unchanged = style.Sprint(unchanged)
|
||||
luaErr = style.Sprint(luaErr)
|
||||
parseErr = style.Sprint(parseErr)
|
||||
otherErr = style.Sprint(otherErr)
|
||||
edits = style.Sprint(edits)
|
||||
}
|
||||
|
||||
tw.AppendRow(table.Row{cmdName, seen, predSkip, filesMod, unchanged, luaErr, parseErr, otherErr, edits})
|
||||
}
|
||||
|
||||
tw.Render()
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user