Compare commits: v2.0.0...bbc7c50fae
138 Commits

SHA1:
bbc7c50fae 779d1e0a0e 54581f0216 3d01822e77 4e0ca92c77 388e54b3e3 6f2e76221a e0d3b938e3
491a030bf8 bff7cc2a27 ff30b00e71 e1eb5eeaa6 2a2e11d8e0 6eb4f31127 4b58e00c26 8ffd8af13c
67861d4455 299e6d8bfe 388822e90a 91993b4548 bb69558aaa 052c670627 67fd215d0e 9ecbbff6fa
774ac0f0ca b785d24a08 22f991e72e 5518b27663 0b899dea2c 3424fea8ad ddc1d83d58 4b0a85411d
46e871b626 258dcc88e7 75bf449bed 58586395fb c5a68af5e6 b4c0284734 c5d1dad8de 4ff2ee80ee
633eebfd2a 5a31703840 162d0c758d 14d64495b6 fe6e97e832 35b3d8b099 2e3e958e15 955afc4295
2c487bc443 b77224176b a2201053c5 04cedf5ece ebb07854cc 8a86ae2f40 e8f16dda2b 513773f641
22914fe243 2d523dfe64 2629722f67 1f6c4e4976 bfd08e754e 750010b71a 9064a53820 294c04a11a
ba7ac07001 5d10178bf9 f91c2b4795 057db23d09 bf72734b90 cc30c2bdcb f453079c72 e634fe28bd
4e4b7bbd19 89eed3f847 f008efd5e1 f6def1e5a5 867b188718 aac29a4074 8a40f463f7 8d4db1da91
d41e2afe17 76457d22cf 912950d463 25326ea11b df212b7fcc f4a963760a d236811cb9 da93770334
d9f54a8354 dc8da8ab63 24262a7dca d77b13c363 a9c60a3698 66bcf21d79 e847e5c3ce 9a70c9696e
9cea103042 81d8259dfc 5c5fbac63f 3e818e61c7 001470ffe4 d88a76c4e2 d3a1f1bd96 07a5f3f1a4
e2257e082a b3fce4244d bd443067b6 a9b6f7f984 10c39b02a0 7f4392b10e 7e19cf4e2c c5fb20e96a
a8c2257f20 b63b4d1352 6a3d44ccd0 c22e6ff41f 068c64d714 2c7a4f5d97 0d7d251e76 0d8c447ff6
bb14087598 66a522aa12 1a4b4f76f2 2bfd9f951e e5092edf53 e31c0e4e8f 73d93367a0 64f690f6b4
34477b2c34 d5c08d86f5 68127fe453 872f2dd46d 4eed05c7c2 4640281fbf aba10267d1 fed140254b
db92033642 1b0b198297
3  .gitignore (vendored)
@@ -1 +1,4 @@
*.exe
.qodo
*.sqlite
testfiles
92  .vscode/launch.json (vendored)
@@ -5,16 +5,98 @@
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Launch Package",
            "name": "Launch Package (Barotrauma)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
            "args": [
                "-loglevel",
                "trace",
                "-cook",
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Payday 2)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Payday2",
            "args": [
                "-loglevel",
                "trace",
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Barotrauma cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
            "args": [
                "-loglevel",
                "trace",
                "-cook",
                "cookassistant.yml",
            ]
        },
        {
            "name": "Launch Package (Quasimorph cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Quasimorph",
            "args": [
                "cook.yml",
            ]
        },
        {
            "name": "Launch Package (Rimworld cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Rimworld/294100",
            "args": [
                "cookVehicles.yml",
            ]
        },
        {
            "name": "Launch Package (Workspace)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "args": [
                "-mode=json",
                "$..name",
                "v='pero'",
                "test.json"
                "tester.yml",
            ]
        },
        {
            "name": "Launch Package (Avorion)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Avorion/Avorion",
            "args": [
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Minecraft)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Minecraft",
            "args": [
                "cook_tacz.yml",
            ]
        }
    ]
116  README.md (new file)
@@ -0,0 +1,116 @@
# Big Chef

A Go-based tool for modifying XML, JSON, and text documents using XPath/JSONPath/Regex expressions and Lua transformations.

## Features

- **Multi-Format Processing**:
  - XML (XPath)
  - JSON (JSONPath)
  - Text (Regex)
- **Node Value Modification**: Update text values in XML elements, JSON properties, or text matches
- **Attribute Manipulation**: Modify XML attributes, JSON object keys, or regex capture groups
- **Conditional Logic**: Apply transformations based on document content
- **Complex Operations**:
  - Mathematical calculations
  - String manipulations
  - Date conversions
  - Structural changes
  - A full embedded Lua environment
- **Error Handling**: Comprehensive error detection for:
  - Invalid XML/JSON
  - Malformed XPath/JSONPath
  - Lua syntax errors
## Usage Examples

### 1. Basic Field Modification
```xml
<!-- Input -->
<price>44.95</price>

<!-- Command -->
chef -xml "//price" "v=v*2" input.xml

<!-- Output -->
<price>89.9</price>
```

### 2. Glob Pattern Support
```bash
chef -xml "//price" "v=v*2" data/**.xml
```

### 3. Attribute Update
```xml
<!-- Input -->
<item price="10.50"/>

<!-- Command -->
chef -xml "//item/@price" "v=v*2" input.xml

<!-- Output -->
<item price="21"/>
```

### 4. JSONPath Transformation
```json
// Input
{
  "products": [
    {"name": "Widget", "price": 19.99},
    {"name": "Gadget", "price": 29.99}
  ]
}

// Command
chef -json "$.products[*].price" "v=v*0.75" input.json

// Output
{
  "products": [
    {"name": "Widget", "price": 14.99},
    {"name": "Gadget", "price": 22.49}
  ]
}
```

### 5. Regex Text Replacement
Regex works slightly differently: up to 12 match groups are exposed as v1..v12 (as numbers) and s1..s12 (as strings).
A special shorthand "!num" is also provided that simply expands to `(\d*\.?\d+)`.
```xml
<!-- Input -->
<description>Price: $15.00 Special Offer</description>

<!-- Command -->
chef "Price: $!num Special Offer" "v1 = v1 * 0.92" input.xml

<!-- Output -->
<description>Price: $13.80 Special Offer</description>
```
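To see concretely what the `!num` shorthand buys, the sketch below performs the documented expansion by hand in Go. The `\$` escape is added here only because `$` is a regex metacharacter; how chef itself escapes the literal parts of a pattern is not specified above.

```go
package main

import (
    "fmt"
    "regexp"
    "strings"
)

func main() {
    // "!num" stands in for a capture group that matches a decimal number.
    pattern := strings.ReplaceAll(`Price: \$!num Special Offer`, "!num", `(\d*\.?\d+)`)
    re := regexp.MustCompile(pattern)

    match := re.FindStringSubmatch("Price: $15.00 Special Offer")
    fmt.Println(match[1]) // 15.00
}
```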
### 6. Conditional Transformation
```xml
<!-- Input -->
<item stock="5" price="10.00"/>

<!-- Command -->
chef -xml "//item" "if tonumber(v.stock) > 0 then v.price = v.price * 0.8 end" input.xml

<!-- Output -->
<item stock="5" price="8.00"/>
```
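All of the expressions above are ordinary Lua evaluated against the matched value. As an illustration of what that means, here is a minimal sketch of evaluating a `v=v*2`-style expression from Go with the gopher-lua dependency declared in go.mod; it is only a sketch, not the project's actual processor code, and the `apply` helper is hypothetical.

```go
package main

import (
    "fmt"

    lua "github.com/yuin/gopher-lua"
)

// apply runs a chef-style expression such as "v=v*2" against one matched value.
func apply(expr string, value float64) (float64, error) {
    L := lua.NewState()
    defer L.Close()

    // Expose the matched value as the global "v".
    L.SetGlobal("v", lua.LNumber(value))

    if err := L.DoString(expr); err != nil {
        return 0, err
    }

    // Read back whatever the expression left in "v".
    out, ok := L.GetGlobal("v").(lua.LNumber)
    if !ok {
        return 0, fmt.Errorf("expression did not leave a number in v")
    }
    return float64(out), nil
}

func main() {
    result, err := apply("v=v*2", 44.95)
    if err != nil {
        panic(err)
    }
    fmt.Println(result) // 89.9
}
```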
## Installation

```bash
go build -o chef main.go
```

```bash
# Process XML file
./chef -xml "//price" "v=v*1.2" input.xml

# Process JSON file
./chef -json "$.prices[*]" "v=v*0.9" input.json
```
@@ -1,651 +0,0 @@
[Deleted file, path not shown in this view: a Barotrauma `<Talents>` XML data file (651 lines removed). It defined the talents powerarmor, foolhardy, berserker, mudraptorwrestler, heavylifting, iamthatguy, robotics, ironstorm, residualwaste, massproduction, toolmaintenance, miner, retrofit, ironman, oiledmachinery, pumpndump, ballastdenizen, engineengineer, multifunctional, salvagecrew, machinemaniac, tinkerer, modularrepairs, hullfixer, letitdrain, quickfixer, scrapsavant, and safetyfirst, each with Icon, Description, AbilityGroupEffect/AbilityGroupInterval, and AddedRecipe elements.]
28  cmd/log_format_test/main.go (new file)
@@ -0,0 +1,28 @@
package main

import (
    "time"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

func main() {
    // Initialize logger with DEBUG level
    logger.Init(logger.LevelDebug)

    // Test different log levels
    logger.Info("This is an info message")
    logger.Debug("This is a debug message")
    logger.Warning("This is a warning message")
    logger.Error("This is an error message")
    logger.Trace("This is a trace message (not visible at DEBUG level)")

    // Test with a goroutine
    logger.SafeGo(func() {
        time.Sleep(10 * time.Millisecond)
        logger.Info("Message from goroutine")
    })

    // Wait for goroutine to complete
    time.Sleep(20 * time.Millisecond)
}
10  glob_test.go
@@ -1,6 +1,7 @@
package main

import (
    "cook/utils"
    "os"
    "path/filepath"
    "testing"
@@ -76,9 +77,14 @@ func TestGlobExpansion(t *testing.T) {

    for _, tc := range tests {
        t.Run(tc.name, func(t *testing.T) {
            files, err := expandFilePatterns(tc.patterns)
            // Convert string patterns to map[string]struct{} for ExpandGLobs
            patternMap := make(map[string]struct{})
            for _, pattern := range tc.patterns {
                patternMap[pattern] = struct{}{}
            }
            files, err := utils.ExpandGLobs(patternMap)
            if err != nil {
                t.Fatalf("expandFilePatterns failed: %v", err)
                t.Fatalf("ExpandGLobs failed: %v", err)
            }

            if len(files) != tc.expected {
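The updated test builds a `map[string]struct{}` of patterns and hands it to `utils.ExpandGLobs` in place of the old `expandFilePatterns([]string)` call. A minimal sketch of that call outside a test, assuming only what the test shows (a pattern set in, a slice of matched paths and an error out):

```go
package main

import (
    "fmt"

    "cook/utils"
)

func main() {
    // Build the pattern set the same way the updated test does.
    patterns := map[string]struct{}{
        "*.xml":       {},
        "data/**.xml": {},
    }

    files, err := utils.ExpandGLobs(patterns)
    if err != nil {
        panic(err)
    }
    for _, f := range files {
        fmt.Println(f)
    }
}
```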
38  go.mod
@@ -1,20 +1,36 @@
module modify
module cook

go 1.24.1
go 1.23.2

require (
    github.com/antchfx/xmlquery v1.4.4
    git.site.quack-lab.dev/dave/cylogger v1.3.0
    github.com/bmatcuk/doublestar/v4 v4.8.1
    github.com/stretchr/testify v1.10.0
    github.com/yuin/gopher-lua v1.1.1
    gopkg.in/yaml.v3 v3.0.1
    gorm.io/gorm v1.30.0
)

require (
    github.com/PaesslerAG/gval v1.0.0 // indirect
    github.com/PaesslerAG/jsonpath v0.1.1 // indirect
    github.com/antchfx/xpath v1.3.3 // indirect
    github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
    github.com/sergi/go-diff v1.3.1 // indirect
    github.com/stretchr/testify v1.10.0 // indirect
    golang.org/x/net v0.33.0 // indirect
    golang.org/x/text v0.21.0 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/google/go-cmp v0.6.0 // indirect
    github.com/hexops/valast v1.5.0 // indirect
    github.com/jinzhu/inflection v1.0.0 // indirect
    github.com/jinzhu/now v1.1.5 // indirect
    github.com/kr/pretty v0.3.1 // indirect
    github.com/mattn/go-sqlite3 v1.14.22 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/rogpeppe/go-internal v1.14.1 // indirect
    github.com/tidwall/gjson v1.18.0 // indirect
    github.com/tidwall/match v1.1.1 // indirect
    github.com/tidwall/pretty v1.2.0 // indirect
    github.com/tidwall/sjson v1.2.5 // indirect
    golang.org/x/mod v0.21.0 // indirect
    golang.org/x/sync v0.11.0 // indirect
    golang.org/x/text v0.22.0 // indirect
    golang.org/x/tools v0.26.0 // indirect
    gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
    mvdan.cc/gofumpt v0.4.0 // indirect
)

require gorm.io/driver/sqlite v1.6.0
142  go.sum
@@ -1,98 +1,68 @@
[Checksum churn accompanying the go.mod changes above: h1 and go.mod hash entries for the old dependency set (antchfx xmlquery/xpath, golang/groupcache, golang.org/x/net, and the legacy golang.org/x crypto/sys/term/text/tools pins) alongside entries for the new one (git.site.quack-lab.dev/dave/cylogger v1.3.0, bmatcuk/doublestar v4.8.1, yuin/gopher-lua v1.1.1, gorm.io/gorm v1.30.0 with the sqlite driver v1.6.0 and mattn/go-sqlite3 v1.14.22, the tidwall gjson/sjson/match/pretty modules, stretchr/testify v1.10.0, hexops valast/autogold/gotextdiff, kr/pretty, rogpeppe/go-internal, mvdan.cc/gofumpt v0.4.0, and the golang.org/x mod/sync/text/tools versions pinned in go.mod).]
745  main.go
@@ -1,59 +1,56 @@
package main

import (
    "errors"
    "flag"
    "fmt"
    "log"
    "os"
    "sort"
    "sync"
    "sync/atomic"
    "time"

    "github.com/bmatcuk/doublestar/v4"
    "cook/processor"
    "cook/utils"

    "modify/processor"
    "gopkg.in/yaml.v3"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

// mainLogger is a scoped logger for the main package.
var mainLogger = logger.Default.WithPrefix("main")

type GlobalStats struct {
    TotalMatches int
    TotalModifications int
    ProcessedFiles int
    FailedFiles int
    TotalMatches int64
    TotalModifications int64
    ProcessedFiles int64
    FailedFiles int64
    ModificationsPerCommand sync.Map
}

type FileMode string

const (
    ModeRegex FileMode = "regex"
    ModeXML FileMode = "xml"
    ModeJSON FileMode = "json"
)

var stats GlobalStats
var logger *log.Logger

var (
    fileModeFlag = flag.String("mode", "regex", "Processing mode: regex, xml, json")
    stats GlobalStats = GlobalStats{
        ModificationsPerCommand: sync.Map{},
    }
)

func init() {
    log.SetFlags(log.Lmicroseconds | log.Lshortfile)
    logger = log.New(os.Stdout, "", log.Lmicroseconds|log.Lshortfile)

    stats = GlobalStats{}
}

func main() {
    flag.Usage = func() {
        CreateExampleConfig()
        fmt.Fprintf(os.Stderr, "Usage: %s [options] <pattern> <lua_expression> <...files_or_globs>\n", os.Args[0])
        fmt.Fprintf(os.Stderr, "\nOptions:\n")
        fmt.Fprintf(os.Stderr, " -mode string\n")
        fmt.Fprintf(os.Stderr, " Processing mode: regex, xml, json (default \"regex\")\n")
        fmt.Fprintf(os.Stderr, " -reset\n")
        fmt.Fprintf(os.Stderr, " Reset files to their original state\n")
        fmt.Fprintf(os.Stderr, " -loglevel string\n")
        fmt.Fprintf(os.Stderr, " Set logging level: ERROR, WARNING, INFO, DEBUG, TRACE (default \"INFO\")\n")
        fmt.Fprintf(os.Stderr, " -json\n")
        fmt.Fprintf(os.Stderr, " Enable JSON mode for processing JSON files\n")
        fmt.Fprintf(os.Stderr, "\nExamples:\n")
        fmt.Fprintf(os.Stderr, " Regex mode (default):\n")
        fmt.Fprintf(os.Stderr, " %s \"<value>(\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
        fmt.Fprintf(os.Stderr, " XML mode:\n")
        fmt.Fprintf(os.Stderr, " %s -mode=xml -xpath=\"//value\" \"*1.5\" data.xml\n", os.Args[0])
        fmt.Fprintf(os.Stderr, " %s \"<value>(\\\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
        fmt.Fprintf(os.Stderr, " JSON mode:\n")
        fmt.Fprintf(os.Stderr, " %s -mode=json -jsonpath=\"$.items[*].value\" \"*1.5\" data.json\n", os.Args[0])
        fmt.Fprintf(os.Stderr, " %s -json data.json\n", os.Args[0])
        fmt.Fprintf(os.Stderr, "\nNote: v1, v2, etc. are used to refer to capture groups as numbers.\n")
        fmt.Fprintf(os.Stderr, " s1, s2, etc. are used to refer to capture groups as strings.\n")
        fmt.Fprintf(os.Stderr, " Helper functions: num(str) converts string to number, str(num) converts number to string\n")
@@ -61,108 +58,646 @@ func main() {
|
||||
fmt.Fprintf(os.Stderr, " If expression starts with an operator like *, /, +, -, =, etc., v1 is automatically prepended\n")
|
||||
fmt.Fprintf(os.Stderr, " You can use any valid Lua code, including if statements, loops, etc.\n")
|
||||
fmt.Fprintf(os.Stderr, " Glob patterns are supported for file selection (*.xml, data/**.xml, etc.)\n")
|
||||
fmt.Fprintf(os.Stderr, "\nLua Functions Available:\n")
|
||||
fmt.Fprintf(os.Stderr, "%s\n", processor.GetLuaFunctionsHelp())
|
||||
}
|
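
The operator shorthand described in the usage text above (an expression such as "*1.5" gets v1 prepended) can be pictured with the following minimal sketch; the real transformation is done by processor.BuildLuaScript and may differ in detail, so treat this as illustration only.

package main

import (
	"fmt"
	"strings"
)

// expandShorthand is a hypothetical sketch of the operator-shorthand rule:
// when the expression starts with an operator, v1 is prepended so that
// "*1.5" behaves like "v1 = v1 * 1.5".
func expandShorthand(expr string) string {
	trimmed := strings.TrimSpace(expr)
	if trimmed == "" {
		return trimmed
	}
	switch trimmed[0] {
	case '*', '/', '+', '-', '%', '^':
		return "v1 = v1 " + trimmed
	case '=':
		return "v1 " + trimmed
	default:
		return trimmed
	}
}

func main() {
	fmt.Println(expandShorthand("*1.5"))       // v1 = v1 *1.5
	fmt.Println(expandShorthand("= $enabled")) // v1 = $enabled
}
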
||||
|
||||
// TODO: Fix the breakage when cooking *.yml in the Barotrauma directory
|
||||
flag.Parse()
|
||||
args := flag.Args()
|
||||
|
||||
if len(args) < 3 {
|
||||
fmt.Fprintf(os.Stderr, "%s mode requires %d arguments minimum\n", *fileModeFlag, 3)
|
||||
logger.InitFlag()
|
||||
mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String())
|
||||
mainLogger.Trace("Full argv: %v", os.Args)
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
|
||||
// Get the appropriate pattern and expression based on mode
|
||||
var pattern, luaExpr string
|
||||
var filePatterns []string
|
||||
|
||||
pattern = args[0]
|
||||
luaExpr = args[1]
|
||||
filePatterns = args[2:]
|
||||
|
||||
// Prepare the Lua expression
|
||||
originalLuaExpr := luaExpr
|
||||
luaExpr = processor.BuildLuaScript(luaExpr)
|
||||
if originalLuaExpr != luaExpr {
|
||||
logger.Printf("Transformed Lua expression from %q to %q", originalLuaExpr, luaExpr)
|
||||
}
|
||||
|
||||
// Expand file patterns with glob support
|
||||
files, err := expandFilePatterns(filePatterns)
|
||||
mainLogger.Debug("Getting database connection")
|
||||
db, err := utils.GetDB()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
|
||||
mainLogger.Error("Failed to get database: %v", err)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("Database connection established")
|
||||
|
||||
workdone, err := HandleSpecialArgs(args, err, db)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to handle special args: %v", err)
|
||||
return
|
||||
}
|
||||
if workdone {
|
||||
mainLogger.Info("Special arguments handled, exiting.")
|
||||
return
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "No files found matching the specified patterns\n")
|
||||
// The plan is:
|
||||
// Load all commands
|
||||
mainLogger.Debug("Loading commands from arguments")
|
||||
mainLogger.Trace("Arguments: %v", args)
|
||||
commands, err := utils.LoadCommands(args)
|
||||
if err != nil || len(commands) == 0 {
|
||||
mainLogger.Error("Failed to load commands: %v", err)
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
// Collect global modifiers from special entries and filter them out
|
||||
vars := map[string]interface{}{}
|
||||
filtered := make([]utils.ModifyCommand, 0, len(commands))
|
||||
for _, c := range commands {
|
||||
if len(c.Modifiers) > 0 && c.Name == "" && c.Regex == "" && len(c.Regexes) == 0 && c.Lua == "" && len(c.Files) == 0 {
|
||||
for k, v := range c.Modifiers {
|
||||
vars[k] = v
|
||||
}
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, c)
|
||||
}
|
||||
if len(vars) > 0 {
|
||||
mainLogger.Info("Loaded %d global modifiers", len(vars))
|
||||
processor.SetVariables(vars)
|
||||
}
|
||||
commands = filtered
|
||||
mainLogger.Info("Loaded %d commands", len(commands))
|
||||
|
||||
// Create the processor based on mode
|
||||
var proc processor.Processor
|
||||
switch *fileModeFlag {
|
||||
case "regex":
|
||||
proc = &processor.RegexProcessor{}
|
||||
logger.Printf("Starting regex modifier with pattern %q, expression %q on %d files",
|
||||
pattern, luaExpr, len(files))
|
||||
// case "xml":
|
||||
// proc = &processor.XMLProcessor{}
|
||||
// pattern = *xpathFlag
|
||||
// logger.Printf("Starting XML modifier with XPath %q, expression %q on %d files",
|
||||
// pattern, luaExpr, len(files))
|
||||
case "json":
|
||||
proc = &processor.JSONProcessor{}
|
||||
logger.Printf("Starting JSON modifier with JSONPath %q, expression %q on %d files",
|
||||
pattern, luaExpr, len(files))
|
||||
if *utils.Filter != "" {
|
||||
mainLogger.Info("Filtering commands by name: %s", *utils.Filter)
|
||||
commands = utils.FilterCommands(commands, *utils.Filter)
|
||||
mainLogger.Info("Filtered %d commands", len(commands))
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
// Process each file
|
||||
for _, file := range files {
|
||||
wg.Add(1)
|
||||
go func(file string) {
|
||||
defer wg.Done()
|
||||
logger.Printf("Processing file: %s", file)
|
||||
// Then aggregate all the globs and deduplicate them
|
||||
mainLogger.Debug("Aggregating globs and deduplicating")
|
||||
globs := utils.AggregateGlobs(commands)
|
||||
mainLogger.Debug("Aggregated %d globs before deduplication", utils.CountGlobsBeforeDedup(commands))
|
||||
|
||||
modCount, matchCount, err := proc.Process(file, pattern, luaExpr)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Failed to process file %s: %v\n", file, err)
|
||||
stats.FailedFiles++
|
||||
for _, command := range commands {
|
||||
mainLogger.Trace("Command: %s", command.Name)
|
||||
if len(command.Regexes) > 0 {
|
||||
mainLogger.Trace("Regexes: %v", command.Regexes)
|
||||
} else {
|
||||
logger.Printf("Successfully processed file: %s", file)
|
||||
stats.ProcessedFiles++
|
||||
stats.TotalMatches += matchCount
|
||||
stats.TotalModifications += modCount
|
||||
mainLogger.Trace("Regex: %s", command.Regex)
|
||||
}
|
||||
}(file)
|
||||
mainLogger.Trace("Files: %v", command.Files)
|
||||
mainLogger.Trace("Lua: %s", command.Lua)
|
||||
mainLogger.Trace("Reset: %t", command.Reset)
|
||||
mainLogger.Trace("Isolate: %t", command.Isolate)
|
||||
mainLogger.Trace("LogLevel: %s", command.LogLevel)
|
||||
}
|
||||
|
||||
// Resolve all the files for all the globs
|
||||
mainLogger.Info("Found %d unique file patterns", len(globs))
|
||||
mainLogger.Debug("Expanding glob patterns to files")
|
||||
files, err := utils.ExpandGLobs(globs)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to expand file patterns: %v", err)
|
||||
return
|
||||
}
|
||||
mainLogger.Info("Found %d files to process", len(files))
|
||||
mainLogger.Trace("Files to process: %v", files)
|
||||
|
||||
// Connect files to commands via their globs:
// for each file, check every glob of every command (memoizing if needed),
// so we know which commands affect which files
|
||||
mainLogger.Debug("Associating files with commands")
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to associate files with commands: %v", err)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("Files associated with commands")
|
||||
mainLogger.Trace("File-command associations: %v", associations)
|
||||
// Per-file association summary for better visibility when debugging
|
||||
for file, assoc := range associations {
|
||||
cmdNames := make([]string, 0, len(assoc.Commands))
|
||||
for _, c := range assoc.Commands {
|
||||
cmdNames = append(cmdNames, c.Name)
|
||||
}
|
||||
isoNames := make([]string, 0, len(assoc.IsolateCommands))
|
||||
for _, c := range assoc.IsolateCommands {
|
||||
isoNames = append(isoNames, c.Name)
|
||||
}
|
||||
mainLogger.Debug("File %q has %d regular and %d isolate commands", file, len(assoc.Commands), len(assoc.IsolateCommands))
|
||||
mainLogger.Trace("\tRegular: %v", cmdNames)
|
||||
mainLogger.Trace("\tIsolate: %v", isoNames)
|
||||
}
|
||||
|
||||
mainLogger.Debug("Resetting files where necessary")
|
||||
err = utils.ResetWhereNecessary(associations, db)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to reset files where necessary: %v", err)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("Files reset where necessary")
|
||||
|
||||
// Then for each file run all commands associated with the file
|
||||
workers := make(chan struct{}, *utils.ParallelFiles)
|
||||
wg := sync.WaitGroup{}
|
||||
mainLogger.Debug("Starting file processing with %d parallel workers", *utils.ParallelFiles)
|
||||
|
||||
// Add performance tracking
|
||||
startTime := time.Now()
|
||||
|
||||
// Create a map to store loggers for each command
|
||||
commandLoggers := make(map[string]*logger.Logger)
|
||||
for _, command := range commands {
|
||||
// Create a named logger for each command
|
||||
cmdName := command.Name
|
||||
if cmdName == "" {
|
||||
// If no name is provided, use a short version of the regex pattern
|
||||
if len(command.Regex) > 20 {
|
||||
cmdName = command.Regex[:17] + "..."
|
||||
} else {
|
||||
cmdName = command.Regex
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the log level for this specific command
|
||||
cmdLogLevel := logger.ParseLevel(command.LogLevel)
|
||||
|
||||
// Create a logger with the command name as a field
|
||||
commandLoggers[command.Name] = logger.Default.WithField("command", cmdName)
|
||||
commandLoggers[command.Name].SetLevel(cmdLogLevel)
|
||||
|
||||
mainLogger.Debug("Created logger for command %q with log level %s", cmdName, cmdLogLevel.String())
|
||||
}
|
||||
|
||||
for file, association := range associations {
|
||||
workers <- struct{}{}
|
||||
wg.Add(1)
|
||||
logger.SafeGoWithArgs(func(args ...interface{}) {
|
||||
defer func() { <-workers }()
|
||||
defer wg.Done()
|
||||
// Track per-file processing time
|
||||
fileStartTime := time.Now()
|
||||
|
||||
mainLogger.Debug("Reading file %q", file)
|
||||
fileData, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to read file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
fileDataStr := string(fileData)
|
||||
mainLogger.Trace("File %q content: %s", file, utils.LimitString(fileDataStr, 500))
|
||||
|
||||
isChanged := false
|
||||
mainLogger.Debug("Running isolate commands for file %q", file)
|
||||
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr)
|
||||
if err != nil && err != NothingToDo {
|
||||
mainLogger.Error("Failed to run isolate commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
if err != NothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
mainLogger.Debug("Running other commands for file %q", file)
|
||||
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers)
|
||||
if err != nil && err != NothingToDo {
|
||||
mainLogger.Error("Failed to run other commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
if err != NothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
if isChanged {
|
||||
mainLogger.Debug("Saving file %q to database", file)
|
||||
err = db.SaveFile(file, fileData)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to save file %q to database: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("File %q saved to database", file)
|
||||
}
|
||||
|
||||
mainLogger.Debug("Writing file %q", file)
|
||||
err = os.WriteFile(file, []byte(fileDataStr), 0644)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to write file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("File %q written", file)
|
||||
|
||||
// Only increment ProcessedFiles once per file, after all processing is complete
|
||||
atomic.AddInt64(&stats.ProcessedFiles, 1)
|
||||
|
||||
mainLogger.Debug("File %q processed in %v", file, time.Since(fileStartTime))
|
||||
}, file, commands)
|
||||
}
|
||||
wg.Wait()
|
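
The per-file loop above bounds concurrency with a buffered channel used as a semaphore plus a WaitGroup. Below is a minimal, self-contained sketch of that pattern; the file list, the parallelism value, and the worker body are placeholders standing in for *utils.ParallelFiles and the real processing.

package main

import (
	"fmt"
	"sync"
)

func main() {
	files := []string{"a.xml", "b.xml", "c.xml"}
	parallel := 2 // stand-in for *utils.ParallelFiles

	// Buffered channel acts as a semaphore: at most `parallel` goroutines
	// hold a slot at any time; the WaitGroup waits for all of them.
	workers := make(chan struct{}, parallel)
	var wg sync.WaitGroup

	for _, file := range files {
		workers <- struct{}{} // acquire a slot
		wg.Add(1)
		go func(file string) {
			defer wg.Done()
			defer func() { <-workers }() // release the slot
			fmt.Println("processing", file) // placeholder for real work
		}(file)
	}
	wg.Wait()
}
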
||||
|
||||
processingTime := time.Since(startTime)
|
||||
mainLogger.Info("Processing completed in %v", processingTime)
|
||||
processedFiles := atomic.LoadInt64(&stats.ProcessedFiles)
|
||||
if processedFiles > 0 {
|
||||
mainLogger.Info("Average time per file: %v", processingTime/time.Duration(processedFiles))
|
||||
}
|
||||
|
||||
// TODO: Also give each command its own logger, maybe prefix it with something... Maybe give commands a name?
|
||||
// Do that with logger.WithField("loglevel", level.String())
|
||||
// Since each command also has its own log level
|
||||
// TODO: Maybe even figure out how to run individual commands...?
|
||||
// TODO: What to do with git? Figure it out ....
|
||||
|
||||
// if *gitFlag {
|
||||
// mainLogger.Info("Git integration enabled, setting up git repository")
|
||||
// err := setupGit()
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to setup git: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error setting up git: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
|
||||
// mainLogger.Debug("Expanding file patterns")
|
||||
// files, err := expandFilePatterns(filePatterns)
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to expand file patterns: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// if *gitFlag {
|
||||
// mainLogger.Info("Cleaning up git files before processing")
|
||||
// err := cleanupGitFiles(files)
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to cleanup git files: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error cleaning up git files: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
// if *resetFlag {
|
||||
// mainLogger.Info("Files reset to their original state, nothing more to do")
|
||||
// log.Printf("Files reset to their original state, nothing more to do")
|
||||
// return
|
||||
// }
|
||||
|
||||
// Print summary
|
||||
if stats.TotalModifications == 0 {
|
||||
fmt.Fprintf(os.Stderr, "No modifications were made in any files\n")
|
||||
totalModifications := atomic.LoadInt64(&stats.TotalModifications)
|
||||
if totalModifications == 0 {
|
||||
mainLogger.Warning("No modifications were made in any files")
|
||||
} else {
|
||||
fmt.Printf("Operation complete! Modified %d values in %d/%d files\n",
|
||||
stats.TotalModifications, stats.ProcessedFiles, stats.ProcessedFiles+stats.FailedFiles)
|
||||
failedFiles := atomic.LoadInt64(&stats.FailedFiles)
|
||||
mainLogger.Info("Operation complete! Modified %d values in %d/%d files",
|
||||
totalModifications, processedFiles, processedFiles+failedFiles)
|
||||
sortedCommands := []string{}
|
||||
stats.ModificationsPerCommand.Range(func(key, value interface{}) bool {
|
||||
sortedCommands = append(sortedCommands, key.(string))
|
||||
return true
|
||||
})
|
||||
sort.Strings(sortedCommands)
|
||||
|
||||
for _, command := range sortedCommands {
|
||||
count, _ := stats.ModificationsPerCommand.Load(command)
|
||||
if count.(int) > 0 {
|
||||
mainLogger.Info("\tCommand %q made %d modifications", command, count)
|
||||
} else {
|
||||
mainLogger.Warning("\tCommand %q made no modifications", command)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func expandFilePatterns(patterns []string) ([]string, error) {
|
||||
var files []string
|
||||
filesMap := make(map[string]bool)
|
||||
|
||||
for _, pattern := range patterns {
|
||||
matches, _ := doublestar.Glob(os.DirFS("."), pattern)
|
||||
for _, m := range matches {
|
||||
if info, err := os.Stat(m); err == nil && !info.IsDir() && !filesMap[m] {
|
||||
filesMap[m], files = true, append(files, m)
|
||||
func HandleSpecialArgs(args []string, err error, db utils.DB) (bool, error) {
|
||||
handleSpecialArgsLogger := logger.Default.WithPrefix("HandleSpecialArgs")
|
||||
handleSpecialArgsLogger.Debug("Handling special arguments: %v", args)
|
||||
switch args[0] {
|
||||
case "reset":
|
||||
handleSpecialArgsLogger.Info("Resetting all files")
|
||||
err = utils.ResetAllFiles(db)
|
||||
if err != nil {
|
||||
handleSpecialArgsLogger.Error("Failed to reset all files: %v", err)
|
||||
return true, err
|
||||
}
|
||||
handleSpecialArgsLogger.Info("All files reset")
|
||||
return true, nil
|
||||
case "dump":
|
||||
handleSpecialArgsLogger.Info("Dumping all files from database")
|
||||
err = db.RemoveAllFiles()
|
||||
if err != nil {
|
||||
handleSpecialArgsLogger.Error("Failed to remove all files from database: %v", err)
|
||||
return true, err
|
||||
}
|
||||
handleSpecialArgsLogger.Info("All files removed from database")
|
||||
return true, nil
|
||||
}
|
||||
|
||||
if len(files) > 0 {
|
||||
logger.Printf("Found %d files to process", len(files))
|
||||
}
|
||||
return files, nil
|
||||
handleSpecialArgsLogger.Debug("No special arguments handled, returning false")
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func CreateExampleConfig() {
|
||||
createExampleConfigLogger := logger.Default.WithPrefix("CreateExampleConfig")
|
||||
createExampleConfigLogger.Debug("Creating example configuration file")
|
||||
commands := []utils.ModifyCommand{
|
||||
// Global modifiers only entry (no name/regex/lua/files)
|
||||
{
|
||||
Modifiers: map[string]interface{}{
|
||||
"foobar": 4,
|
||||
"multiply": 1.5,
|
||||
"prefix": "NEW_",
|
||||
"enabled": true,
|
||||
},
|
||||
},
|
||||
// Multi-regex example using $variable in Lua
|
||||
{
|
||||
Name: "RFToolsMultiply",
|
||||
Regexes: []string{"generatePerTick = !num", "ticksPer\\w+ = !num", "generatorRFPerTick = !num"},
|
||||
Lua: "* $foobar",
|
||||
Files: []string{"polymc/instances/**/rftools*.toml", `polymc\\instances\\**\\rftools*.toml`},
|
||||
Reset: true,
|
||||
// LogLevel defaults to INFO
|
||||
},
|
||||
// Named capture groups with arithmetic and string ops
|
||||
{
|
||||
Name: "UpdateAmountsAndItems",
|
||||
Regex: `(?P<amount>!num)\s+units\s+of\s+(?P<item>[A-Za-z_\-]+)`,
|
||||
Lua: `amount = amount * $multiply; item = upper(item); return true`,
|
||||
Files: []string{"data/**/*.txt"},
|
||||
// INFO log level
|
||||
},
|
||||
// Full replacement via Lua 'replacement' variable
|
||||
{
|
||||
Name: "BumpMinorVersion",
|
||||
Regex: `version\s*=\s*"(?P<major>!num)\.(?P<minor>!num)\.(?P<patch>!num)"`,
|
||||
Lua: `replacement = format("version=\"%s.%s.%s\"", major, num(minor)+1, 0); return true`,
|
||||
Files: []string{"config/*.ini", "config/*.cfg"},
|
||||
},
|
||||
// Multiline regex example (DOTALL is auto-enabled). Captures numeric in nested XML.
|
||||
{
|
||||
Name: "XMLNestedValueMultiply",
|
||||
Regex: `<item>\s*\s*<name>!any<\/name>\s*\s*<value>(!num)<\/value>\s*\s*<\/item>`,
|
||||
Lua: `* $multiply`,
|
||||
Files: []string{"data/**/*.xml"},
|
||||
// Demonstrates multiline regex in YAML
|
||||
},
|
||||
// Multiline regexES array, with different patterns handled by same Lua
|
||||
{
|
||||
Name: "MultiLinePatterns",
|
||||
Regexes: []string{
|
||||
`<entry>\s*\n\s*<id>(?P<id>!num)</id>\s*\n\s*<score>(?P<score>!num)</score>\s*\n\s*</entry>`,
|
||||
`\[block\]\nkey=(?P<key>[A-Za-z_]+)\nvalue=(?P<val>!num)`,
|
||||
},
|
||||
Lua: `if is_number(score) then score = score * 2 end; if is_number(val) then val = val * 3 end; return true`,
|
||||
Files: []string{"examples/**/*.*"},
|
||||
LogLevel: "DEBUG",
|
||||
},
|
||||
// Use equals operator shorthand and boolean variable
|
||||
{
|
||||
Name: "EnableFlags",
|
||||
Regex: `enabled\s*=\s*(true|false)`,
|
||||
Lua: `= $enabled`,
|
||||
Files: []string{"**/*.toml"},
|
||||
},
|
||||
// Demonstrate NoDedup to allow overlapping replacements
|
||||
{
|
||||
Name: "OverlappingGroups",
|
||||
Regex: `(?P<a>!num)(?P<b>!num)`,
|
||||
Lua: `a = num(a) + 1; b = num(b) + 1; return true`,
|
||||
Files: []string{"overlap/**/*.txt"},
|
||||
NoDedup: true,
|
||||
},
|
||||
// Isolate command example operating on entire matched block
|
||||
{
|
||||
Name: "IsolateUppercaseBlock",
|
||||
Regex: `BEGIN\n(?P<block>!any)\nEND`,
|
||||
Lua: `block = upper(block); return true`,
|
||||
Files: []string{"logs/**/*.log"},
|
||||
Isolate: true,
|
||||
LogLevel: "TRACE",
|
||||
},
|
||||
// Using !rep placeholder and arrays of files
|
||||
{
|
||||
Name: "RepeatPlaceholderExample",
|
||||
Regex: `name: (.*) !rep(, .* , 2)`,
|
||||
Lua: `-- no-op, just demonstrate placeholder; return false`,
|
||||
Files: []string{"lists/**/*.yml", "lists/**/*.yaml"},
|
||||
},
|
||||
// Using string variable in Lua expression
|
||||
{
|
||||
Name: "PrefixKeys",
|
||||
Regex: `(?P<key>[A-Za-z0-9_]+)\s*=`,
|
||||
Lua: `key = $prefix .. key; return true`,
|
||||
Files: []string{"**/*.properties"},
|
||||
},
|
||||
// JSON mode examples
|
||||
{
|
||||
Name: "JSONArrayMultiply",
|
||||
JSON: true,
|
||||
Lua: `for i, item in ipairs(data.items) do data.items[i].value = item.value * 2 end; return true`,
|
||||
Files: []string{"data/**/*.json"},
|
||||
},
|
||||
{
|
||||
Name: "JSONObjectUpdate",
|
||||
JSON: true,
|
||||
Lua: `data.version = "2.0.0"; data.enabled = true; return true`,
|
||||
Files: []string{"config/**/*.json"},
|
||||
},
|
||||
{
|
||||
Name: "JSONNestedModify",
|
||||
JSON: true,
|
||||
Lua: `if data.settings and data.settings.performance then data.settings.performance.multiplier = data.settings.performance.multiplier * 1.5 end; return true`,
|
||||
Files: []string{"settings/**/*.json"},
|
||||
},
|
||||
}
|
||||
|
||||
data, err := yaml.Marshal(commands)
|
||||
if err != nil {
|
||||
createExampleConfigLogger.Error("Failed to marshal example config: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
createExampleConfigLogger.Debug("Writing example_cook.yml")
|
||||
err = os.WriteFile("example_cook.yml", data, 0644)
|
||||
if err != nil {
|
||||
createExampleConfigLogger.Error("Failed to write example_cook.yml: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
createExampleConfigLogger.Info("Wrote example_cook.yml")
|
||||
}
|
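
For reference, an entry of the kind generated above serializes with gopkg.in/yaml.v3 roughly as follows; the struct and its YAML tags below are local stand-ins and do not claim to match utils.ModifyCommand exactly.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// command is a local stand-in for utils.ModifyCommand, kept to a few fields
// purely to show the shape of an example_cook.yml entry.
type command struct {
	Name  string   `yaml:"name"`
	Regex string   `yaml:"regex"`
	Lua   string   `yaml:"lua"`
	Files []string `yaml:"files"`
}

func main() {
	cmds := []command{{
		Name:  "EnableFlags",
		Regex: `enabled\s*=\s*(true|false)`,
		Lua:   "= $enabled",
		Files: []string{"**/*.toml"},
	}}
	out, err := yaml.Marshal(cmds)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // prints the YAML list for the single command
}
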
||||
|
||||
var NothingToDo = errors.New("nothing to do")
|
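
NothingToDo is a sentinel error: callers such as RunOtherCommands and the main processing loop treat it as "no modifications were needed" rather than as a failure. A self-contained sketch of that convention follows; the names are local stand-ins, not the real helpers.

package main

import (
	"errors"
	"fmt"
)

var errNothingToDo = errors.New("nothing to do")

// process returns the sentinel when there is no work; any other non-nil
// error is a genuine failure.
func process(content string) (string, error) {
	if content == "" {
		return content, errNothingToDo
	}
	return content + " (modified)", nil
}

func main() {
	for _, in := range []string{"", "data"} {
		out, err := process(in)
		switch {
		case errors.Is(err, errNothingToDo):
			fmt.Println("skipped: nothing to do")
		case err != nil:
			fmt.Println("failed:", err)
		default:
			fmt.Println("wrote:", out)
		}
	}
}
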
||||
|
||||
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger) (string, error) {
|
||||
runOtherCommandsLogger := mainLogger.WithPrefix("RunOtherCommands").WithField("file", file)
|
||||
runOtherCommandsLogger.Debug("Running other commands for file")
|
||||
runOtherCommandsLogger.Trace("File data before modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
// Separate JSON and regex commands for different processing approaches
|
||||
jsonCommands := []utils.ModifyCommand{}
|
||||
regexCommands := []utils.ModifyCommand{}
|
||||
|
||||
for _, command := range association.Commands {
|
||||
if command.JSON || *utils.JSON {
|
||||
jsonCommands = append(jsonCommands, command)
|
||||
} else {
|
||||
regexCommands = append(regexCommands, command)
|
||||
}
|
||||
}
|
||||
|
||||
// Process JSON commands sequentially (each operates on the entire file)
|
||||
for _, command := range jsonCommands {
|
||||
cmdLogger := logger.Default
|
||||
if cmdLog, ok := commandLoggers[command.Name]; ok {
|
||||
cmdLogger = cmdLog
|
||||
}
|
||||
|
||||
cmdLogger.Debug("Processing file with JSON mode for command %q", command.Name)
|
||||
newModifications, err := processor.ProcessJSON(fileDataStr, command, file)
|
||||
if err != nil {
|
||||
runOtherCommandsLogger.Error("Failed to process file with JSON command %q: %v", command.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Apply JSON modifications immediately
|
||||
if len(newModifications) > 0 {
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(newModifications, fileDataStr)
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
cmdLogger.Debug("Applied %d JSON modifications for command %q", count, command.Name)
|
||||
}
|
||||
|
||||
count, ok := stats.ModificationsPerCommand.Load(command.Name)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
stats.ModificationsPerCommand.Store(command.Name, count.(int)+len(newModifications))
|
||||
}
|
||||
|
||||
// Aggregate regex modifications and execute them
|
||||
modifications := []utils.ReplaceCommand{}
|
||||
numCommandsConsidered := 0
|
||||
for _, command := range regexCommands {
|
||||
cmdLogger := logger.Default
|
||||
if cmdLog, ok := commandLoggers[command.Name]; ok {
|
||||
cmdLogger = cmdLog
|
||||
}
|
||||
|
||||
patterns := command.Regexes
|
||||
if len(patterns) == 0 {
|
||||
patterns = []string{command.Regex}
|
||||
}
|
||||
for idx, pattern := range patterns {
|
||||
tmpCmd := command
|
||||
tmpCmd.Regex = pattern
|
||||
cmdLogger.Debug("Begin processing file with command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
|
||||
numCommandsConsidered++
|
||||
newModifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
|
||||
if err != nil {
|
||||
runOtherCommandsLogger.Error("Failed to process file with command %q: %v", command.Name, err)
|
||||
continue
|
||||
}
|
||||
modifications = append(modifications, newModifications...)
|
||||
count, ok := stats.ModificationsPerCommand.Load(command.Name)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
stats.ModificationsPerCommand.Store(command.Name, count.(int)+len(newModifications))
|
||||
|
||||
cmdLogger.Debug("Command %q generated %d modifications (pattern %d/%d)", command.Name, len(newModifications), idx+1, len(patterns))
|
||||
cmdLogger.Trace("Modifications generated by command %q: %v", command.Name, newModifications)
|
||||
if len(newModifications) == 0 {
|
||||
cmdLogger.Debug("No modifications yielded by command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
runOtherCommandsLogger.Debug("Aggregated %d modifications from %d command-pattern runs", len(modifications), numCommandsConsidered)
|
||||
runOtherCommandsLogger.Trace("All aggregated modifications: %v", modifications)
|
||||
|
||||
if len(modifications) == 0 {
|
||||
runOtherCommandsLogger.Warning("No modifications found for file")
|
||||
return fileDataStr, NothingToDo
|
||||
}
|
||||
runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))
|
||||
|
||||
// Sort commands in reverse order for safe replacements
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runOtherCommandsLogger.Trace("File data after modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runOtherCommandsLogger.Info("Executed %d modifications for file", count)
|
||||
return fileDataStr, nil
|
||||
}
|
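
utils.ExecuteModifications itself is not shown in this diff; the comment above refers to applying replacements in reverse byte order so that earlier offsets stay valid. A minimal sketch of that idea, with a local type standing in for utils.ReplaceCommand:

package main

import (
	"fmt"
	"sort"
)

// replaceCommand is a local stand-in for utils.ReplaceCommand.
type replaceCommand struct {
	From, To int
	With     string
}

// applyReverse applies byte-range replacements from the end of the buffer
// towards the start, so each earlier From/To pair still points at the
// original text. Illustrative only.
func applyReverse(content string, cmds []replaceCommand) string {
	sort.Slice(cmds, func(i, j int) bool { return cmds[i].From > cmds[j].From })
	for _, c := range cmds {
		content = content[:c.From] + c.With + content[c.To:]
	}
	return content
}

func main() {
	out := applyReverse("value=10 value=20", []replaceCommand{
		{From: 6, To: 8, With: "15"},
		{From: 15, To: 17, With: "30"},
	})
	fmt.Println(out) // value=15 value=30
}
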
||||
|
||||
func RunIsolateCommands(association utils.FileCommandAssociation, file string, fileDataStr string) (string, error) {
|
||||
runIsolateCommandsLogger := mainLogger.WithPrefix("RunIsolateCommands").WithField("file", file)
|
||||
runIsolateCommandsLogger.Debug("Running isolate commands for file")
|
||||
runIsolateCommandsLogger.Trace("File data before isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
anythingDone := false
|
||||
for _, isolateCommand := range association.IsolateCommands {
|
||||
// Check if this isolate command should use JSON mode
|
||||
if isolateCommand.JSON || *utils.JSON {
|
||||
runIsolateCommandsLogger.Debug("Begin processing file with JSON isolate command %q", isolateCommand.Name)
|
||||
modifications, err := processor.ProcessJSON(fileDataStr, isolateCommand, file)
|
||||
if err != nil {
|
||||
runIsolateCommandsLogger.Error("Failed to process file with JSON isolate command %q: %v", isolateCommand.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if len(modifications) == 0 {
|
||||
runIsolateCommandsLogger.Debug("JSON isolate command %q produced no modifications", isolateCommand.Name)
|
||||
continue
|
||||
}
|
||||
anythingDone = true
|
||||
|
||||
runIsolateCommandsLogger.Debug("Executing %d JSON isolate modifications for file", len(modifications))
|
||||
runIsolateCommandsLogger.Trace("JSON isolate modifications: %v", modifications)
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runIsolateCommandsLogger.Trace("File data after JSON isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runIsolateCommandsLogger.Info("Executed %d JSON isolate modifications for file", count)
|
||||
} else {
|
||||
// Regular regex processing for isolate commands
|
||||
runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q", isolateCommand.Regex)
|
||||
patterns := isolateCommand.Regexes
|
||||
if len(patterns) == 0 {
|
||||
patterns = []string{isolateCommand.Regex}
|
||||
}
|
||||
for idx, pattern := range patterns {
|
||||
tmpCmd := isolateCommand
|
||||
tmpCmd.Regex = pattern
|
||||
modifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
|
||||
if err != nil {
|
||||
runIsolateCommandsLogger.Error("Failed to process file with isolate command %q (pattern %d/%d): %v", isolateCommand.Name, idx+1, len(patterns), err)
|
||||
continue
|
||||
}
|
||||
|
||||
if len(modifications) == 0 {
|
||||
runIsolateCommandsLogger.Debug("Isolate command %q produced no modifications (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
|
||||
continue
|
||||
}
|
||||
anythingDone = true
|
||||
|
||||
runIsolateCommandsLogger.Debug("Executing %d isolate modifications for file", len(modifications))
|
||||
runIsolateCommandsLogger.Trace("Isolate modifications: %v", modifications)
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runIsolateCommandsLogger.Info("Executed %d isolate modifications for file", count)
|
||||
}
|
||||
}
|
||||
}
|
||||
if !anythingDone {
|
||||
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
|
||||
return fileDataStr, NothingToDo
|
||||
}
|
||||
return fileDataStr, nil
|
||||
}
|
||||
|
@@ -1,189 +1,634 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"modify/processor/jsonpath"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/tidwall/gjson"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// JSONProcessor implements the Processor interface for JSON documents
|
||||
type JSONProcessor struct{}
|
||||
// jsonLogger is a scoped logger for the processor/json package.
|
||||
var jsonLogger = logger.Default.WithPrefix("processor/json")
|
||||
|
||||
// Process implements the Processor interface for JSONProcessor
|
||||
func (p *JSONProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
|
||||
// Read file content
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error getting current working directory: %v", err)
|
||||
}
|
||||
// ProcessJSON applies Lua processing to JSON content
|
||||
func ProcessJSON(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||
processJsonLogger := jsonLogger.WithPrefix("ProcessJSON").WithField("commandName", command.Name).WithField("file", filename)
|
||||
processJsonLogger.Debug("Starting JSON processing for file")
|
||||
processJsonLogger.Trace("Initial file content length: %d", len(content))
|
||||
|
||||
fullPath := filepath.Join(cwd, filename)
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error reading file: %v", err)
|
||||
}
|
||||
var commands []utils.ReplaceCommand
|
||||
startTime := time.Now()
|
||||
|
||||
fileContent := string(content)
|
||||
|
||||
// Process the content
|
||||
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
// If we made modifications, save the file
|
||||
if modCount > 0 {
|
||||
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error writing file: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// ProcessContent implements the Processor interface for JSONProcessor
|
||||
func (p *JSONProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
|
||||
// Parse JSON document
|
||||
// Parse JSON content
|
||||
var jsonData interface{}
|
||||
err := json.Unmarshal([]byte(content), &jsonData)
|
||||
if err != nil {
|
||||
return content, 0, 0, fmt.Errorf("error parsing JSON: %v", err)
|
||||
processJsonLogger.Error("Failed to parse JSON content: %v", err)
|
||||
return commands, fmt.Errorf("failed to parse JSON: %v", err)
|
||||
}
|
||||
processJsonLogger.Debug("Successfully parsed JSON content")
|
||||
|
||||
// Find nodes matching the JSONPath pattern
|
||||
nodes, err := jsonpath.Get(jsonData, pattern)
|
||||
if err != nil {
|
||||
return content, 0, 0, fmt.Errorf("error getting nodes: %v", err)
|
||||
}
|
||||
|
||||
matchCount := len(nodes)
|
||||
if matchCount == 0 {
|
||||
return content, 0, 0, nil
|
||||
}
|
||||
|
||||
for _, node := range nodes {
|
||||
log.Printf("Processing node at path: %s with value: %v", node.Path, node.Value)
|
||||
|
||||
// Initialize Lua
|
||||
// Create Lua state
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
return content, len(nodes), 0, fmt.Errorf("error creating Lua state: %v", err)
|
||||
processJsonLogger.Error("Error creating Lua state: %v", err)
|
||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
defer L.Close()
|
||||
log.Println("Lua state initialized successfully.")
|
||||
|
||||
err = p.ToLua(L, node.Value)
|
||||
// Set filename global
|
||||
L.SetGlobal("file", lua.LString(filename))
|
||||
|
||||
// Convert JSON data to Lua table
|
||||
luaTable, err := ToLuaTable(L, jsonData)
|
||||
if err != nil {
|
||||
return content, len(nodes), 0, fmt.Errorf("error converting to Lua: %v", err)
|
||||
processJsonLogger.Error("Failed to convert JSON to Lua table: %v", err)
|
||||
return commands, fmt.Errorf("failed to convert JSON to Lua table: %v", err)
|
||||
}
|
||||
log.Printf("Converted node value to Lua: %v", node.Value)
|
||||
|
||||
// Execute Lua script
|
||||
log.Printf("Executing Lua script: %s", luaExpr)
|
||||
// Set the JSON data as a global variable
|
||||
L.SetGlobal("data", luaTable)
|
||||
processJsonLogger.Debug("Set JSON data as Lua global 'data'")
|
||||
|
||||
// Build and execute Lua script for JSON mode
|
||||
luaExpr := BuildJSONLuaScript(command.Lua)
|
||||
processJsonLogger.Debug("Built Lua script from expression: %q", command.Lua)
|
||||
processJsonLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
return content, len(nodes), 0, fmt.Errorf("error executing Lua %s: %v", luaExpr, err)
|
||||
processJsonLogger.Error("Lua script execution failed: %v\nScript: %s", err, utils.LimitString(luaExpr, 200))
|
||||
return commands, fmt.Errorf("lua script execution failed: %v", err)
|
||||
}
|
||||
log.Println("Lua script executed successfully.")
|
||||
processJsonLogger.Debug("Lua script executed successfully")
|
||||
|
||||
// Get modified value
|
||||
result, err := p.FromLua(L)
|
||||
// Check if modification flag is set
|
||||
modifiedVal := L.GetGlobal("modified")
|
||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||
processJsonLogger.Debug("Skipping - no modifications indicated by Lua script")
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// Get the modified data from Lua
|
||||
modifiedData := L.GetGlobal("data")
|
||||
if modifiedData.Type() != lua.LTTable {
|
||||
processJsonLogger.Error("Expected 'data' to be a table after Lua processing, got %s", modifiedData.Type().String())
|
||||
return commands, fmt.Errorf("expected 'data' to be a table after Lua processing")
|
||||
}
|
||||
|
||||
// Convert back to Go interface
|
||||
goData, err := FromLua(L, modifiedData)
|
||||
if err != nil {
|
||||
return content, len(nodes), 0, fmt.Errorf("error getting result from Lua: %v", err)
|
||||
processJsonLogger.Error("Failed to convert Lua table back to Go: %v", err)
|
||||
return commands, fmt.Errorf("failed to convert Lua table back to Go: %v", err)
|
||||
}
|
||||
log.Printf("Retrieved modified value from Lua: %v", result)
|
||||
|
||||
// Apply the modification to the JSON data
|
||||
err = p.updateJSONValue(jsonData, node.Path, result)
|
||||
commands, err = applyJSONChanges(content, jsonData, goData)
|
||||
if err != nil {
|
||||
return content, len(nodes), 0, fmt.Errorf("error updating JSON: %v", err)
|
||||
}
|
||||
log.Printf("Updated JSON at path: %s with new value: %v", node.Path, result)
|
||||
processJsonLogger.Error("Failed to apply JSON changes: %v", err)
|
||||
return commands, fmt.Errorf("failed to apply JSON changes: %v", err)
|
||||
}
|
||||
|
||||
// Convert the modified JSON back to a string with same formatting
|
||||
var jsonBytes []byte
|
||||
if indent, err := detectJsonIndentation(content); err == nil && indent != "" {
|
||||
// Use detected indentation for output formatting
|
||||
jsonBytes, err = json.MarshalIndent(jsonData, "", indent)
|
||||
} else {
|
||||
// Fall back to standard 2-space indent
|
||||
jsonBytes, err = json.MarshalIndent(jsonData, "", " ")
|
||||
}
|
||||
|
||||
// Report every matched node as both matched and modified
|
||||
return string(jsonBytes), len(nodes), len(nodes), nil
|
||||
processJsonLogger.Debug("Total JSON processing time: %v", time.Since(startTime))
|
||||
processJsonLogger.Debug("Generated %d total modifications", len(commands))
|
||||
return commands, nil
|
||||
}
|
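
The surgical-edit approach above relies on gjson reporting the byte offset (Index) and raw text (Raw) of each value, so a replacement can be spliced into the original document without re-marshalling and reformatting it. A minimal sketch of that mechanism; the path and new value here are illustrative only.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	content := `{"items":[{"value":10},{"value":20}]}`

	// gjson gives the position (Index) and raw bytes (Raw) of the matched
	// value, which is enough to splice in a replacement in place.
	res := gjson.Get(content, "items.1.value")
	if res.Exists() {
		from := res.Index
		to := from + len(res.Raw)
		content = content[:from] + "40" + content[to:]
	}
	fmt.Println(content) // {"items":[{"value":10},{"value":40}]}
}
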
||||
|
||||
// detectJsonIndentation tries to determine the indentation used in the original JSON
|
||||
func detectJsonIndentation(content string) (string, error) {
|
||||
lines := strings.Split(content, "\n")
|
||||
if len(lines) < 2 {
|
||||
return "", fmt.Errorf("not enough lines to detect indentation")
|
||||
// applyJSONChanges compares original and modified data and applies changes surgically
|
||||
func applyJSONChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
||||
var commands []utils.ReplaceCommand
|
||||
|
||||
appliedCommands, err := applyChanges(content, originalData, modifiedData)
|
||||
if err == nil && len(appliedCommands) > 0 {
|
||||
return appliedCommands, nil
|
||||
}
|
||||
|
||||
// Look for the first indented line
|
||||
for i := 1; i < len(lines); i++ {
|
||||
line := lines[i]
|
||||
trimmed := strings.TrimSpace(line)
|
||||
if trimmed == "" {
|
||||
return commands, fmt.Errorf("failed to make any changes to the json")
|
||||
}
|
||||
|
||||
// applyChanges attempts to make surgical changes while preserving exact formatting
|
||||
func applyChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
||||
var commands []utils.ReplaceCommand
|
||||
|
||||
// Find all changes between original and modified data
|
||||
changes := findDeepChanges("", originalData, modifiedData)
|
||||
|
||||
jsonLogger.Debug("applyChanges: Found %d changes: %v", len(changes), changes)
|
||||
|
||||
if len(changes) == 0 {
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// Sort removal operations by index in descending order to avoid index shifting
|
||||
var removals []string
|
||||
var additions []string
|
||||
var valueChanges []string
|
||||
|
||||
for path := range changes {
|
||||
if strings.HasSuffix(path, "@remove") {
|
||||
removals = append(removals, path)
|
||||
} else if strings.HasSuffix(path, "@add") {
|
||||
additions = append(additions, path)
|
||||
} else {
|
||||
valueChanges = append(valueChanges, path)
|
||||
}
|
||||
}
|
||||
|
||||
jsonLogger.Debug("applyChanges: %d removals, %d additions, %d value changes", len(removals), len(additions), len(valueChanges))
|
||||
|
||||
// Apply removals first (from end to beginning to avoid index shifting)
|
||||
for _, removalPath := range removals {
|
||||
actualPath := strings.TrimSuffix(removalPath, "@remove")
|
||||
index := extractIndexFromRemovalPath(removalPath)
|
||||
arrayPath := getArrayPathFromElementPath(actualPath)
|
||||
|
||||
// Get the array element to remove
|
||||
result := gjson.Get(content, actualPath)
|
||||
if !result.Exists() {
|
||||
continue
|
||||
}
|
||||
|
||||
// Calculate leading whitespace
|
||||
indent := line[:len(line)-len(trimmed)]
|
||||
if len(indent) > 0 {
|
||||
return indent, nil
|
||||
// Find the exact byte range to remove (including comma/formatting)
|
||||
startPos, endPos := findArrayElementRemovalRange(content, arrayPath, index)
|
||||
if startPos >= 0 && endPos > startPos {
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: startPos,
|
||||
To: endPos,
|
||||
With: "", // Remove the element
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("no indentation detected")
|
||||
// Apply additions (new fields)
|
||||
for _, additionPath := range additions {
|
||||
actualPath := strings.TrimSuffix(additionPath, "@add")
|
||||
newValue := changes[additionPath]
|
||||
|
||||
jsonLogger.Debug("Processing addition: path=%s, value=%v", actualPath, newValue)
|
||||
|
||||
// Find the parent object to add the field to
|
||||
parentPath := getParentPath(actualPath)
|
||||
fieldName := getFieldName(actualPath)
|
||||
|
||||
jsonLogger.Debug("Parent path: %s, field name: %s", parentPath, fieldName)
|
||||
|
||||
// Get the parent object
|
||||
var parentResult gjson.Result
|
||||
if parentPath == "" {
|
||||
// Adding to root object - get the entire JSON
|
||||
parentResult = gjson.Parse(content)
|
||||
} else {
|
||||
parentResult = gjson.Get(content, parentPath)
|
||||
}
|
||||
|
||||
if !parentResult.Exists() {
|
||||
jsonLogger.Debug("Parent path %s does not exist, skipping", parentPath)
|
||||
continue
|
||||
}
|
||||
|
||||
// Find where to insert the new field (at the end of the object)
|
||||
startPos := int(parentResult.Index + len(parentResult.Raw) - 1) // Before closing brace
|
||||
|
||||
jsonLogger.Debug("Inserting at pos %d", startPos)
|
||||
|
||||
// Convert the new value to JSON string
|
||||
newValueStr := convertValueToJSONString(newValue)
|
||||
|
||||
// Insert the new field
|
||||
insertText := fmt.Sprintf(`,"%s":%s`, fieldName, newValueStr)
|
||||
|
||||
jsonLogger.Debug("Inserting text: %q", insertText)
|
||||
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: startPos,
|
||||
To: startPos,
|
||||
With: insertText,
|
||||
})
|
||||
|
||||
jsonLogger.Debug("Added addition command: From=%d, To=%d, With=%q", startPos, startPos, insertText)
|
||||
}
|
||||
|
||||
// Apply value changes (in reverse order to avoid position shifting)
|
||||
sort.Slice(valueChanges, func(i, j int) bool {
|
||||
// Get positions for comparison
|
||||
resultI := gjson.Get(content, valueChanges[i])
|
||||
resultJ := gjson.Get(content, valueChanges[j])
|
||||
return resultI.Index > resultJ.Index // Descending order
|
||||
})
|
||||
|
||||
for _, path := range valueChanges {
|
||||
newValue := changes[path]
|
||||
|
||||
jsonLogger.Debug("Processing value change: path=%s, value=%v", path, newValue)
|
||||
|
||||
// Get the current value and its position in the original JSON
|
||||
result := gjson.Get(content, path)
|
||||
if !result.Exists() {
|
||||
jsonLogger.Debug("Path %s does not exist, skipping", path)
|
||||
continue // Skip if path doesn't exist
|
||||
}
|
||||
|
||||
// Get the exact byte positions of this value
|
||||
startPos := result.Index
|
||||
endPos := startPos + len(result.Raw)
|
||||
|
||||
jsonLogger.Debug("Found value at pos %d-%d: %q", startPos, endPos, result.Raw)
|
||||
|
||||
// Convert the new value to JSON string
|
||||
newValueStr := convertValueToJSONString(newValue)
|
||||
|
||||
jsonLogger.Debug("Converting to: %q", newValueStr)
|
||||
|
||||
// Create a replacement command for this specific value
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: int(startPos),
|
||||
To: int(endPos),
|
||||
With: newValueStr,
|
||||
})
|
||||
|
||||
jsonLogger.Debug("Added command: From=%d, To=%d, With=%q", int(startPos), int(endPos), newValueStr)
|
||||
}
|
||||
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// / Selects from the root node
|
||||
// // Selects nodes in the document from the current node that match the selection no matter where they are
|
||||
// . Selects the current node
|
||||
// @ Selects attributes
|
||||
// extractIndexFromRemovalPath extracts the array index from a removal path like "Rows.0.Inputs.1@remove"
|
||||
func extractIndexFromRemovalPath(path string) int {
|
||||
parts := strings.Split(strings.TrimSuffix(path, "@remove"), ".")
|
||||
if len(parts) > 0 {
|
||||
lastPart := parts[len(parts)-1]
|
||||
if index, err := strconv.Atoi(lastPart); err == nil {
|
||||
return index
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// /bookstore/* Selects all the child element nodes of the bookstore element
|
||||
// //* Selects all elements in the document
|
||||
// getArrayPathFromElementPath converts "Rows.0.Inputs.1" to "Rows.0.Inputs"
|
||||
func getArrayPathFromElementPath(elementPath string) string {
|
||||
parts := strings.Split(elementPath, ".")
|
||||
if len(parts) > 0 {
|
||||
return strings.Join(parts[:len(parts)-1], ".")
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// /bookstore/book[1] Selects the first book element that is the child of the bookstore element.
|
||||
// /bookstore/book[last()] Selects the last book element that is the child of the bookstore element
|
||||
// /bookstore/book[last()-1] Selects the last but one book element that is the child of the bookstore element
|
||||
// /bookstore/book[position()<3] Selects the first two book elements that are children of the bookstore element
|
||||
// //title[@lang] Selects all the title elements that have an attribute named lang
|
||||
// //title[@lang='en'] Selects all the title elements that have a "lang" attribute with a value of "en"
|
||||
// /bookstore/book[price>35.00] Selects all the book elements of the bookstore element that have a price element with a value greater than 35.00
|
||||
// /bookstore/book[price>35.00]/title Selects all the title elements of the book elements of the bookstore element that have a price element with a value greater than 35.00
|
||||
// getParentPath extracts the parent path from a full path like "Rows.0.Inputs.1"
|
||||
func getParentPath(fullPath string) string {
|
||||
parts := strings.Split(fullPath, ".")
|
||||
if len(parts) > 0 {
|
||||
return strings.Join(parts[:len(parts)-1], ".")
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// updateJSONValue updates a value in the JSON structure based on its JSONPath
|
||||
func (p *JSONProcessor) updateJSONValue(jsonData interface{}, path string, newValue interface{}) error {
|
||||
err := jsonpath.Set(jsonData, path, newValue)
|
||||
// getFieldName extracts the field name from a full path like "Rows.0.Inputs.1"
|
||||
func getFieldName(fullPath string) string {
|
||||
parts := strings.Split(fullPath, ".")
|
||||
if len(parts) > 0 {
|
||||
return parts[len(parts)-1]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// convertValueToJSONString converts a Go interface{} to a JSON string representation
|
||||
func convertValueToJSONString(value interface{}) string {
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
return `"` + strings.ReplaceAll(v, `"`, `\"`) + `"`
|
||||
case float64:
|
||||
if v == float64(int64(v)) {
|
||||
return strconv.FormatInt(int64(v), 10)
|
||||
}
|
||||
return strconv.FormatFloat(v, 'f', -1, 64)
|
||||
case bool:
|
||||
return strconv.FormatBool(v)
|
||||
case nil:
|
||||
return "null"
|
||||
default:
|
||||
// For complex types, we need to avoid json.Marshal
|
||||
// This should not happen if we're doing true surgical edits
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// findArrayElementRemovalRange finds the exact byte range to remove for an array element
|
||||
func findArrayElementRemovalRange(content, arrayPath string, elementIndex int) (int, int) {
|
||||
// Get the array using gjson
|
||||
arrayResult := gjson.Get(content, arrayPath)
|
||||
if !arrayResult.Exists() || !arrayResult.IsArray() {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
// Get all array elements
|
||||
elements := arrayResult.Array()
|
||||
if elementIndex >= len(elements) {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
// Get the target element
|
||||
elementResult := elements[elementIndex]
|
||||
startPos := int(elementResult.Index)
|
||||
endPos := int(elementResult.Index + len(elementResult.Raw))
|
||||
|
||||
// Handle comma removal properly
|
||||
if elementIndex == 0 && len(elements) > 1 {
|
||||
// First element but not the only one - remove comma after
|
||||
for i := endPos; i < len(content) && i < endPos+50; i++ {
|
||||
if content[i] == ',' {
|
||||
endPos = i + 1
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if elementIndex == len(elements)-1 && len(elements) > 1 {
|
||||
// Last element and not the only one - remove comma before
|
||||
prevElementEnd := int(elements[elementIndex-1].Index + len(elements[elementIndex-1].Raw))
|
||||
for i := prevElementEnd; i < startPos && i < len(content); i++ {
|
||||
if content[i] == ',' {
|
||||
startPos = i
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
// If it's the only element, don't remove any commas
|
||||
|
||||
return startPos, endPos
|
||||
}
|
||||
|
||||
// findDeepChanges recursively finds all paths that need to be changed
|
||||
func findDeepChanges(basePath string, original, modified interface{}) map[string]interface{} {
|
||||
changes := make(map[string]interface{})
|
||||
|
||||
switch orig := original.(type) {
|
||||
case map[string]interface{}:
|
||||
if mod, ok := modified.(map[string]interface{}); ok {
|
||||
// Check for new keys added in modified data
|
||||
for key, modValue := range mod {
|
||||
var currentPath string
|
||||
if basePath == "" {
|
||||
currentPath = key
|
||||
} else {
|
||||
currentPath = basePath + "." + key
|
||||
}
|
||||
|
||||
if origValue, exists := orig[key]; exists {
|
||||
// Key exists in both, check if value changed
|
||||
switch modValue.(type) {
|
||||
case map[string]interface{}, []interface{}:
|
||||
// Recursively check nested structures
|
||||
nestedChanges := findDeepChanges(currentPath, origValue, modValue)
|
||||
for nestedPath, nestedValue := range nestedChanges {
|
||||
changes[nestedPath] = nestedValue
|
||||
}
|
||||
default:
|
||||
// Primitive value - check if changed
|
||||
if !deepEqual(origValue, modValue) {
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// New key added - mark for addition
|
||||
changes[currentPath+"@add"] = modValue
|
||||
}
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
if mod, ok := modified.([]interface{}); ok {
|
||||
// Handle array changes by detecting specific element operations
|
||||
if len(orig) != len(mod) {
|
||||
// Array length changed - detect if it's element removal
|
||||
if len(orig) > len(mod) {
|
||||
// Element(s) removed - find which ones by comparing content
|
||||
removedIndices := findRemovedArrayElements(orig, mod)
|
||||
for _, removedIndex := range removedIndices {
|
||||
var currentPath string
|
||||
if basePath == "" {
|
||||
currentPath = fmt.Sprintf("%d@remove", removedIndex)
|
||||
} else {
|
||||
currentPath = fmt.Sprintf("%s.%d@remove", basePath, removedIndex)
|
||||
}
|
||||
changes[currentPath] = nil // Mark for removal
|
||||
}
|
||||
} else {
|
||||
// Elements added - more complex, skip for now
|
||||
}
|
||||
} else {
|
||||
// Same length - check individual elements for value changes
|
||||
for i, modValue := range mod {
|
||||
var currentPath string
|
||||
if basePath == "" {
|
||||
currentPath = strconv.Itoa(i)
|
||||
} else {
|
||||
currentPath = basePath + "." + strconv.Itoa(i)
|
||||
}
|
||||
|
||||
if i < len(orig) {
|
||||
// Index exists in both, check if value changed
|
||||
switch modValue.(type) {
|
||||
case map[string]interface{}, []interface{}:
|
||||
// Recursively check nested structures
|
||||
nestedChanges := findDeepChanges(currentPath, orig[i], modValue)
|
||||
for nestedPath, nestedValue := range nestedChanges {
|
||||
changes[nestedPath] = nestedValue
|
||||
}
|
||||
default:
|
||||
// Primitive value - check if changed
|
||||
if !deepEqual(orig[i], modValue) {
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
default:
|
||||
// For primitive types, compare directly
|
||||
if !deepEqual(original, modified) {
|
||||
if basePath == "" {
|
||||
changes[""] = modified
|
||||
} else {
|
||||
changes[basePath] = modified
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
|
||||
// findRemovedArrayElements compares two arrays and returns indices of removed elements
|
||||
func findRemovedArrayElements(original, modified []interface{}) []int {
|
||||
var removedIndices []int
|
||||
|
||||
// Simple approach: find elements in original that don't exist in modified
|
||||
for i, origElement := range original {
|
||||
found := false
|
||||
for _, modElement := range modified {
|
||||
if deepEqual(origElement, modElement) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
removedIndices = append(removedIndices, i)
|
||||
}
|
||||
}
|
||||
|
||||
return removedIndices
|
||||
}
|
||||
|
||||
// deepEqual performs deep comparison of two values
func deepEqual(a, b interface{}) bool {
if a == nil && b == nil {
return true
}
if a == nil || b == nil {
return false
}

switch av := a.(type) {
case map[string]interface{}:
if bv, ok := b.(map[string]interface{}); ok {
if len(av) != len(bv) {
return false
}
for k, v := range av {
if !deepEqual(v, bv[k]) {
return false
}
}
return true
}
return false
case []interface{}:
if bv, ok := b.([]interface{}); ok {
if len(av) != len(bv) {
return false
}
for i, v := range av {
if !deepEqual(v, bv[i]) {
return false
}
}
return true
}
return false
default:
return a == b
}
}

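// A few concrete cases for the helpers above (illustrative sketch). deepEqual compares
// values as decoded from JSON (maps, slices, primitives); primitives fall back to ==,
// so numbers of different Go types compare unequal.
//
//	deepEqual(map[string]interface{}{"a": 1.0}, map[string]interface{}{"a": 1.0})   // true
//	deepEqual([]interface{}{"x", 2.0}, []interface{}{"x", 2.0})                     // true
//	deepEqual(1, 1.0)                                                               // false: int vs float64
//	findRemovedArrayElements([]interface{}{"a", "b", "c"}, []interface{}{"a", "c"}) // []int{1}
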
// ToLuaTable converts a Go interface{} to a Lua table recursively
func ToLuaTable(L *lua.LState, data interface{}) (*lua.LTable, error) {
toLuaTableLogger := jsonLogger.WithPrefix("ToLuaTable")
toLuaTableLogger.Debug("Converting Go interface to Lua table")
toLuaTableLogger.Trace("Input data type: %T", data)

switch v := data.(type) {
case map[string]interface{}:
toLuaTableLogger.Debug("Converting map to Lua table")
table := L.CreateTable(0, len(v))
for key, value := range v {
luaValue, err := ToLuaValue(L, value)
if err != nil {
toLuaTableLogger.Error("Failed to convert map value for key %q: %v", key, err)
return nil, err
}
table.RawSetString(key, luaValue)
}
return table, nil

case []interface{}:
toLuaTableLogger.Debug("Converting slice to Lua table")
table := L.CreateTable(len(v), 0)
for i, value := range v {
luaValue, err := ToLuaValue(L, value)
if err != nil {
toLuaTableLogger.Error("Failed to convert slice value at index %d: %v", i, err)
return nil, err
}
table.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
}
return table, nil

case string:
toLuaTableLogger.Debug("Converting string to Lua string")
return nil, fmt.Errorf("expected table or array, got string")

case float64:
toLuaTableLogger.Debug("Converting float64 to Lua number")
return nil, fmt.Errorf("expected table or array, got number")

case bool:
toLuaTableLogger.Debug("Converting bool to Lua boolean")
return nil, fmt.Errorf("expected table or array, got boolean")

case nil:
toLuaTableLogger.Debug("Converting nil to Lua nil")
return nil, fmt.Errorf("expected table or array, got nil")

default:
toLuaTableLogger.Error("Unsupported type for Lua table conversion: %T", v)
return nil, fmt.Errorf("unsupported type for Lua table conversion: %T", v)
}
}

// ToLua converts JSON values to Lua variables by exposing them as the Lua global "v"
func (p *JSONProcessor) ToLua(L *lua.LState, data interface{}) error {
table, err := ToLuaTable(L, data)
if err != nil {
return err
}
L.SetGlobal("v", table)
return nil
}

// FromLua retrieves values from Lua
func (p *JSONProcessor) FromLua(L *lua.LState) (interface{}, error) {
luaValue := L.GetGlobal("v")
return FromLua(L, luaValue)
}

// ToLuaValue converts a Go interface{} to a Lua value
func ToLuaValue(L *lua.LState, data interface{}) (lua.LValue, error) {
toLuaValueLogger := jsonLogger.WithPrefix("ToLuaValue")
toLuaValueLogger.Debug("Converting Go interface to Lua value")
toLuaValueLogger.Trace("Input data type: %T", data)

switch v := data.(type) {
case map[string]interface{}:
toLuaValueLogger.Debug("Converting map to Lua table")
table := L.CreateTable(0, len(v))
for key, value := range v {
luaValue, err := ToLuaValue(L, value)
if err != nil {
toLuaValueLogger.Error("Failed to convert map value for key %q: %v", key, err)
return lua.LNil, err
}
table.RawSetString(key, luaValue)
}
return table, nil

case []interface{}:
toLuaValueLogger.Debug("Converting slice to Lua table")
table := L.CreateTable(len(v), 0)
for i, value := range v {
luaValue, err := ToLuaValue(L, value)
if err != nil {
toLuaValueLogger.Error("Failed to convert slice value at index %d: %v", i, err)
return lua.LNil, err
}
table.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
}
return table, nil

case string:
toLuaValueLogger.Debug("Converting string to Lua string")
return lua.LString(v), nil

case float64:
toLuaValueLogger.Debug("Converting float64 to Lua number")
return lua.LNumber(v), nil

case bool:
toLuaValueLogger.Debug("Converting bool to Lua boolean")
return lua.LBool(v), nil

case nil:
toLuaValueLogger.Debug("Converting nil to Lua nil")
return lua.LNil, nil

default:
toLuaValueLogger.Error("Unsupported type for Lua value conversion: %T", v)
return lua.LNil, fmt.Errorf("unsupported type for Lua value conversion: %T", v)
}
}

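// Usage sketch for the conversion helpers above. Names here are illustrative and the
// snippet assumes it lives in the same package as ToLuaTable, FromLua and NewLuaState;
// error handling is trimmed to keep it short.
func exampleJSONToLuaRoundTrip() {
L, err := NewLuaState()
if err != nil {
panic(err)
}
defer L.Close()

// A value shaped like decoded JSON (maps, slices, float64, string, bool)
data := map[string]interface{}{
"name":  "widget",
"price": 9.99,
}

tbl, err := ToLuaTable(L, data)
if err != nil {
panic(err)
}
L.SetGlobal("v", tbl)

// Mutate the table from Lua, then pull it back into Go
if err := L.DoString(`v.price = v.price * 2`); err != nil {
panic(err)
}
result, _ := FromLua(L, L.GetGlobal("v"))
fmt.Println(result) // map[name:widget price:19.98]
}
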
File diff suppressed because it is too large
@@ -1,495 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// JSONStep represents a single step in a JSONPath query
|
||||
type JSONStep struct {
|
||||
Type StepType
|
||||
Key string // For Child/RecursiveDescent
|
||||
Index int // For Index (use -1 for wildcard "*")
|
||||
}
|
||||
|
||||
// JSONNode represents a value in the JSON data with its path
|
||||
type JSONNode struct {
|
||||
Value interface{} // The value found at the path
|
||||
Path string // The exact JSONPath where the value was found
|
||||
}
|
||||
|
||||
// StepType defines the types of steps in a JSONPath
|
||||
type StepType int
|
||||
|
||||
const (
|
||||
RootStep StepType = iota // $ - The root element
|
||||
ChildStep // .key - Direct child access
|
||||
RecursiveDescentStep // ..key - Recursive search for key
|
||||
WildcardStep // .* - All children of an object
|
||||
IndexStep // [n] - Array index access (or [*] for all elements)
|
||||
)
|
||||
|
||||
// TraversalMode determines how the traversal behaves
|
||||
type TraversalMode int
|
||||
|
||||
const (
|
||||
CollectMode TraversalMode = iota // Just collect matched nodes
|
||||
ModifyFirstMode // Modify first matching node
|
||||
ModifyAllMode // Modify all matching nodes
|
||||
)
|
||||
|
||||
// ParseJSONPath parses a JSONPath string into a sequence of steps
|
||||
func ParseJSONPath(path string) ([]JSONStep, error) {
|
||||
if len(path) == 0 || path[0] != '$' {
|
||||
return nil, fmt.Errorf("path must start with $; received: %q", path)
|
||||
}
|
||||
|
||||
steps := []JSONStep{}
|
||||
i := 0
|
||||
|
||||
for i < len(path) {
|
||||
switch path[i] {
|
||||
case '$':
|
||||
steps = append(steps, JSONStep{Type: RootStep})
|
||||
i++
|
||||
case '.':
|
||||
i++
|
||||
if i < len(path) && path[i] == '.' {
|
||||
// Recursive descent
|
||||
i++
|
||||
key, nextPos := readKey(path, i)
|
||||
steps = append(steps, JSONStep{Type: RecursiveDescentStep, Key: key})
|
||||
i = nextPos
|
||||
} else {
|
||||
// Child step or wildcard
|
||||
key, nextPos := readKey(path, i)
|
||||
if key == "*" {
|
||||
steps = append(steps, JSONStep{Type: WildcardStep})
|
||||
} else {
|
||||
steps = append(steps, JSONStep{Type: ChildStep, Key: key})
|
||||
}
|
||||
i = nextPos
|
||||
}
|
||||
case '[':
|
||||
// Index step
|
||||
i++
|
||||
indexStr, nextPos := readIndex(path, i)
|
||||
if indexStr == "*" {
|
||||
steps = append(steps, JSONStep{Type: IndexStep, Index: -1})
|
||||
} else {
|
||||
index, err := strconv.Atoi(indexStr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid index: %s; error: %w", indexStr, err)
|
||||
}
|
||||
steps = append(steps, JSONStep{Type: IndexStep, Index: index})
|
||||
}
|
||||
i = nextPos + 1 // Skip closing ]
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected character: %c at position %d; path: %q", path[i], i, path)
|
||||
}
|
||||
}
|
||||
|
||||
return steps, nil
|
||||
}
|
||||
|
||||
// readKey extracts a key name from the path
|
||||
func readKey(path string, start int) (string, int) {
|
||||
i := start
|
||||
for ; i < len(path); i++ {
|
||||
if path[i] == '.' || path[i] == '[' {
|
||||
break
|
||||
}
|
||||
}
|
||||
return path[start:i], i
|
||||
}
|
||||
|
||||
// readIndex extracts an array index or wildcard from the path
|
||||
func readIndex(path string, start int) (string, int) {
|
||||
i := start
|
||||
for ; i < len(path); i++ {
|
||||
if path[i] == ']' {
|
||||
break
|
||||
}
|
||||
}
|
||||
return path[start:i], i
|
||||
}
|
||||
|
||||
// Get retrieves values with their paths from data at the specified JSONPath
|
||||
// Each returned JSONNode contains both the value and its exact path in the data structure
|
||||
func Get(data interface{}, path string) ([]JSONNode, error) {
|
||||
steps, err := ParseJSONPath(path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
|
||||
}
|
||||
|
||||
results := []JSONNode{}
|
||||
err = traverseWithPaths(data, steps, &results, "$")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to traverse JSONPath %q: %w", path, err)
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// Set updates the value at the specified JSONPath in the original data structure.
|
||||
// It only modifies the first matching node.
|
||||
func Set(data interface{}, path string, value interface{}) error {
|
||||
steps, err := ParseJSONPath(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
|
||||
}
|
||||
|
||||
if len(steps) <= 1 {
|
||||
return fmt.Errorf("cannot set root node; the provided path %q is invalid", path)
|
||||
}
|
||||
|
||||
success := false
|
||||
err = setWithPath(data, steps, &success, value, "$", ModifyFirstMode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetAll updates all matching values at the specified JSONPath.
|
||||
func SetAll(data interface{}, path string, value interface{}) error {
|
||||
steps, err := ParseJSONPath(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
|
||||
}
|
||||
|
||||
if len(steps) <= 1 {
|
||||
return fmt.Errorf("cannot set root node; the provided path %q is invalid", path)
|
||||
}
|
||||
|
||||
success := false
|
||||
err = setWithPath(data, steps, &success, value, "$", ModifyAllMode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// setWithPath modifies values while tracking paths
|
||||
func setWithPath(node interface{}, steps []JSONStep, success *bool, value interface{}, currentPath string, mode TraversalMode) error {
|
||||
if node == nil || *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Skip root step
|
||||
actualSteps := steps
|
||||
if len(steps) > 0 && steps[0].Type == RootStep {
|
||||
if len(steps) == 1 {
|
||||
return fmt.Errorf("cannot set root node; the provided path %q is invalid", currentPath)
|
||||
}
|
||||
actualSteps = steps[1:]
|
||||
}
|
||||
|
||||
// Process the first step
|
||||
if len(actualSteps) == 0 {
|
||||
return fmt.Errorf("cannot set root node; no steps provided for path %q", currentPath)
|
||||
}
|
||||
|
||||
step := actualSteps[0]
|
||||
remainingSteps := actualSteps[1:]
|
||||
isLastStep := len(remainingSteps) == 0
|
||||
|
||||
switch step.Type {
|
||||
case ChildStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
childPath := currentPath + "." + step.Key
|
||||
|
||||
if isLastStep {
|
||||
// We've reached the target, set the value
|
||||
m[step.Key] = value
|
||||
*success = true
|
||||
return nil
|
||||
}
|
||||
|
||||
// Create intermediate nodes if necessary
|
||||
child, exists := m[step.Key]
|
||||
if !exists {
|
||||
// Create missing intermediate node
|
||||
if len(remainingSteps) > 0 && remainingSteps[0].Type == IndexStep {
|
||||
child = []interface{}{}
|
||||
} else {
|
||||
child = map[string]interface{}{}
|
||||
}
|
||||
m[step.Key] = child
|
||||
}
|
||||
|
||||
err := setWithPath(child, remainingSteps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
|
||||
case IndexStep:
|
||||
arr, ok := node.([]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not an array; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
// Handle wildcard index
|
||||
if step.Index == -1 {
|
||||
for i, item := range arr {
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
if isLastStep {
|
||||
arr[i] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle specific index
|
||||
if step.Index >= 0 && step.Index < len(arr) {
|
||||
item := arr[step.Index]
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
|
||||
if isLastStep {
|
||||
arr[step.Index] = value
|
||||
*success = true
|
||||
} else {
|
||||
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case RecursiveDescentStep:
|
||||
// For recursive descent, first check direct match at this level
|
||||
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
|
||||
if val, exists := m[step.Key]; exists {
|
||||
directPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
m[step.Key] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(val, remainingSteps, success, value, directPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", directPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then continue recursion to all children
|
||||
switch n := node.(type) {
|
||||
case map[string]interface{}:
|
||||
for k, v := range n {
|
||||
childPath := currentPath + "." + k
|
||||
// Skip keys we've already processed directly
|
||||
if step.Key != "*" && k == step.Key {
|
||||
continue
|
||||
}
|
||||
err := setWithPath(v, steps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
for i, v := range n {
|
||||
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
err := setWithPath(v, steps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case WildcardStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
for k, v := range m {
|
||||
childPath := currentPath + "." + k
|
||||
if isLastStep {
|
||||
m[k] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(v, remainingSteps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// traverseWithPaths tracks both nodes and their paths during traversal
|
||||
func traverseWithPaths(node interface{}, steps []JSONStep, results *[]JSONNode, currentPath string) error {
|
||||
if len(steps) == 0 || node == nil {
|
||||
return fmt.Errorf("cannot traverse with empty steps or nil node; steps length: %d, node: %v", len(steps), node)
|
||||
}
|
||||
|
||||
// Skip root step
|
||||
actualSteps := steps
|
||||
if steps[0].Type == RootStep {
|
||||
if len(steps) == 1 {
|
||||
*results = append(*results, JSONNode{Value: node, Path: currentPath})
|
||||
return nil
|
||||
}
|
||||
actualSteps = steps[1:]
|
||||
}
|
||||
|
||||
// Process the first step
|
||||
step := actualSteps[0]
|
||||
remainingSteps := actualSteps[1:]
|
||||
isLastStep := len(remainingSteps) == 0
|
||||
|
||||
switch step.Type {
|
||||
case ChildStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not a map; actual type: %T", node)
|
||||
}
|
||||
|
||||
child, exists := m[step.Key]
|
||||
if !exists {
|
||||
return fmt.Errorf("key not found: %s in node at path: %s", step.Key, currentPath)
|
||||
}
|
||||
|
||||
childPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: child, Path: childPath})
|
||||
} else {
|
||||
err := traverseWithPaths(child, remainingSteps, results, childPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
case IndexStep:
|
||||
arr, ok := node.([]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not an array; actual type: %T", node)
|
||||
}
|
||||
|
||||
// Handle wildcard index
|
||||
if step.Index == -1 {
|
||||
for i, item := range arr {
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: item, Path: itemPath})
|
||||
} else {
|
||||
err := traverseWithPaths(item, remainingSteps, results, itemPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle specific index
|
||||
if step.Index >= 0 && step.Index < len(arr) {
|
||||
item := arr[step.Index]
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: item, Path: itemPath})
|
||||
} else {
|
||||
err := traverseWithPaths(item, remainingSteps, results, itemPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("index %d out of bounds for array at path: %s", step.Index, currentPath)
|
||||
}
|
||||
|
||||
case RecursiveDescentStep:
|
||||
// For recursive descent, first check direct match at this level
|
||||
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
|
||||
if val, exists := m[step.Key]; exists {
|
||||
directPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: val, Path: directPath})
|
||||
} else {
|
||||
err := traverseWithPaths(val, remainingSteps, results, directPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", directPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For wildcard, collect this node
|
||||
if step.Key == "*" && isLastStep {
|
||||
*results = append(*results, JSONNode{Value: node, Path: currentPath})
|
||||
}
|
||||
|
||||
// Then continue recursion to all children
|
||||
switch n := node.(type) {
|
||||
case map[string]interface{}:
|
||||
for k, v := range n {
|
||||
childPath := currentPath + "." + k
|
||||
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
for i, v := range n {
|
||||
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case WildcardStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not a map; actual type: %T", node)
|
||||
}
|
||||
|
||||
for k, v := range m {
|
||||
childPath := currentPath + "." + k
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: v, Path: childPath})
|
||||
} else {
|
||||
err := traverseWithPaths(v, remainingSteps, results, childPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
@@ -1,577 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetWithPathsBasic(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data map[string]interface{}
|
||||
path string
|
||||
expected []JSONNode
|
||||
error bool
|
||||
}{
|
||||
{
|
||||
name: "simple property",
|
||||
data: map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
path: "$.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested property",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
},
|
||||
path: "$.user.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.user.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array access",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[1].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[*].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.users[0].name"},
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive descent",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"email": "admin@example.com",
|
||||
},
|
||||
},
|
||||
path: "$..email",
|
||||
expected: []JSONNode{
|
||||
{Value: "john@example.com", Path: "$.user.profile.email"},
|
||||
{Value: "admin@example.com", Path: "$.admin.email"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nonexistent path",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
},
|
||||
path: "$.user.email",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(tt.data, tt.path)
|
||||
if err != nil {
|
||||
if !tt.error {
|
||||
t.Errorf("GetWithPaths() returned error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// For nonexistent path, we expect empty slice
|
||||
if tt.name == "nonexistent path" {
|
||||
if len(result) > 0 {
|
||||
t.Errorf("GetWithPaths() returned %v, expected empty result", result)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For wildcard results, we need to check containment rather than exact order
|
||||
if tt.name == "wildcard" || tt.name == "recursive descent" {
|
||||
// For each expected item, check if it exists in the results by both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
if reflect.DeepEqual(r.Value, expected.Value) && r.Path == expected.Path {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("GetWithPaths() missing expected value: %v with path: %s", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Otherwise check exact equality of both values and paths
|
||||
for i, expected := range tt.expected {
|
||||
if !reflect.DeepEqual(result[i].Value, expected.Value) {
|
||||
t.Errorf("GetWithPaths() value at [%d] = %v, expected %v", i, result[i].Value, expected.Value)
|
||||
}
|
||||
if result[i].Path != expected.Path {
|
||||
t.Errorf("GetWithPaths() path at [%d] = %s, expected %s", i, result[i].Path, expected.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSet(t *testing.T) {
|
||||
t.Run("simple property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
}
|
||||
err := Set(data, "$.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if data["name"] != "Jane" {
|
||||
t.Errorf("Set() failed: expected name to be 'Jane', got %v", data["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("nested property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
}
|
||||
err := Set(data, "$.user.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
user, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
if user["name"] != "Jane" {
|
||||
t.Errorf("Set() failed: expected user.name to be 'Jane', got %v", user["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("array element", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
}
|
||||
err := Set(data, "$.users[0].name", "Bob")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
user0, ok := users[0].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
if user0["name"] != "Bob" {
|
||||
t.Errorf("Set() failed: expected users[0].name to be 'Bob', got %v", user0["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("complex value", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
newProfile := map[string]interface{}{
|
||||
"email": "john.doe@example.com",
|
||||
"phone": "123-456-7890",
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.profile", newProfile)
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
profile, ok := userMap["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Profile is not a map")
|
||||
}
|
||||
|
||||
if profile["email"] != "john.doe@example.com" || profile["phone"] != "123-456-7890" {
|
||||
t.Errorf("Set() failed: expected profile to be updated with new values")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create new property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.email", "john@example.com")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
if email, exists := userMap["email"]; !exists || email != "john@example.com" {
|
||||
t.Errorf("Set() failed: expected user.email to be 'john@example.com', got %v", userMap["email"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create nested properties", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.contact.email", "john@example.com")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
contact, ok := userMap["contact"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Contact is not a map")
|
||||
}
|
||||
|
||||
if email, exists := contact["email"]; !exists || email != "john@example.com" {
|
||||
t.Errorf("Set() failed: expected user.contact.email to be 'john@example.com', got %v", contact["email"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create array and element", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
// This should create an empty addresses array, but won't be able to set index 0
|
||||
// since the array is empty
|
||||
err := Set(data, "$.user.addresses[0].street", "123 Main St")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("multiple targets (should only update first)", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"active": true},
|
||||
map[string]interface{}{"active": true},
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.users[*].active", false)
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
|
||||
user0, ok := users[0].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User0 is not a map")
|
||||
}
|
||||
|
||||
user1, ok := users[1].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User1 is not a map")
|
||||
}
|
||||
|
||||
// Only the first one should be changed
|
||||
if active, exists := user0["active"]; !exists || active != false {
|
||||
t.Errorf("Set() failed: expected users[0].active to be false, got %v", user0["active"])
|
||||
}
|
||||
|
||||
// The second one should remain unchanged
|
||||
if active, exists := user1["active"]; !exists || active != true {
|
||||
t.Errorf("Set() incorrectly modified users[1].active: expected true, got %v", user1["active"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("setting on root should fail", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
}
|
||||
|
||||
err := Set(data, "$", "Jane")
|
||||
if err == nil {
|
||||
t.Errorf("Set() returned no error, expected error for setting on root")
|
||||
return
|
||||
}
|
||||
|
||||
// Data should be unchanged
|
||||
if data["name"] != "John" {
|
||||
t.Errorf("Data was modified when setting on root")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSetAll(t *testing.T) {
|
||||
t.Run("simple property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
}
|
||||
err := SetAll(data, "$.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if data["name"] != "Jane" {
|
||||
t.Errorf("SetAll() failed: expected name to be 'Jane', got %v", data["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("all array elements", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"active": true},
|
||||
map[string]interface{}{"active": true},
|
||||
},
|
||||
}
|
||||
|
||||
err := SetAll(data, "$.users[*].active", false)
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
|
||||
// Both elements should be updated
|
||||
for i, user := range users {
|
||||
userMap, ok := user.(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User%d is not a map", i)
|
||||
}
|
||||
|
||||
if active, exists := userMap["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() failed: expected users[%d].active to be false, got %v", i, userMap["active"])
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("recursive descent", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"profile": map[string]interface{}{
|
||||
"active": true,
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"profile": map[string]interface{}{
|
||||
"active": true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := SetAll(data, "$..active", false)
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check user profile
|
||||
userProfile, ok := data["user"].(map[string]interface{})["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Failed to access user.profile")
|
||||
}
|
||||
if active, exists := userProfile["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() didn't update user.profile.active, got: %v", active)
|
||||
}
|
||||
|
||||
// Check admin profile
|
||||
adminProfile, ok := data["admin"].(map[string]interface{})["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Failed to access admin.profile")
|
||||
}
|
||||
if active, exists := adminProfile["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() didn't update admin.profile.active, got: %v", active)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWithPathsExtended(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data map[string]interface{}
|
||||
path string
|
||||
expected []JSONNode
|
||||
}{
|
||||
{
|
||||
name: "simple property",
|
||||
data: map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
path: "$.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested property",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
},
|
||||
path: "$.user.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.user.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array access",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[1].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[*].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.users[0].name"},
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive descent",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"email": "admin@example.com",
|
||||
},
|
||||
},
|
||||
path: "$..email",
|
||||
expected: []JSONNode{
|
||||
{Value: "john@example.com", Path: "$.user.profile.email"},
|
||||
{Value: "admin@example.com", Path: "$.admin.email"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(tt.data, tt.path)
|
||||
if err != nil {
|
||||
t.Errorf("GetWithPaths() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For each expected item, find its match in the results and verify both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
// Check if value matches
|
||||
if reflect.DeepEqual(r.Value, expected.Value) {
|
||||
found = true
|
||||
// Check if path matches
|
||||
if r.Path != expected.Path {
|
||||
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -1,318 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var testData = map[string]interface{}{
|
||||
"store": map[string]interface{}{
|
||||
"book": []interface{}{
|
||||
map[string]interface{}{
|
||||
"title": "The Fellowship of the Ring",
|
||||
"price": 22.99,
|
||||
},
|
||||
map[string]interface{}{
|
||||
"title": "The Two Towers",
|
||||
"price": 23.45,
|
||||
},
|
||||
},
|
||||
"bicycle": map[string]interface{}{
|
||||
"color": "red",
|
||||
"price": 199.95,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestParser(t *testing.T) {
|
||||
tests := []struct {
|
||||
path string
|
||||
steps []JSONStep
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
path: "$.store.bicycle.color",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "bicycle"},
|
||||
{Type: ChildStep, Key: "color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$..price",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: RecursiveDescentStep, Key: "price"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$.store.book[*].title",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "book"},
|
||||
{Type: IndexStep, Index: -1}, // Wildcard
|
||||
{Type: ChildStep, Key: "title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$.store.book[0]",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "book"},
|
||||
{Type: IndexStep, Index: 0},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "invalid.path",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
path: "$.store.book[abc]",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.path, func(t *testing.T) {
|
||||
steps, err := ParseJSONPath(tt.path)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Fatalf("ParseJSONPath() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
if !tt.wantErr && !reflect.DeepEqual(steps, tt.steps) {
|
||||
t.Errorf("ParseJSONPath() steps = %+v, want %+v", steps, tt.steps)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluator(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expected []JSONNode
|
||||
error bool
|
||||
}{
|
||||
{
|
||||
name: "simple_property_access",
|
||||
path: "$.store.bicycle.color",
|
||||
expected: []JSONNode{
|
||||
{Value: "red", Path: "$.store.bicycle.color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array_index_access",
|
||||
path: "$.store.book[0].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_array_access",
|
||||
path: "$.store.book[*].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
{Value: "The Two Towers", Path: "$.store.book[1].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive_price_search",
|
||||
path: "$..price",
|
||||
expected: []JSONNode{
|
||||
{Value: 22.99, Path: "$.store.book[0].price"},
|
||||
{Value: 23.45, Path: "$.store.book[1].price"},
|
||||
{Value: 199.95, Path: "$.store.bicycle.price"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_recursive",
|
||||
path: "$..*",
|
||||
expected: []JSONNode{
|
||||
// These will be compared by value only, paths will be validated separately
|
||||
{Value: testData["store"].(map[string]interface{})["book"]},
|
||||
{Value: testData["store"].(map[string]interface{})["bicycle"]},
|
||||
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[0]},
|
||||
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[1]},
|
||||
{Value: "The Fellowship of the Ring"},
|
||||
{Value: 22.99},
|
||||
{Value: "The Two Towers"},
|
||||
{Value: 23.45},
|
||||
{Value: "red"},
|
||||
{Value: 199.95},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid_index",
|
||||
path: "$.store.book[5]",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "nonexistent_property",
|
||||
path: "$.store.nonexistent",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Use GetWithPaths directly
|
||||
result, err := Get(testData, tt.path)
|
||||
if err != nil {
|
||||
if !tt.error {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Special handling for wildcard recursive test
|
||||
if tt.name == "wildcard_recursive" {
|
||||
// Skip length check for wildcard recursive since it might vary
|
||||
// Just verify that each expected item is in the results
|
||||
|
||||
// Validate values match and paths are filled in
|
||||
for _, e := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
if reflect.DeepEqual(r.Value, e.Value) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected value %v not found in results", e.Value)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("Expected %d items, got %d", len(tt.expected), len(result))
|
||||
}
|
||||
|
||||
// Validate both values and paths
|
||||
for i, e := range tt.expected {
|
||||
if i < len(result) {
|
||||
if !reflect.DeepEqual(result[i].Value, e.Value) {
|
||||
t.Errorf("Value at [%d]: got %v, expected %v", i, result[i].Value, e.Value)
|
||||
}
|
||||
if result[i].Path != e.Path {
|
||||
t.Errorf("Path at [%d]: got %s, expected %s", i, result[i].Path, e.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEdgeCases(t *testing.T) {
|
||||
t.Run("empty_data", func(t *testing.T) {
|
||||
result, err := Get(nil, "$.a.b")
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for empty data")
|
||||
return
|
||||
}
|
||||
if len(result) > 0 {
|
||||
t.Errorf("Expected empty result, got %v", result)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("empty_path", func(t *testing.T) {
|
||||
_, err := ParseJSONPath("")
|
||||
if err == nil {
|
||||
t.Error("Expected error for empty path")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("numeric_keys", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"42": "answer",
|
||||
}
|
||||
result, err := Get(data, "$.42")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) == 0 || result[0].Value != "answer" {
|
||||
t.Errorf("Expected 'answer', got %v", result)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWithPaths(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expected []JSONNode
|
||||
}{
|
||||
{
|
||||
name: "simple_property_access",
|
||||
path: "$.store.bicycle.color",
|
||||
expected: []JSONNode{
|
||||
{Value: "red", Path: "$.store.bicycle.color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array_index_access",
|
||||
path: "$.store.book[0].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_array_access",
|
||||
path: "$.store.book[*].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
{Value: "The Two Towers", Path: "$.store.book[1].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive_price_search",
|
||||
path: "$..price",
|
||||
expected: []JSONNode{
|
||||
{Value: 22.99, Path: "$.store.book[0].price"},
|
||||
{Value: 23.45, Path: "$.store.book[1].price"},
|
||||
{Value: 199.95, Path: "$.store.bicycle.price"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(testData, tt.path)
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For each expected item, find its match in the results and verify both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
// First verify the value matches
|
||||
if reflect.DeepEqual(r.Value, expected.Value) {
|
||||
found = true
|
||||
// Then verify the path matches
|
||||
if r.Path != expected.Path {
|
||||
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -2,138 +2,210 @@ package processor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"io"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"cook/utils"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// Processor defines the interface for all file processors
|
||||
type Processor interface {
|
||||
// Process handles processing a file with the given pattern and Lua expression
|
||||
Process(filename string, pattern string, luaExpr string) (int, int, error)
|
||||
// processorLogger is a scoped logger for the processor package.
|
||||
var processorLogger = logger.Default.WithPrefix("processor")
|
||||
|
||||
// ProcessContent handles processing a string content directly with the given pattern and Lua expression
|
||||
// Returns the modified content, modification count, match count, and any error
|
||||
ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error)
|
||||
// Maybe we make this an interface again for the shits and giggles
|
||||
// We will see, it could easily be...
|
||||
|
||||
// ToLua converts processor-specific data to Lua variables
|
||||
ToLua(L *lua.LState, data interface{}) error
|
||||
var globalVariables = map[string]interface{}{}
|
||||
|
||||
// FromLua retrieves modified data from Lua
|
||||
FromLua(L *lua.LState) (interface{}, error)
|
||||
}
|
||||
|
||||
// ModificationRecord tracks a single value modification
|
||||
type ModificationRecord struct {
|
||||
File string
|
||||
OldValue string
|
||||
NewValue string
|
||||
Operation string
|
||||
Context string
|
||||
func SetVariables(vars map[string]interface{}) {
|
||||
for k, v := range vars {
|
||||
globalVariables[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
func NewLuaState() (*lua.LState, error) {
|
||||
newLStateLogger := processorLogger.WithPrefix("NewLuaState")
|
||||
newLStateLogger.Debug("Creating new Lua state")
|
||||
L := lua.NewState()
|
||||
// defer L.Close()
|
||||
|
||||
// Load math library
|
||||
newLStateLogger.Debug("Loading Lua math library")
|
||||
L.Push(L.GetGlobal("require"))
|
||||
L.Push(lua.LString("math"))
|
||||
if err := L.PCall(1, 1, nil); err != nil {
|
||||
newLStateLogger.Error("Failed to load Lua math library: %v", err)
|
||||
return nil, fmt.Errorf("error loading Lua math library: %v", err)
|
||||
}
|
||||
newLStateLogger.Debug("Lua math library loaded")
|
||||
|
||||
// Initialize helper functions
|
||||
newLStateLogger.Debug("Initializing Lua helper functions")
|
||||
if err := InitLuaHelpers(L); err != nil {
|
||||
newLStateLogger.Error("Failed to initialize Lua helper functions: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
newLStateLogger.Debug("Lua helper functions initialized")
|
||||
|
||||
return L, nil
|
||||
}
|
||||
|
||||
// ToLua converts a struct or map to a Lua table recursively
|
||||
func ToLua(L *lua.LState, data interface{}) (lua.LValue, error) {
|
||||
switch v := data.(type) {
|
||||
case map[string]interface{}:
|
||||
luaTable := L.NewTable()
|
||||
for key, value := range v {
|
||||
luaValue, err := ToLua(L, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
luaTable.RawSetString(key, luaValue)
|
||||
}
|
||||
return luaTable, nil
|
||||
case []interface{}:
|
||||
luaTable := L.NewTable()
|
||||
for i, value := range v {
|
||||
luaValue, err := ToLua(L, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
luaTable.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
|
||||
}
|
||||
return luaTable, nil
|
||||
case string:
|
||||
return lua.LString(v), nil
|
||||
case bool:
|
||||
return lua.LBool(v), nil
|
||||
// Inject global variables
|
||||
if len(globalVariables) > 0 {
|
||||
newLStateLogger.Debug("Injecting %d global variables into Lua state", len(globalVariables))
|
||||
for k, v := range globalVariables {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case int64:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float32:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float64:
|
||||
return lua.LNumber(v), nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported data type: %T", data)
|
||||
L.SetGlobal(k, lua.LNumber(val))
|
||||
case string:
|
||||
L.SetGlobal(k, lua.LString(val))
|
||||
case bool:
|
||||
if val {
|
||||
L.SetGlobal(k, lua.LTrue)
|
||||
} else {
|
||||
L.SetGlobal(k, lua.LFalse)
|
||||
}
|
||||
default:
|
||||
// Fallback to string representation
|
||||
L.SetGlobal(k, lua.LString(fmt.Sprintf("%v", val)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newLStateLogger.Debug("New Lua state created successfully")
|
||||
return L, nil
|
||||
}
|
||||
|
||||
// FromLua converts a Lua table to a struct or map recursively
|
||||
func FromLua(L *lua.LState, luaValue lua.LValue) (interface{}, error) {
|
||||
fromLuaLogger := processorLogger.WithPrefix("FromLua").WithField("luaType", luaValue.Type().String())
|
||||
fromLuaLogger.Debug("Converting Lua value to Go interface")
|
||||
switch v := luaValue.(type) {
|
||||
case *lua.LTable:
|
||||
fromLuaLogger.Debug("Processing Lua table")
|
||||
isArray, err := IsLuaTableArray(L, v)
|
||||
if err != nil {
|
||||
fromLuaLogger.Error("Failed to determine if Lua table is array: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
fromLuaLogger.Debug("Lua table is array: %t", isArray)
|
||||
if isArray {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go array")
|
||||
result := make([]interface{}, 0)
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
|
||||
result = append(result, converted)
|
||||
})
|
||||
fromLuaLogger.Trace("Converted Go array: %v", result)
|
||||
return result, nil
|
||||
} else {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go map")
|
||||
result := make(map[string]interface{})
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
result[key.String()] = converted
|
||||
})
|
||||
// This may be a bit wasteful...
|
||||
// Hopefully it won't run often enough to matter
|
||||
isArray := true
|
||||
for key := range result {
|
||||
_, err := strconv.Atoi(key)
|
||||
if err != nil {
|
||||
isArray = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if isArray {
|
||||
list := make([]interface{}, 0, len(result))
|
||||
for _, value := range result {
|
||||
list = append(list, value)
|
||||
}
|
||||
return list, nil
|
||||
}
|
||||
fromLuaLogger.Trace("Converted Go map: %v", result)
|
||||
return result, nil
|
||||
}
|
||||
case lua.LString:
|
||||
fromLuaLogger.Debug("Converting Lua string to Go string")
|
||||
fromLuaLogger.Trace("Lua string: %q", string(v))
|
||||
return string(v), nil
|
||||
case lua.LBool:
|
||||
fromLuaLogger.Debug("Converting Lua boolean to Go boolean")
|
||||
fromLuaLogger.Trace("Lua boolean: %t", bool(v))
|
||||
return bool(v), nil
|
||||
case lua.LNumber:
|
||||
fromLuaLogger.Debug("Converting Lua number to Go float64")
|
||||
fromLuaLogger.Trace("Lua number: %f", float64(v))
|
||||
return float64(v), nil
|
||||
default:
|
||||
fromLuaLogger.Debug("Unsupported Lua type, returning nil")
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
|
||||
func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
|
||||
isLuaTableArrayLogger := processorLogger.WithPrefix("IsLuaTableArray")
|
||||
isLuaTableArrayLogger.Debug("Checking if Lua table is an array")
|
||||
isLuaTableArrayLogger.Trace("Lua table input: %v", v)
|
||||
L.SetGlobal("table_to_check", v)
|
||||
|
||||
// Use our predefined helper function from InitLuaHelpers
|
||||
err := L.DoString(`is_array = isArray(table_to_check)`)
|
||||
if err != nil {
|
||||
isLuaTableArrayLogger.Error("Error determining if table is an array: %v", err)
|
||||
return false, fmt.Errorf("error determining if table is array: %w", err)
|
||||
}
|
||||
|
||||
// Check the result of our Lua function
|
||||
isArray := L.GetGlobal("is_array")
|
||||
// LVIsFalse returns true if a given LValue is a nil or false otherwise false.
|
||||
result := !lua.LVIsFalse(isArray)
|
||||
isLuaTableArrayLogger.Debug("Lua table is array: %t", result)
|
||||
isLuaTableArrayLogger.Trace("isArray result Lua value: %v", isArray)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// InitLuaHelpers initializes common Lua helper functions
|
||||
func InitLuaHelpers(L *lua.LState) error {
|
||||
initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
|
||||
initLuaHelpersLogger.Debug("Loading Lua helper functions")
|
||||
|
||||
helperScript := `
|
||||
-- Custom Lua helpers for math operations
|
||||
function min(a, b) return math.min(a, b) end
|
||||
function max(a, b) return math.max(a, b) end
|
||||
function round(x) return math.floor(x + 0.5) end
|
||||
function round(x, n)
|
||||
if n == nil then n = 0 end
|
||||
return math.floor(x * 10^n + 0.5) / 10^n
|
||||
end
|
||||
function floor(x) return math.floor(x) end
|
||||
function ceil(x) return math.ceil(x) end
|
||||
function upper(s) return string.upper(s) end
|
||||
function lower(s) return string.lower(s) end
|
||||
function format(s, ...) return string.format(s, ...) end
|
||||
function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end
|
||||
|
||||
-- String split helper
|
||||
function strsplit(inputstr, sep)
|
||||
if sep == nil then
|
||||
sep = "%s"
|
||||
end
|
||||
local t = {}
|
||||
for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
|
||||
table.insert(t, str)
|
||||
end
|
||||
return t
|
||||
end
|
||||
|
||||
---@param table table
|
||||
---@param depth number?
|
||||
function DumpTable(table, depth)
|
||||
if depth == nil then
|
||||
depth = 0
|
||||
end
|
||||
if (depth > 200) then
|
||||
print("Error: Depth > 200 in dumpTable()")
|
||||
return
|
||||
end
|
||||
for k, v in pairs(table) do
|
||||
if (type(v) == "table") then
|
||||
print(string.rep(" ", depth) .. k .. ":")
|
||||
DumpTable(v, depth + 1)
|
||||
else
|
||||
print(string.rep(" ", depth) .. k .. ": ", v)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- String to number conversion helper
|
||||
function num(str)
|
||||
@@ -149,26 +221,40 @@ end
|
||||
function is_number(str)
|
||||
return tonumber(str) ~= nil
|
||||
end
|
||||
|
||||
function isArray(t)
|
||||
if type(t) ~= "table" then return false end
|
||||
local max = 0
|
||||
local count = 0
|
||||
for k, _ in pairs(t) do
|
||||
if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then
|
||||
return false
|
||||
end
|
||||
max = math.max(max, k)
|
||||
count = count + 1
|
||||
end
|
||||
return max == count
|
||||
end
|
||||
|
||||
modified = false
|
||||
`
|
||||
if err := L.DoString(helperScript); err != nil {
|
||||
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
|
||||
return fmt.Errorf("error loading helper functions: %v", err)
|
||||
}
|
||||
initLuaHelpersLogger.Debug("Lua helper functions loaded")
|
||||
|
||||
initLuaHelpersLogger.Debug("Setting up Lua print function to Go")
|
||||
L.SetGlobal("print", L.NewFunction(printToGo))
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
L.SetGlobal("re", L.NewFunction(evalRegex))
|
||||
initLuaHelpersLogger.Debug("Lua print and fetch functions bound to Go")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Helper utility functions
|
||||
|
||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||
func LimitString(s string, maxLen int) string {
|
||||
s = strings.ReplaceAll(s, "\n", "\\n")
|
||||
if len(s) <= maxLen {
|
||||
return s
|
||||
}
|
||||
return s[:maxLen-3] + "..."
|
||||
}
|
||||
|
||||
// PrependLuaAssignment ensures a shorthand Lua expression assigns to v1,
// e.g. "*2" becomes "v1 = v1*2" and "= 5" becomes "v1 = 5".
func PrependLuaAssignment(luaExpr string) string {
|
||||
prependLuaAssignmentLogger := processorLogger.WithPrefix("PrependLuaAssignment").WithField("originalLuaExpr", luaExpr)
|
||||
prependLuaAssignmentLogger.Debug("Prepending Lua assignment if necessary")
|
||||
// Auto-prepend v1 for expressions starting with operators
|
||||
if strings.HasPrefix(luaExpr, "*") ||
|
||||
strings.HasPrefix(luaExpr, "/") ||
|
||||
@@ -177,31 +263,290 @@ func BuildLuaScript(luaExpr string) string {
|
||||
strings.HasPrefix(luaExpr, "^") ||
|
||||
strings.HasPrefix(luaExpr, "%") {
|
||||
luaExpr = "v1 = v1" + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 = v1' due to operator prefix")
|
||||
} else if strings.HasPrefix(luaExpr, "=") {
|
||||
// Handle direct assignment with = operator
|
||||
luaExpr = "v1 " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1' due to direct assignment operator")
|
||||
}
|
||||
|
||||
// Add assignment if needed
|
||||
if !strings.Contains(luaExpr, "=") {
|
||||
luaExpr = "v1 = " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 =' as no assignment was found")
|
||||
}
|
||||
|
||||
prependLuaAssignmentLogger.Trace("Final Lua expression after prepending: %q", luaExpr)
|
||||
return luaExpr
|
||||
}
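Illustrative expansions implied by the rules above:
// "*2"        -> "v1 = v1*2"      (operator prefix, so "v1 = v1" is prepended)
// "= 100"     -> "v1 = 100"       (direct assignment, so "v1" is prepended)
// "v2 * 10"   -> "v1 = v2 * 10"   (no "=" present, so "v1 =" is prepended)
// "v1 = v1/2" -> unchanged        (already contains an assignment)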
|
||||
|
||||
// Max returns the maximum of two integers
|
||||
func Max(a, b int) int {
|
||||
if a > b {
|
||||
return a
|
||||
}
|
||||
	return b
}

// BuildLuaScript prepares a Lua expression from shorthand notation
func BuildLuaScript(luaExpr string) string {
|
||||
buildLuaScriptLogger := processorLogger.WithPrefix("BuildLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildLuaScriptLogger.Debug("Building full Lua script from expression")
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
luaExpr = PrependLuaAssignment(luaExpr)
|
||||
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
end
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildLuaScriptLogger.Trace("Generated full Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
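For example, the shorthand "*2" should end up wrapped roughly like this (exact whitespace comes from the template above):
// function run()
//     v1 = v1*2
// end
// local res = run()
// modified = res == nil or res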
|
||||
|
||||
// Min returns the minimum of two integers
|
||||
func Min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
}
|
||||
	return b
}

// BuildJSONLuaScript prepares a Lua expression for JSON mode
func BuildJSONLuaScript(luaExpr string) string {
|
||||
buildJsonLuaScriptLogger := processorLogger.WithPrefix("BuildJSONLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildJsonLuaScriptLogger.Debug("Building full Lua script for JSON mode from expression")
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
end
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildJsonLuaScriptLogger.Trace("Generated full JSON Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
|
||||
|
||||
func replaceVariables(expr string) string {
|
||||
// $varName -> literal value
|
||||
varNameRe := regexp.MustCompile(`\$(\w+)`)
|
||||
return varNameRe.ReplaceAllStringFunc(expr, func(m string) string {
|
||||
name := varNameRe.FindStringSubmatch(m)[1]
|
||||
if v, ok := globalVariables[name]; ok {
|
||||
switch val := v.(type) {
|
||||
case int, int64, float32, float64:
|
||||
return fmt.Sprintf("%v", val)
|
||||
case bool:
|
||||
if val {
|
||||
return "true"
|
||||
} else {
|
||||
return "false"
|
||||
}
|
||||
case string:
|
||||
// Quote strings for Lua literal
|
||||
return fmt.Sprintf("%q", val)
|
||||
default:
|
||||
return fmt.Sprintf("%q", fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return m
|
||||
})
|
||||
}
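A sketch of the substitution, assuming (hypothetically) that globalVariables holds a numeric "factor" of 16 and a string "suffix" of "_x":
// replaceVariables("v1 * $factor")  -> "v1 * 16"         (numbers inserted verbatim)
// replaceVariables("s1 .. $suffix") -> `s1 .. "_x"`      (strings quoted as Lua literals)
// replaceVariables("v1 * $missing") -> "v1 * $missing"   (unknown names left untouched)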
|
||||
|
||||
func printToGo(L *lua.LState) int {
|
||||
printToGoLogger := processorLogger.WithPrefix("printToGo")
|
||||
printToGoLogger.Debug("Lua print function called, redirecting to Go logger")
|
||||
top := L.GetTop()
|
||||
|
||||
args := make([]interface{}, top)
|
||||
for i := 1; i <= top; i++ {
|
||||
args[i-1] = L.Get(i)
|
||||
}
|
||||
|
||||
// Format the message with proper spacing between arguments
|
||||
var parts []string
|
||||
for _, arg := range args {
|
||||
parts = append(parts, fmt.Sprintf("%v", arg))
|
||||
}
|
||||
message := strings.Join(parts, " ")
|
||||
printToGoLogger.Trace("Lua print message: %q", message)
|
||||
|
||||
// Use the LUA log level with a script tag
|
||||
logger.Lua("%s", message)
|
||||
printToGoLogger.Debug("Message logged from Lua")
|
||||
return 0
|
||||
}
|
||||
|
||||
func fetch(L *lua.LState) int {
|
||||
fetchLogger := processorLogger.WithPrefix("fetch")
|
||||
fetchLogger.Debug("Lua fetch function called")
|
||||
// Get URL from first argument
|
||||
url := L.ToString(1)
|
||||
if url == "" {
|
||||
fetchLogger.Error("Fetch failed: URL is required")
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString("URL is required"))
|
||||
return 2
|
||||
}
|
||||
fetchLogger.Debug("Fetching URL: %q", url)
|
||||
|
||||
// Get options from second argument if provided
|
||||
var method string = "GET"
|
||||
var headers map[string]string = make(map[string]string)
|
||||
var body string = ""
|
||||
|
||||
if L.GetTop() > 1 {
|
||||
options := L.ToTable(2)
|
||||
if options != nil {
|
||||
fetchLogger.Debug("Processing fetch options")
|
||||
// Get method
|
||||
if methodVal := options.RawGetString("method"); methodVal != lua.LNil {
|
||||
method = methodVal.String()
|
||||
fetchLogger.Trace("Method from options: %q", method)
|
||||
}
|
||||
|
||||
// Get headers
|
||||
if headersVal := options.RawGetString("headers"); headersVal != lua.LNil {
|
||||
if headersTable, ok := headersVal.(*lua.LTable); ok {
|
||||
fetchLogger.Trace("Processing headers table")
|
||||
headersTable.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
headers[key.String()] = value.String()
|
||||
fetchLogger.Trace("Header: %q = %q", key.String(), value.String())
|
||||
})
|
||||
}
|
||||
fetchLogger.Trace("All headers: %v", headers)
|
||||
}
|
||||
|
||||
// Get body
|
||||
if bodyVal := options.RawGetString("body"); bodyVal != lua.LNil {
|
||||
body = bodyVal.String()
|
||||
fetchLogger.Trace("Body from options: %q", utils.LimitString(body, 100))
|
||||
}
|
||||
}
|
||||
}
|
||||
fetchLogger.Debug("Fetch request details: Method=%q, URL=%q, BodyLength=%d, Headers=%v", method, url, len(body), headers)
|
||||
|
||||
// Create HTTP request
|
||||
req, err := http.NewRequest(method, url, strings.NewReader(body))
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error creating HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error creating request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
|
||||
// Set headers
|
||||
for key, value := range headers {
|
||||
req.Header.Set(key, value)
|
||||
}
|
||||
fetchLogger.Debug("HTTP request created and headers set")
|
||||
fetchLogger.Trace("HTTP Request: %+v", req)
|
||||
|
||||
// Make request
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error making HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error making request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
defer func() {
|
||||
fetchLogger.Debug("Closing HTTP response body")
|
||||
resp.Body.Close()
|
||||
}()
|
||||
fetchLogger.Debug("HTTP request executed. Status Code: %d", resp.StatusCode)
|
||||
|
||||
// Read response body
|
||||
bodyBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error reading response body: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error reading response: %v", err)))
|
||||
return 2
|
||||
}
|
||||
fetchLogger.Trace("Response body length: %d", len(bodyBytes))
|
||||
|
||||
// Create response table
|
||||
responseTable := L.NewTable()
|
||||
responseTable.RawSetString("status", lua.LNumber(resp.StatusCode))
|
||||
responseTable.RawSetString("statusText", lua.LString(resp.Status))
|
||||
responseTable.RawSetString("ok", lua.LBool(resp.StatusCode >= 200 && resp.StatusCode < 300))
|
||||
responseTable.RawSetString("body", lua.LString(string(bodyBytes)))
|
||||
fetchLogger.Debug("Created Lua response table")
|
||||
|
||||
// Set headers in response
|
||||
headersTable := L.NewTable()
|
||||
for key, values := range resp.Header {
|
||||
headersTable.RawSetString(key, lua.LString(values[0]))
|
||||
fetchLogger.Trace("Response header: %q = %q", key, values[0])
|
||||
}
|
||||
responseTable.RawSetString("headers", headersTable)
|
||||
fetchLogger.Trace("Full response table: %v", responseTable)
|
||||
|
||||
L.Push(responseTable)
|
||||
fetchLogger.Debug("Pushed response table to Lua stack")
|
||||
return 1
|
||||
}
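From the Lua side, usage would look roughly like this (a sketch; the URL is a placeholder and the response fields mirror the table built above):
err := L.DoString(`
	local resp, err = fetch("https://example.com/api", {
		method = "GET",
		headers = { ["Accept"] = "application/json" },
	})
	if resp and resp.ok then
		print("status:", resp.status, "bytes:", #resp.body)
	else
		print("fetch failed:", err)
	end
`)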
|
||||
|
||||
func evalRegex(L *lua.LState) int {
|
||||
evalRegexLogger := processorLogger.WithPrefix("evalRegex")
|
||||
evalRegexLogger.Debug("Lua evalRegex function called")
|
||||
pattern := L.ToString(1)
|
||||
input := L.ToString(2)
|
||||
|
||||
	// Compile (rather than MustCompile) so a bad pattern coming from Lua reports an error instead of panicking
	re, err := regexp.Compile(pattern)
	if err != nil {
		evalRegexLogger.Error("Invalid regex pattern %q: %v", pattern, err)
		L.Push(lua.LNil)
		L.Push(lua.LString(fmt.Sprintf("invalid pattern: %v", err)))
		return 2
	}
	matches := re.FindStringSubmatch(input)

	matchesTable := L.NewTable()
	for i, match := range matches {
		// Use integer keys so Lua code can read matches[0], matches[1], ... directly
		matchesTable.RawSetInt(i, lua.LString(match))
	}

	L.Push(matchesTable)
	evalRegexLogger.Debug("Pushed matches table to Lua stack")

	// Return 1 so the pushed table is actually visible to the Lua caller
	return 1
}
|
||||
|
||||
// GetLuaFunctionsHelp returns a comprehensive help string for all available Lua functions
|
||||
func GetLuaFunctionsHelp() string {
|
||||
return `Lua Functions Available in Global Environment:
|
||||
|
||||
MATH FUNCTIONS:
|
||||
min(a, b) - Returns the minimum of two numbers
|
||||
max(a, b) - Returns the maximum of two numbers
|
||||
round(x, n) - Rounds x to n decimal places (default 0)
|
||||
floor(x) - Returns the floor of x
|
||||
ceil(x) - Returns the ceiling of x
|
||||
|
||||
STRING FUNCTIONS:
|
||||
upper(s) - Converts string to uppercase
|
||||
lower(s) - Converts string to lowercase
|
||||
format(s, ...) - Formats string using Lua string.format
|
||||
trim(s) - Removes leading/trailing whitespace
|
||||
strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
|
||||
num(str) - Converts string to number (returns 0 if invalid)
|
||||
str(num) - Converts number to string
|
||||
is_number(str) - Returns true if string is numeric
|
||||
|
||||
TABLE FUNCTIONS:
|
||||
DumpTable(table, depth) - Prints table structure recursively
|
||||
isArray(t) - Returns true if table is a sequential array
|
||||
|
||||
HTTP FUNCTIONS:
|
||||
fetch(url, options) - Makes HTTP request, returns response table
|
||||
options: {method="GET", headers={}, body=""}
|
||||
returns: {status, statusText, ok, body, headers}
|
||||
|
||||
REGEX FUNCTIONS:
|
||||
re(pattern, input) - Applies regex pattern to input string
|
||||
returns: table with matches (index 0 = full match, 1+ = groups)
|
||||
|
||||
UTILITY FUNCTIONS:
|
||||
print(...) - Prints arguments to Go logger
|
||||
|
||||
EXAMPLES:
|
||||
round(3.14159, 2) -> 3.14
|
||||
strsplit("a,b,c", ",") -> {"a", "b", "c"}
|
||||
upper("hello") -> "HELLO"
|
||||
min(5, 3) -> 3
|
||||
num("123") -> 123
|
||||
is_number("abc") -> false
|
||||
fetch("https://api.example.com/data")
|
||||
re("(\\w+)@(\\w+)", "user@domain.com") -> {"user@domain.com", "user", "domain.com"}`
|
||||
}
|
||||
|
@@ -1,134 +1,108 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// RegexProcessor implements the Processor interface using regex patterns
|
||||
type RegexProcessor struct{}
|
||||
// regexLogger is a scoped logger for the processor/regex package.
|
||||
var regexLogger = logger.Default.WithPrefix("processor/regex")
|
||||
|
||||
// Process implements the Processor interface for RegexProcessor
|
||||
func (p *RegexProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
|
||||
// Read file content
|
||||
fullPath := filepath.Join(".", filename)
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error reading file: %v", err)
|
||||
}
|
||||
|
||||
fileContent := string(content)
|
||||
|
||||
// Process the content
|
||||
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
// If we made modifications, save the file
|
||||
if modCount > 0 {
|
||||
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error writing file: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
|
||||
func (p *RegexProcessor) ToLua(L *lua.LState, data interface{}) error {
|
||||
captures, ok := data.([]string)
|
||||
if !ok {
|
||||
return fmt.Errorf("expected []string for captures, got %T", data)
|
||||
}
|
||||
|
||||
// Set variables for each capture group, starting from v1/s1 for the first capture
|
||||
for i := 0; i < len(captures); i++ {
|
||||
// Set string version (always available as s1, s2, etc.)
|
||||
L.SetGlobal(fmt.Sprintf("s%d", i+1), lua.LString(captures[i]))
|
||||
|
||||
// Try to convert to number and set v1, v2, etc.
|
||||
if val, err := strconv.ParseFloat(captures[i], 64); err == nil {
|
||||
L.SetGlobal(fmt.Sprintf("v%d", i+1), lua.LNumber(val))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// FromLua implements the Processor interface for RegexProcessor
|
||||
func (p *RegexProcessor) FromLua(L *lua.LState) (interface{}, error) {
|
||||
// Get the modified values after Lua execution
|
||||
modifications := make(map[int]string)
|
||||
|
||||
// Check for modifications to v1-v12 and s1-s12
|
||||
for i := 0; i < 12; i++ {
|
||||
// Check both v and s variables to see if any were modified
|
||||
vVarName := fmt.Sprintf("v%d", i+1)
|
||||
sVarName := fmt.Sprintf("s%d", i+1)
|
||||
|
||||
vLuaVal := L.GetGlobal(vVarName)
|
||||
sLuaVal := L.GetGlobal(sVarName)
|
||||
|
||||
// If our value is a number then it's very likely we want it to be a number
|
||||
// And not a string
|
||||
// If we do want it to be a string we will cast it into a string in lua
|
||||
// wait that wouldn't work... Casting v to a string would not load it here
|
||||
if vLuaVal.Type() == lua.LTNumber {
|
||||
modifications[i] = vLuaVal.String()
|
||||
continue
|
||||
}
|
||||
if sLuaVal.Type() == lua.LTString {
|
||||
modifications[i] = sLuaVal.String()
|
||||
continue
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return modifications, nil
|
||||
type CaptureGroup struct {
|
||||
Name string
|
||||
Value string
|
||||
Updated string
|
||||
Range [2]int
|
||||
}
|
||||
|
||||
// ProcessContent applies regex replacement with Lua processing
|
||||
func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
|
||||
// Handle special pattern modifications
|
||||
if !strings.HasPrefix(pattern, "(?s)") {
|
||||
pattern = "(?s)" + pattern
|
||||
}
|
||||
// The filename here exists ONLY so we can pass it to the lua environment
|
||||
// It's not used for anything else
|
||||
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||
processRegexLogger := regexLogger.WithPrefix("ProcessRegex").WithField("commandName", command.Name).WithField("file", filename)
|
||||
processRegexLogger.Debug("Starting regex processing for file")
|
||||
processRegexLogger.Trace("Initial file content length: %d", len(content))
|
||||
processRegexLogger.Trace("Command details: %+v", command)
|
||||
|
||||
var commands []utils.ReplaceCommand
|
||||
// Start timing the regex processing
|
||||
startTime := time.Now()
|
||||
|
||||
// We don't HAVE to do this multiple times for a pattern
|
||||
// But it's quick enough for us to not care
|
||||
pattern := resolveRegexPlaceholders(command.Regex)
|
||||
processRegexLogger.Debug("Resolved regex placeholders. Pattern: %s", pattern)
|
||||
|
||||
// I'm not too happy about having to trim regex, we could have meaningful whitespace or newlines
|
||||
// But it's a compromise that allows us to use | in yaml
|
||||
// Otherwise we would have to escape every god damn pair of quotation marks
|
||||
// And a bunch of other shit
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
processRegexLogger.Debug("Trimmed regex pattern: %s", pattern)
|
||||
|
||||
patternCompileStart := time.Now()
|
||||
compiledPattern, err := regexp.Compile(pattern)
|
||||
	if err != nil {
		processRegexLogger.Error("Error compiling pattern %q: %v", pattern, err)
		return commands, fmt.Errorf("error compiling pattern: %v", err)
	}
|
||||
processRegexLogger.Debug("Compiled pattern successfully in %v", time.Since(patternCompileStart))
|
||||
|
||||
previous := luaExpr
|
||||
luaExpr = BuildLuaScript(luaExpr)
|
||||
fmt.Printf("Changing Lua expression from: %s to: %s\n", previous, luaExpr)
|
||||
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
return "", 0, 0, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
defer L.Close()
|
||||
|
||||
// Initialize Lua environment
|
||||
modificationCount := 0
|
||||
// Same here, it's just string concatenation, it won't kill us
|
||||
// More important is that we don't fuck up the command
|
||||
// But we shouldn't be able to since it's passed by value
|
||||
previousLuaExpr := command.Lua
|
||||
luaExpr := BuildLuaScript(command.Lua)
|
||||
processRegexLogger.Debug("Transformed Lua expression: %q → %q", previousLuaExpr, luaExpr)
|
||||
processRegexLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||
|
||||
// Process all regex matches
|
||||
result := content
|
||||
matchFindStart := time.Now()
|
||||
indices := compiledPattern.FindAllStringSubmatchIndex(content, -1)
|
||||
matchFindDuration := time.Since(matchFindStart)
|
||||
|
||||
processRegexLogger.Debug("Found %d matches in content of length %d (search took %v)",
|
||||
len(indices), len(content), matchFindDuration)
|
||||
processRegexLogger.Trace("Match indices: %v", indices)
|
||||
|
||||
// Log pattern complexity metrics
|
||||
patternComplexity := estimatePatternComplexity(pattern)
|
||||
processRegexLogger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
||||
|
||||
if len(indices) == 0 {
|
||||
processRegexLogger.Warning("No matches found for regex: %q", pattern)
|
||||
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// We walk backwards because we're replacing something with something else that might be longer
|
||||
// And in the case it is longer than the original all indices past that change will be fucked up
|
||||
// By going backwards we fuck up all the indices to the end of the file that we don't care about
|
||||
// Because there either aren't any (last match) or they're already modified (subsequent matches)
|
||||
	for i, matchIndices := range indices {
|
||||
matchLogger := processRegexLogger.WithField("matchNum", i+1)
|
||||
matchLogger.Debug("Processing match %d of %d", i+1, len(indices))
|
||||
matchLogger.Trace("Match indices: %v (match position %d-%d)", matchIndices, matchIndices[0], matchIndices[1])
|
||||
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
matchLogger.Error("Error creating Lua state: %v", err)
|
||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
L.SetGlobal("file", lua.LString(filename))
|
||||
// Hmm... Maybe we don't want to defer this..
|
||||
// Maybe we want to close them every iteration
|
||||
// We'll leave it as is for now
|
||||
defer L.Close()
|
||||
matchLogger.Trace("Lua state created successfully for match %d", i+1)
|
||||
|
||||
// Why we're doing this whole song and dance of indices is to properly handle empty matches
|
||||
// Plus it's a little cleaner to surgically replace our matches
|
||||
// If we were to use string.replace and encountered an empty match there'd be nothing to replace
|
||||
@@ -136,61 +110,390 @@ func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr
|
||||
// So when we're cutting open the array we say 0:7 + modified + 7:end
|
||||
// As if concatenating in the middle of the array
|
||||
// Plus it supports lookarounds
|
||||
match := content[matchIndices[0]:matchIndices[1]]
|
||||
matchContent := content[matchIndices[0]:matchIndices[1]]
|
||||
matchPreview := utils.LimitString(matchContent, 50)
|
||||
matchLogger.Trace("Matched content: %q (length: %d)", matchPreview, len(matchContent))
|
||||
|
||||
groups := matchIndices[2:]
|
||||
if len(groups) <= 0 {
|
||||
fmt.Println("No capture groups for lua to chew on")
|
||||
matchLogger.Warning("No capture groups found for match %q and regex %q", matchPreview, pattern)
|
||||
continue
|
||||
}
|
||||
if len(groups)%2 == 1 {
|
||||
fmt.Println("Odd number of indices of groups, what the fuck?")
|
||||
matchLogger.Warning("Invalid number of group indices (%d), should be even: %v", len(groups), groups)
|
||||
continue
|
||||
}
|
||||
|
||||
captures := make([]string, 0, len(groups)/2)
|
||||
// Count how many valid groups we have
|
||||
validGroups := 0
|
||||
for j := 0; j < len(groups); j += 2 {
|
||||
captures = append(captures, content[groups[j]:groups[j+1]])
|
||||
if groups[j] != -1 && groups[j+1] != -1 {
|
||||
validGroups++
|
||||
}
|
||||
}
|
||||
matchLogger.Debug("Found %d valid capture groups in match", validGroups)
|
||||
|
||||
if err := p.ToLua(L, captures); err != nil {
|
||||
fmt.Println("Error setting Lua variables:", err)
|
||||
for _, index := range groups {
|
||||
if index == -1 {
|
||||
matchLogger.Warning("Negative index encountered in match indices %v. This may indicate an issue with the regex pattern or an empty/optional capture group.", matchIndices)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// We have to use array to preserve order
|
||||
// Very important for the reconstruction step
|
||||
// Because we must overwrite the values in reverse order
|
||||
// See comments a few dozen lines above for more details
|
||||
captureGroups := make([]*CaptureGroup, 0, len(groups)/2)
|
||||
groupNames := compiledPattern.SubexpNames()[1:]
|
||||
for i, name := range groupNames {
|
||||
start := groups[i*2]
|
||||
end := groups[i*2+1]
|
||||
if start == -1 || end == -1 {
|
||||
matchLogger.Debug("Skipping empty or unmatched capture group #%d (name: %q)", i+1, name)
|
||||
continue
|
||||
}
|
||||
|
||||
value := content[start:end]
|
||||
captureGroups = append(captureGroups, &CaptureGroup{
|
||||
Name: name,
|
||||
Value: value,
|
||||
Range: [2]int{start, end},
|
||||
})
|
||||
|
||||
// Include name info in log if available
|
||||
if name != "" {
|
||||
matchLogger.Trace("Capture group '%s': %q (pos %d-%d)", name, value, start, end)
|
||||
} else {
|
||||
matchLogger.Trace("Capture group #%d: %q (pos %d-%d)", i+1, value, start, end)
|
||||
}
|
||||
}
|
||||
|
||||
// Use the DeduplicateGroups flag to control whether to deduplicate capture groups
|
||||
if !command.NoDedup {
|
||||
matchLogger.Debug("Deduplicating capture groups as specified in command settings")
|
||||
captureGroups = deduplicateGroups(captureGroups)
|
||||
matchLogger.Trace("Capture groups after deduplication: %v", captureGroups)
|
||||
} else {
|
||||
matchLogger.Debug("Skipping deduplication of capture groups (NoDedup is true)")
|
||||
}
|
||||
|
||||
if err := toLua(L, captureGroups); err != nil {
|
||||
matchLogger.Error("Failed to set Lua variables for capture groups: %v", err)
|
||||
continue
|
||||
}
|
||||
matchLogger.Debug("Set %d capture groups as Lua variables", len(captureGroups))
|
||||
matchLogger.Trace("Lua globals set for capture groups")
|
||||
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
fmt.Printf("Error executing Lua code %s for group %s: %v", luaExpr, captures, err)
|
||||
matchLogger.Error("Lua script execution failed: %v\nScript: %s\nCapture Groups: %+v",
|
||||
err, utils.LimitString(luaExpr, 200), captureGroups)
|
||||
continue
|
||||
}
|
||||
matchLogger.Debug("Lua script executed successfully")
|
||||
|
||||
// Get modifications from Lua
|
||||
modResult, err := p.FromLua(L)
|
||||
updatedCaptureGroups, err := fromLua(L, captureGroups)
|
||||
if err != nil {
|
||||
fmt.Println("Error getting modifications:", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Apply modifications to the matched text
|
||||
modsMap, ok := modResult.(map[int]string)
|
||||
if !ok || len(modsMap) == 0 {
|
||||
fmt.Println("No modifications to apply")
|
||||
matchLogger.Error("Failed to retrieve modifications from Lua: %v", err)
|
||||
continue
|
||||
}
|
||||
matchLogger.Debug("Retrieved updated values from Lua")
|
||||
matchLogger.Trace("Updated capture groups from Lua: %v", updatedCaptureGroups)
|
||||
|
||||
replacement := ""
|
||||
replacementVar := L.GetGlobal("replacement")
|
||||
if replacementVar.Type() != lua.LTNil {
|
||||
replacement = replacementVar.String()
|
||||
matchLogger.Debug("Using global replacement variable from Lua: %q", replacement)
|
||||
}
|
||||
|
||||
// Check if modification flag is set
|
||||
modifiedVal := L.GetGlobal("modified")
|
||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||
matchLogger.Debug("Skipping match - no modifications indicated by Lua script")
|
||||
continue
|
||||
}
|
||||
|
||||
if replacement == "" {
|
||||
// Apply the modifications to the original match
|
||||
replacement := match
|
||||
for i := len(modsMap) - 1; i >= 0; i-- {
|
||||
newVal := modsMap[i]
|
||||
replacement = matchContent
|
||||
|
||||
// Count groups that were actually modified
|
||||
modifiedGroupsCount := 0
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value != capture.Updated {
|
||||
modifiedGroupsCount++
|
||||
}
|
||||
}
|
||||
matchLogger.Info("%d of %d capture groups identified for modification", modifiedGroupsCount, len(updatedCaptureGroups))
|
||||
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value == capture.Updated {
|
||||
matchLogger.Debug("Capture group unchanged: %s", utils.LimitString(capture.Value, 50))
|
||||
continue
|
||||
}
|
||||
|
||||
// Log what changed with context
|
||||
matchLogger.Debug("Capture group %q scheduled for modification: %q → %q",
|
||||
capture.Name, utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
|
||||
// Indices of the group are relative to content
|
||||
// To relate them to match we have to subtract the match start index
|
||||
groupStart := groups[i*2] - matchIndices[0]
|
||||
groupEnd := groups[i*2+1] - matchIndices[0]
|
||||
replacement = replacement[:groupStart] + newVal + replacement[groupEnd:]
|
||||
// replacement = replacement[:groupStart] + newVal + replacement[groupEnd:]
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: capture.Range[0],
|
||||
To: capture.Range[1],
|
||||
With: capture.Updated,
|
||||
})
|
||||
matchLogger.Trace("Added replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
} else {
|
||||
matchLogger.Debug("Using full replacement string from Lua: %q", utils.LimitString(replacement, 50))
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: matchIndices[0],
|
||||
To: matchIndices[1],
|
||||
With: replacement,
|
||||
})
|
||||
matchLogger.Trace("Added full replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
}
|
||||
|
||||
modificationCount++
|
||||
result = result[:matchIndices[0]] + replacement + result[matchIndices[1]:]
|
||||
}
|
||||
|
||||
return result, modificationCount, len(indices), nil
|
||||
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
processRegexLogger.Debug("Generated %d total modifications", len(commands))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
func deduplicateGroups(captureGroups []*CaptureGroup) []*CaptureGroup {
|
||||
deduplicateGroupsLogger := regexLogger.WithPrefix("deduplicateGroups")
|
||||
deduplicateGroupsLogger.Debug("Starting deduplication of capture groups")
|
||||
deduplicateGroupsLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
// Preserve input order and drop any group that overlaps with an already accepted group
|
||||
accepted := make([]*CaptureGroup, 0, len(captureGroups))
|
||||
for _, group := range captureGroups {
|
||||
groupLogger := deduplicateGroupsLogger.WithField("groupName", group.Name).WithField("groupRange", group.Range)
|
||||
groupLogger.Debug("Processing capture group")
|
||||
|
||||
overlaps := false
|
||||
for _, kept := range accepted {
|
||||
// Overlap if start < keptEnd and end > keptStart (adjacent is allowed)
|
||||
if group.Range[0] < kept.Range[1] && group.Range[1] > kept.Range[0] {
|
||||
overlaps = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if overlaps {
|
||||
groupLogger.Warning("Overlapping capture group detected and skipped.")
|
||||
continue
|
||||
}
|
||||
|
||||
groupLogger.Debug("Capture group does not overlap with previously accepted groups. Adding.")
|
||||
accepted = append(accepted, group)
|
||||
}
|
||||
|
||||
deduplicateGroupsLogger.Debug("Finished deduplication. Original %d groups, %d deduplicated.", len(captureGroups), len(accepted))
|
||||
deduplicateGroupsLogger.Trace("Deduplicated groups: %v", accepted)
|
||||
|
||||
return accepted
|
||||
}
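To make the overlap rule concrete (illustrative ranges only):
// Given groups with ranges [0,5), [3,8), [5,9):
//   [0,5) -> accepted (first group)
//   [3,8) -> dropped  (3 < 5 and 8 > 0, so it overlaps the accepted [0,5))
//   [5,9) -> accepted (5 < 5 is false, so it is only adjacent, which is allowed)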
|
||||
|
||||
// The order of these replaces is important
|
||||
// This one handles !num-s inside of named capture groups
|
||||
// If it were not here our !num in a named capture group would
|
||||
// Expand to another capture group in the capture group
|
||||
// We really only want one (our named) capture group
|
||||
func resolveRegexPlaceholders(pattern string) string {
|
||||
resolveLogger := regexLogger.WithPrefix("resolveRegexPlaceholders").WithField("originalPattern", utils.LimitString(pattern, 100))
|
||||
resolveLogger.Debug("Resolving regex placeholders in pattern")
|
||||
|
||||
// Handle special pattern modifications
|
||||
if !strings.HasPrefix(pattern, "(?s)") {
|
||||
pattern = "(?s)" + pattern
|
||||
resolveLogger.Debug("Prepended '(?s)' to pattern for single-line mode")
|
||||
}
|
||||
|
||||
namedGroupNum := regexp.MustCompile(`(?:(\?<[^>]+>)(!num))`)
|
||||
pattern = namedGroupNum.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("namedGroupNumReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing named group !num placeholder")
|
||||
parts := namedGroupNum.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for namedGroupNum: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
replacement := `-?\d*\.?\d+`
|
||||
funcLogger.Trace("Replacing !num in named group with: %q", replacement)
|
||||
return parts[1] + replacement
|
||||
})
|
||||
resolveLogger.Debug("Handled named group !num placeholders")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!num", `(-?\d*\.?\d+)`)
|
||||
resolveLogger.Debug("Replaced !num with numeric capture group")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
|
||||
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
|
||||
|
||||
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
|
||||
// !rep(pattern, count) repeats the pattern n times
|
||||
// Inserting !any between each repetition
|
||||
pattern = repPattern.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("repPatternReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing !rep placeholder")
|
||||
parts := repPattern.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for repPattern: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
repeatedPattern := parts[1]
|
||||
countStr := parts[2]
|
||||
repetitions, err := strconv.Atoi(countStr)
|
||||
if err != nil {
|
||||
funcLogger.Error("Failed to parse repetition count %q: %v. Returning original match.", countStr, err)
|
||||
return match
|
||||
}
|
||||
|
||||
var finalReplacement string
|
||||
if repetitions > 0 {
|
||||
finalReplacement = strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
|
||||
} else {
|
||||
finalReplacement = ""
|
||||
}
|
||||
|
||||
funcLogger.Trace("Replaced !rep with %d repetitions of %q: %q", repetitions, utils.LimitString(repeatedPattern, 30), utils.LimitString(finalReplacement, 100))
|
||||
return finalReplacement
|
||||
})
|
||||
resolveLogger.Debug("Handled !rep placeholders")
|
||||
|
||||
resolveLogger.Debug("Finished resolving regex placeholders")
|
||||
resolveLogger.Trace("Final resolved pattern: %q", utils.LimitString(pattern, 100))
|
||||
return pattern
|
||||
}
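A few example expansions of the shorthand placeholders (illustrative):
// "Weight !num"    -> "(?s)Weight (-?\d*\.?\d+)"
// "(?<w>!num)"     -> "(?s)(?<w>-?\d*\.?\d+)"   (named group keeps a single capture)
// "a!anyb"         -> "(?s)a.*?b"
// "!rep(!num, 3)"  -> three numeric capture groups joined by ".*?"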
|
||||
|
||||
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
|
||||
func toLua(L *lua.LState, data interface{}) error {
|
||||
toLuaLogger := regexLogger.WithPrefix("toLua")
|
||||
toLuaLogger.Debug("Setting capture groups as Lua variables")
|
||||
|
||||
captureGroups, ok := data.([]*CaptureGroup)
|
||||
if !ok {
|
||||
toLuaLogger.Error("Invalid data type for toLua. Expected []*CaptureGroup, got %T", data)
|
||||
return fmt.Errorf("expected []*CaptureGroup for captures, got %T", data)
|
||||
}
|
||||
toLuaLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
groupindex := 0
|
||||
for _, capture := range captureGroups {
|
||||
groupLogger := toLuaLogger.WithField("captureGroup", capture.Name).WithField("value", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group for Lua")
|
||||
|
||||
if capture.Name == "" {
|
||||
// We don't want to change the name of the capture group
|
||||
// Even if it's empty
|
||||
tempName := fmt.Sprintf("%d", groupindex+1)
|
||||
groupindex++
|
||||
groupLogger.Debug("Unnamed capture group, assigning temporary name: %q", tempName)
|
||||
|
||||
L.SetGlobal("s"+tempName, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global s%s = %q", tempName, capture.Value)
|
||||
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal("v"+tempName, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global v%s = %f", tempName, val)
|
||||
} else {
|
||||
groupLogger.Trace("Value %q is not numeric, skipping v%s assignment", capture.Value, tempName)
|
||||
}
|
||||
} else {
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal(capture.Name, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global %s = %f (numeric)", capture.Name, val)
|
||||
} else {
|
||||
L.SetGlobal(capture.Name, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global %s = %q (string)", capture.Name, capture.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
toLuaLogger.Debug("Finished setting capture groups as Lua variables")
|
||||
return nil
|
||||
}
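For example (illustrative values), capture groups would be exposed to Lua as follows:
// unnamed group "42"             -> s1 = "42" and v1 = 42
// unnamed group "abc"            -> s2 = "abc" (not numeric, so no v2)
// named group health = "150.5"   -> health = 150.5 (set as a Lua number)
// named group label  = "Fiber"   -> label = "Fiber" (set as a Lua string)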
|
||||
|
||||
// FromLua implements the Processor interface for RegexProcessor
|
||||
func fromLua(L *lua.LState, captureGroups []*CaptureGroup) ([]*CaptureGroup, error) {
|
||||
fromLuaLogger := regexLogger.WithPrefix("fromLua")
|
||||
fromLuaLogger.Debug("Retrieving modifications from Lua for capture groups")
|
||||
fromLuaLogger.Trace("Initial capture groups: %v", captureGroups)
|
||||
|
||||
captureIndex := 0
|
||||
for _, capture := range captureGroups {
|
||||
groupLogger := fromLuaLogger.WithField("originalCaptureName", capture.Name).WithField("originalValue", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group to retrieve updated value")
|
||||
|
||||
if capture.Name == "" {
|
||||
// This case means it was an unnamed capture group originally.
|
||||
// We need to reconstruct the original temporary name to fetch its updated value.
|
||||
// The name will be set to an integer if it was empty, then incremented.
|
||||
// So, we use the captureIndex to get the correct 'vX' and 'sX' variables.
|
||||
tempName := fmt.Sprintf("%d", captureIndex+1)
|
||||
groupLogger.Debug("Retrieving updated value for unnamed group (temp name: %q)", tempName)
|
||||
|
||||
vVarName := fmt.Sprintf("v%s", tempName)
|
||||
sVarName := fmt.Sprintf("s%s", tempName)
|
||||
captureIndex++
|
||||
|
||||
vLuaVal := L.GetGlobal(vVarName)
|
||||
sLuaVal := L.GetGlobal(sVarName)
|
||||
|
||||
groupLogger.Trace("Lua values for unnamed group: v=%v, s=%v", vLuaVal, sLuaVal)
|
||||
|
||||
if sLuaVal.Type() == lua.LTString {
|
||||
capture.Updated = sLuaVal.String()
|
||||
groupLogger.Trace("Updated value from s%s (string): %q", tempName, capture.Updated)
|
||||
}
|
||||
// Numbers have priority
|
||||
if vLuaVal.Type() == lua.LTNumber {
|
||||
capture.Updated = vLuaVal.String()
|
||||
groupLogger.Trace("Updated value from v%s (numeric): %q", tempName, capture.Updated)
|
||||
}
|
||||
} else {
|
||||
// Easy shit, directly use the named capture group
|
||||
updatedValue := L.GetGlobal(capture.Name)
|
||||
if updatedValue.Type() != lua.LTNil {
|
||||
capture.Updated = updatedValue.String()
|
||||
groupLogger.Trace("Updated value for named group %q: %q", capture.Name, capture.Updated)
|
||||
} else {
|
||||
groupLogger.Debug("Named capture group %q not found in Lua globals or is nil. Keeping original value.", capture.Name)
|
||||
capture.Updated = capture.Value // Keep original if not found or nil
|
||||
}
|
||||
}
|
||||
groupLogger.Debug("Finished processing capture group. Original: %q, Updated: %q", utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
}
|
||||
|
||||
fromLuaLogger.Debug("Finished retrieving modifications from Lua")
|
||||
fromLuaLogger.Trace("Final updated capture groups: %v", captureGroups)
|
||||
return captureGroups, nil
|
||||
}
|
||||
|
||||
// estimatePatternComplexity gives a rough estimate of regex pattern complexity
|
||||
// This can help identify potentially problematic patterns
|
||||
func estimatePatternComplexity(pattern string) int {
|
||||
estimateComplexityLogger := regexLogger.WithPrefix("estimatePatternComplexity").WithField("pattern", utils.LimitString(pattern, 100))
|
||||
estimateComplexityLogger.Debug("Estimating regex pattern complexity")
|
||||
complexity := len(pattern)
|
||||
|
||||
// Add complexity for potentially expensive operations
|
||||
complexity += strings.Count(pattern, ".*") * 10 // Greedy wildcard
|
||||
complexity += strings.Count(pattern, ".*?") * 5 // Non-greedy wildcard
|
||||
complexity += strings.Count(pattern, "[^") * 3 // Negated character class
|
||||
complexity += strings.Count(pattern, "\\b") * 2 // Word boundary
|
||||
complexity += strings.Count(pattern, "(") * 2 // Capture groups
|
||||
complexity += strings.Count(pattern, "(?:") * 1 // Non-capture groups
|
||||
complexity += strings.Count(pattern, "\\1") * 3 // Backreferences
|
||||
complexity += strings.Count(pattern, "{") * 2 // Counted repetition
|
||||
|
||||
estimateComplexityLogger.Debug("Estimated pattern complexity: %d", complexity)
|
||||
return complexity
|
||||
}
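A quick worked example of the heuristic (arithmetic only, using the weights above):
// pattern := `(?s)(\d+).*?stuff`   // 17 characters
// complexity = 17 (length)
//            + 1*10 for ".*"   (".*?" also contains ".*")
//            + 1*5  for ".*?"
//            + 2*2  for "("    ("(?s)" and the capture group)
//            = 36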
|
||||
|
File diff suppressed because it is too large

processor/surgical_json_test.go (new file, 599 lines)
@@ -0,0 +1,599 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSurgicalJSONEditing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
content string
|
||||
luaCode string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Modify single field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42,
|
||||
"description": "original"
|
||||
}`,
|
||||
luaCode: `
|
||||
data.value = 84
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 84,
|
||||
"description": "original"
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "Add new field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42
|
||||
}`,
|
||||
luaCode: `
|
||||
data.newField = "added"
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 42
|
||||
,"newField":"added"}`, // sjson.Set() adds new fields in compact format
|
||||
},
|
||||
{
|
||||
name: "Modify nested field",
|
||||
content: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": false,
|
||||
"timeout": 30
|
||||
}
|
||||
}
|
||||
}`,
|
||||
luaCode: `
|
||||
data.config.settings.enabled = true
|
||||
data.config.settings.timeout = 60
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": true,
|
||||
"timeout": 60
|
||||
}
|
||||
}
|
||||
}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: tt.luaCode,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(tt.content, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := tt.content
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check the actual result matches expected
|
||||
if result != tt.expected {
|
||||
t.Errorf("Expected:\n%s\n\nGot:\n%s", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSurgicalJSONPreservesFormatting(t *testing.T) {
|
||||
// Test that surgical editing preserves the original formatting structure
|
||||
content := `{
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"Description": "",
|
||||
"DisplayName": "",
|
||||
"FlavorText": "",
|
||||
"Icon": "None",
|
||||
"MaxStack": 1,
|
||||
"Override_Glow_Icon": "None",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false
|
||||
},
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Rows": [
|
||||
{
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber",
|
||||
"Weight": 10
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
expected := `{
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"Description": "",
|
||||
"DisplayName": "",
|
||||
"FlavorText": "",
|
||||
"Icon": "None",
|
||||
"MaxStack": 1,
|
||||
"Override_Glow_Icon": "None",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false
|
||||
},
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Rows": [
|
||||
{
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber",
|
||||
"Weight": 500
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
-- Modify the weight of the first item
|
||||
data.Rows[1].Weight = 500
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(content, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := content
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check that the result matches expected (preserves formatting and changes weight)
|
||||
if result != expected {
|
||||
t.Errorf("Expected:\n%s\n\nGot:\n%s", expected, result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSurgicalJSONPreservesFormatting2(t *testing.T) {
|
||||
// Test that surgical editing preserves the original formatting structure
|
||||
content := `
|
||||
{
|
||||
"RowStruct": "/Script/Icarus.ProcessorRecipe",
|
||||
"Defaults": {
|
||||
"bForceDisableRecipe": false,
|
||||
"Requirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_Talents"
|
||||
},
|
||||
"SessionRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"CharacterRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"RequiredMillijoules": 2500,
|
||||
"RecipeSets": [],
|
||||
"ResourceCostMultipliers": [],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Container": {
|
||||
"Value": "None"
|
||||
},
|
||||
"ResourceInputs": [],
|
||||
"bSelectOutputItemRandomly": false,
|
||||
"bContainsContainer": false,
|
||||
"ItemIconOverride": {
|
||||
"ItemStaticData": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"ItemDynamicData": [],
|
||||
"ItemCustomStats": [],
|
||||
"CustomProperties": {
|
||||
"StaticWorldStats": [],
|
||||
"StaticWorldHeldStats": [],
|
||||
"Stats": [],
|
||||
"Alterations": [],
|
||||
"LivingItemSlots": []
|
||||
},
|
||||
"DatabaseGUID": "",
|
||||
"ItemOwnerLookupId": -1,
|
||||
"RuntimeTags": {
|
||||
"GameplayTags": []
|
||||
}
|
||||
},
|
||||
"Outputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemTemplate"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"ResourceOutputs": [],
|
||||
"Refundable": "Inherit",
|
||||
"ExperienceMultiplier": 1,
|
||||
"Audio": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CraftingAudioData"
|
||||
}
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Biofuel1",
|
||||
"RecipeSets": [
|
||||
{
|
||||
"RowName": "Composter",
|
||||
"DataTableName": "D_RecipeSets"
|
||||
}
|
||||
],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Raw_Meat",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 2,
|
||||
"DynamicProperties": []
|
||||
},
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Tree_Sap",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Outputs": [],
|
||||
"Audio": {
|
||||
"RowName": "Composter"
|
||||
},
|
||||
"ResourceOutputs": [
|
||||
{
|
||||
"Type": {
|
||||
"Value": "Biofuel"
|
||||
},
|
||||
"RequiredUnits": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
expected := `
|
||||
{
|
||||
"RowStruct": "/Script/Icarus.ProcessorRecipe",
|
||||
"Defaults": {
|
||||
"bForceDisableRecipe": false,
|
||||
"Requirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_Talents"
|
||||
},
|
||||
"SessionRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"CharacterRequirement": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CharacterFlags"
|
||||
},
|
||||
"RequiredMillijoules": 2500,
|
||||
"RecipeSets": [],
|
||||
"ResourceCostMultipliers": [],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Container": {
|
||||
"Value": "None"
|
||||
},
|
||||
"ResourceInputs": [],
|
||||
"bSelectOutputItemRandomly": false,
|
||||
"bContainsContainer": false,
|
||||
"ItemIconOverride": {
|
||||
"ItemStaticData": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"ItemDynamicData": [],
|
||||
"ItemCustomStats": [],
|
||||
"CustomProperties": {
|
||||
"StaticWorldStats": [],
|
||||
"StaticWorldHeldStats": [],
|
||||
"Stats": [],
|
||||
"Alterations": [],
|
||||
"LivingItemSlots": []
|
||||
},
|
||||
"DatabaseGUID": "",
|
||||
"ItemOwnerLookupId": -1,
|
||||
"RuntimeTags": {
|
||||
"GameplayTags": []
|
||||
}
|
||||
},
|
||||
"Outputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_ItemTemplate"
|
||||
},
|
||||
"Count": 1,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"ResourceOutputs": [],
|
||||
"Refundable": "Inherit",
|
||||
"ExperienceMultiplier": 1,
|
||||
"Audio": {
|
||||
"RowName": "None",
|
||||
"DataTableName": "D_CraftingAudioData"
|
||||
}
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"Name": "Biofuel1",
|
||||
"RecipeSets": [
|
||||
{
|
||||
"RowName": "Composter",
|
||||
"DataTableName": "D_RecipeSets"
|
||||
}
|
||||
],
|
||||
"Inputs": [
|
||||
{
|
||||
"Element": {
|
||||
"RowName": "Raw_Meat",
|
||||
"DataTableName": "D_ItemsStatic"
|
||||
},
|
||||
"Count": 2,
|
||||
"DynamicProperties": []
|
||||
}
|
||||
],
|
||||
"Outputs": [],
|
||||
"Audio": {
|
||||
"RowName": "Composter"
|
||||
},
|
||||
"ResourceOutputs": [
|
||||
{
|
||||
"Type": {
|
||||
"Value": "Biofuel"
|
||||
},
|
||||
"RequiredUnits": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
-- Define regex patterns for matching recipe names
|
||||
local function matchesPattern(name, pattern)
|
||||
local matches = re(pattern, name)
|
||||
-- Check if matches table has any content (index 0 or 1 should exist if there's a match)
|
||||
return matches and (matches[0] or matches[1])
|
||||
end
|
||||
|
||||
-- Selection pattern for recipes that get multiplied
|
||||
local selectionPattern = "(?-s)(Bulk_)?(Pistol|Rifle).*?Round.*?|(Carbon|Composite)_Paste.*|(Gold|Copper)_Wire|(Ironw|Copper)_Nail|(Platinum|Steel|Cold_Steel|Titanium)_Ingot|.*?Shotgun_Shell.*?|.*_Arrow|.*_Bolt|.*_Fertilizer_?\\d*|.*_Grenade|.*_Pill|.*_Tonic|Aluminum|Ammo_Casing|Animal_Fat|Carbon_Fiber|Composites|Concrete_Mix|Cured_Leather_?\\d?|Electronics|Epoxy_?\\d?|Glass\\d?|Gunpowder\\w*|Health_.*|Titanium_Plate|Organic_Resin|Platinum_Sheath|Refined_[a-zA-Z]+|Rope|Shotgun_Casing|Steel_Bloom\\d?|Tree_Sap\\w*"
|
||||
|
||||
-- Ingot pattern for recipes that get count set to 1
|
||||
local ingotPattern = "(?-s)(Platinum|Steel|Cold_Steel|Titanium)_Ingot|Aluminum|Refined_[a-zA-Z]+|Glass\\d?"
|
||||
|
||||
local factor = 16
|
||||
local bonus = 0.5
|
||||
|
||||
for _, row in ipairs(data.Rows) do
|
||||
local recipeName = row.Name
|
||||
|
||||
-- Special case: Biofuel recipes - remove Tree_Sap input
|
||||
if string.find(recipeName, "Biofuel") then
|
||||
if row.Inputs then
|
||||
for i = #row.Inputs, 1, -1 do
|
||||
local input = row.Inputs[i]
|
||||
if input.Element and input.Element.RowName and string.find(input.Element.RowName, "Tree_Sap") then
|
||||
table.remove(row.Inputs, i)
|
||||
print("Removing input 'Tree_Sap' from processor recipe '" .. recipeName .. "'")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Ingot recipes: set input and output counts to 1
|
||||
if matchesPattern(recipeName, ingotPattern) then
|
||||
if row.Inputs then
|
||||
for _, input in ipairs(row.Inputs) do
|
||||
input.Count = 1
|
||||
end
|
||||
end
|
||||
if row.Outputs then
|
||||
for _, output in ipairs(row.Outputs) do
|
||||
output.Count = 1
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Selected recipes: multiply inputs by factor, outputs by factor * (1 + bonus)
|
||||
if matchesPattern(recipeName, selectionPattern) then
|
||||
if row.Inputs then
|
||||
for _, input in ipairs(row.Inputs) do
|
||||
local oldCount = input.Count
|
||||
input.Count = input.Count * factor
|
||||
print("Recipe " .. recipeName .. " Input.Count: " .. oldCount .. " -> " .. input.Count)
|
||||
end
|
||||
end
|
||||
|
||||
if row.Outputs then
|
||||
for _, output in ipairs(row.Outputs) do
|
||||
local oldCount = output.Count
|
||||
output.Count = math.floor(output.Count * factor * (1 + bonus))
|
||||
print("Recipe " .. recipeName .. " Output.Count: " .. oldCount .. " -> " .. output.Count)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(content, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := content
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check that the result matches expected (preserves formatting and changes weight)
|
||||
if result != expected {
|
||||
t.Errorf("Expected:\n%s\n\nGot:\n%s", expected, result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetardedJSONEditing(t *testing.T) {
|
||||
original := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 200,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
expected := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
for _, row in ipairs(data.Rows) do
|
||||
if row.MaxStack then
|
||||
if string.find(row.Name, "Carrot") or string.find(row.Name, "Potato") then
|
||||
row.MaxStack = 25
|
||||
else
|
||||
row.MaxStack = row.MaxStack * 10000
|
||||
if row.MaxStack > 1000000 then
|
||||
row.MaxStack = 1000000
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(original, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := original
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check that the weight was changed
|
||||
if result != expected {
|
||||
t.Errorf("Expected:\n%s\nGot:\n%s", expected, result)
|
||||
}
|
||||
}
|
27
processor/test_helper.go
Normal file
@@ -0,0 +1,27 @@
package processor

import (
    "io"
    "os"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

func init() {
    // Only modify logger in test mode
    // This checks if we're running under 'go test'
    if os.Getenv("GO_TESTING") == "1" || os.Getenv("TESTING") == "1" {
        // Initialize logger with ERROR level for tests
        // to minimize noise in test output
        logger.Init(logger.LevelError)

        // Optionally redirect logger output to discard
        // This prevents logger output from interfering with test output
        disableTestLogs := os.Getenv("ENABLE_TEST_LOGS") != "1"
        if disableTestLogs {
            // Create a new logger that writes to nowhere
            silentLogger := logger.New(io.Discard, "", 0)
            logger.Default = silentLogger
        }
    }
}
217
processor/xml.go
@@ -1,217 +0,0 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/antchfx/xmlquery"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// XMLProcessor implements the Processor interface for XML documents
|
||||
type XMLProcessor struct{}
|
||||
|
||||
// Process implements the Processor interface for XMLProcessor
|
||||
func (p *XMLProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
|
||||
// Read file content
|
||||
fullPath := filepath.Join(".", filename)
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error reading file: %v", err)
|
||||
}
|
||||
|
||||
fileContent := string(content)
|
||||
|
||||
// Process the content
|
||||
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
// If we made modifications, save the file
|
||||
if modCount > 0 {
|
||||
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
|
||||
if err != nil {
|
||||
return 0, 0, fmt.Errorf("error writing file: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// ProcessContent implements the Processor interface for XMLProcessor
|
||||
func (p *XMLProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
|
||||
// Parse XML document
|
||||
doc, err := xmlquery.Parse(strings.NewReader(content))
|
||||
if err != nil {
|
||||
return content, 0, 0, fmt.Errorf("error parsing XML: %v", err)
|
||||
}
|
||||
|
||||
// Find nodes matching the XPath pattern
|
||||
nodes, err := xmlquery.QueryAll(doc, pattern)
|
||||
if err != nil {
|
||||
return content, 0, 0, fmt.Errorf("error executing XPath: %v", err)
|
||||
}
|
||||
|
||||
matchCount := len(nodes)
|
||||
if matchCount == 0 {
|
||||
return content, 0, 0, nil
|
||||
}
|
||||
|
||||
// Initialize Lua
|
||||
L := lua.NewState()
|
||||
defer L.Close()
|
||||
|
||||
// Load math library
|
||||
L.Push(L.GetGlobal("require"))
|
||||
L.Push(lua.LString("math"))
|
||||
if err := L.PCall(1, 1, nil); err != nil {
|
||||
return content, 0, 0, fmt.Errorf("error loading Lua math library: %v", err)
|
||||
}
|
||||
|
||||
// Load helper functions
|
||||
if err := InitLuaHelpers(L); err != nil {
|
||||
return content, 0, 0, err
|
||||
}
|
||||
|
||||
// Apply modifications to each node
|
||||
modCount := 0
|
||||
for _, node := range nodes {
|
||||
// Reset Lua state for each node
|
||||
L.SetGlobal("v1", lua.LNil)
|
||||
L.SetGlobal("s1", lua.LNil)
|
||||
|
||||
// Get the node value
|
||||
var originalValue string
|
||||
if node.Type == xmlquery.AttributeNode {
|
||||
originalValue = node.InnerText()
|
||||
} else if node.Type == xmlquery.TextNode {
|
||||
originalValue = node.Data
|
||||
} else {
|
||||
originalValue = node.InnerText()
|
||||
}
|
||||
|
||||
// Convert to Lua variables
|
||||
err = p.ToLua(L, originalValue)
|
||||
if err != nil {
|
||||
return content, modCount, matchCount, fmt.Errorf("error converting to Lua: %v", err)
|
||||
}
|
||||
|
||||
// Execute Lua script
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
return content, modCount, matchCount, fmt.Errorf("error executing Lua: %v", err)
|
||||
}
|
||||
|
||||
// Get modified value
|
||||
result, err := p.FromLua(L)
|
||||
if err != nil {
|
||||
return content, modCount, matchCount, fmt.Errorf("error getting result from Lua: %v", err)
|
||||
}
|
||||
|
||||
newValue, ok := result.(string)
|
||||
if !ok {
|
||||
return content, modCount, matchCount, fmt.Errorf("expected string result from Lua, got %T", result)
|
||||
}
|
||||
|
||||
// Skip if no change
|
||||
if newValue == originalValue {
|
||||
continue
|
||||
}
|
||||
|
||||
// Apply modification
|
||||
if node.Type == xmlquery.AttributeNode {
|
||||
// For attribute nodes, update the attribute value
|
||||
node.Parent.Attr = append([]xmlquery.Attr{}, node.Parent.Attr...)
|
||||
for i, attr := range node.Parent.Attr {
|
||||
if attr.Name.Local == node.Data {
|
||||
node.Parent.Attr[i].Value = newValue
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if node.Type == xmlquery.TextNode {
|
||||
// For text nodes, update the text content
|
||||
node.Data = newValue
|
||||
} else {
|
||||
// For element nodes, replace inner text
|
||||
// Simple approach: set the InnerText directly if there are no child elements
|
||||
if node.FirstChild == nil || (node.FirstChild != nil && node.FirstChild.Type == xmlquery.TextNode && node.FirstChild.NextSibling == nil) {
|
||||
if node.FirstChild != nil {
|
||||
node.FirstChild.Data = newValue
|
||||
} else {
|
||||
// Create a new text node and add it as the first child
|
||||
textNode := &xmlquery.Node{
|
||||
Type: xmlquery.TextNode,
|
||||
Data: newValue,
|
||||
}
|
||||
node.FirstChild = textNode
|
||||
}
|
||||
} else {
|
||||
// Complex case: node has mixed content or child elements
|
||||
// Replace just the text content while preserving child elements
|
||||
// This is a simplified approach - more complex XML may need more robust handling
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
if child.Type == xmlquery.TextNode {
|
||||
child.Data = newValue
|
||||
break // Update only the first text node
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
modCount++
|
||||
}
|
||||
|
||||
// Serialize the modified XML document to string
|
||||
if doc.FirstChild != nil && doc.FirstChild.Type == xmlquery.DeclarationNode {
|
||||
// If we have an XML declaration, start with it
|
||||
declaration := doc.FirstChild.OutputXML(true)
|
||||
// Remove the firstChild (declaration) before serializing the rest of the document
|
||||
doc.FirstChild = doc.FirstChild.NextSibling
|
||||
return declaration + doc.OutputXML(true), modCount, matchCount, nil
|
||||
}
|
||||
|
||||
return doc.OutputXML(true), modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// ToLua converts XML node values to Lua variables
|
||||
func (p *XMLProcessor) ToLua(L *lua.LState, data interface{}) error {
|
||||
value, ok := data.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("expected string value, got %T", data)
|
||||
}
|
||||
|
||||
// Set as string variable
|
||||
L.SetGlobal("s1", lua.LString(value))
|
||||
|
||||
// Try to convert to number if possible
|
||||
L.SetGlobal("v1", lua.LNumber(0)) // Default to 0
|
||||
if err := L.DoString(fmt.Sprintf("v1 = tonumber(%q) or 0", value)); err != nil {
|
||||
return fmt.Errorf("error converting value to number: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// FromLua gets modified values from Lua
|
||||
func (p *XMLProcessor) FromLua(L *lua.LState) (interface{}, error) {
|
||||
// Check if string variable was modified
|
||||
s1 := L.GetGlobal("s1")
|
||||
if s1 != lua.LNil {
|
||||
if s1Str, ok := s1.(lua.LString); ok {
|
||||
return string(s1Str), nil
|
||||
}
|
||||
}
|
||||
|
||||
// Check if numeric variable was modified
|
||||
v1 := L.GetGlobal("v1")
|
||||
if v1 != lua.LNil {
|
||||
if v1Num, ok := v1.(lua.LNumber); ok {
|
||||
return fmt.Sprintf("%v", v1Num), nil
|
||||
}
|
||||
}
|
||||
|
||||
// Default return empty string
|
||||
return "", nil
|
||||
}
|
File diff suppressed because it is too large
137
regression/regression_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package regression
|
||||
|
||||
import (
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||
command := utils.ModifyCommand{
|
||||
Regex: regex,
|
||||
Lua: lua,
|
||||
LogLevel: "TRACE",
|
||||
}
|
||||
|
||||
commands, err := processor.ProcessRegex(content, command, "test")
|
||||
if err != nil {
|
||||
return "", 0, 0, err
|
||||
}
|
||||
|
||||
result, modifications := utils.ExecuteModifications(commands, content)
|
||||
return result, modifications, len(commands), nil
|
||||
}
|
||||
|
||||
func TestTalentsMechanicOutOfRange(t *testing.T) {
|
||||
given := `<Talent identifier="quickfixer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.quickfixer">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="10" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="quickfixer" amount="10.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>`
|
||||
|
||||
actual := `<Talent identifier="quickfixer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.quickfixer">
|
||||
<Replace tag="[amount]" value="30" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="quickfixer" amount="20"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>`
|
||||
|
||||
result, mods, matches, err := ApiAdaptor(given, `<Talent identifier="quickfixer">!anyvalue="(?<movementspeed>!num)"!anyvalue="(?<duration>!num)"!anyvalue="(?<repairspeed>!num)"!anyamount="(?<durationv>!num)"`, "movementspeed=round(movementspeed*1.5, 2) duration=round(duration*2, 2) repairspeed=round(repairspeed*2, 2) durationv=duration")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
if matches != 4 {
|
||||
t.Errorf("Expected 4 matches, got %d", matches)
|
||||
}
|
||||
|
||||
if mods != 4 {
|
||||
t.Errorf("Expected 4 modifications, got %d", mods)
|
||||
}
|
||||
|
||||
if result != actual {
|
||||
t.Errorf("expected %s, got %s", actual, result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndexExplosions_ShouldNotPanic(t *testing.T) {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting current working directory: %v", err)
|
||||
}
|
||||
|
||||
given, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItems.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
|
||||
expected, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItemsExpected.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
|
||||
result, _, _, err := ApiAdaptor(string(given), `(?-s)LightComponent!anyrange="(!num)"`, "*4")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
// We intentionally don't assert an exact match count here; what matters is that the final result is correct
|
||||
// if matches != 45 {
|
||||
// t.Errorf("Expected 45 match, got %d", matches)
|
||||
// }
|
||||
//
|
||||
// if mods != 45 {
|
||||
// t.Errorf("Expected 45 modification, got %d", mods)
|
||||
// }
|
||||
|
||||
if string(result) != string(expected) {
|
||||
t.Errorf("expected %s, got %s", expected, result)
|
||||
}
|
||||
}
|
@@ -16,7 +16,8 @@ fi
echo "Tag: $TAG"

echo "Building the thing..."
go build -o BigChef.exe .
go build -o chef.exe .
go install .

echo "Creating a release..."
TOKEN="$GITEA_API_KEY"
@@ -43,6 +44,6 @@ echo "Release ID: $RELEASE_ID"
echo "Uploading the things..."
curl -X POST \
  -H "Authorization: token $TOKEN" \
  -F "attachment=@BigChef.exe" \
  "$GITEA/api/v1/repos/$REPO/releases/${RELEASE_ID}/assets?name=BigChef.exe"
rm BigChef.exe
  -F "attachment=@chef.exe" \
  "$GITEA/api/v1/repos/$REPO/releases/${RELEASE_ID}/assets?name=chef.exe"
rm chef.exe
@@ -1,12 +0,0 @@
<config>
  <item>
    <value>75</value>
    <multiplier>2</multiplier>
    <divider>4</divider>
  </item>
  <item>
    <value>150</value>
    <multiplier>3</multiplier>
    <divider>2</divider>
  </item>
</config>
@@ -1,37 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testdata>
  <!-- Numeric values -->
  <item>
    <id>1</id>
    <value>200</value>
    <price>24.99</price>
    <quantity>5</quantity>
  </item>

  <!-- Text values -->
  <item>
    <id>2</id>
    <name>Test Product</name>
    <description>This is a test product description</description>
    <category>Test</category>
  </item>

  <!-- Mixed content -->
  <item>
    <id>3</id>
    <name>Mixed Product</name>
    <price>19.99</price>
    <code>PRD-123</code>
    <tags>sale,discount,new</tags>
  </item>

  <!-- Empty and special values -->
  <item>
    <id>4</id>
    <value></value>
    <specialChars>Hello & World < > " '</specialChars>
    <multiline>Line 1
Line 2
Line 3</multiline>
  </item>
</testdata>
11
test_surgical.yml
Normal file
@@ -0,0 +1,11 @@
- name: SurgicalWeightTest
  json: true
  lua: |
    -- This demonstrates surgical JSON editing
    -- Only the Weight field of Item_Fiber will be modified
    data.Rows[1].Weight = 999
    modified = true
  files:
    - 'D_Itemable.json'
  reset: false
  loglevel: INFO
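For context, a cook-file entry like the one above is parsed by the loaders added in utils/modifycommand.go later in this diff. The snippet below is only an illustrative sketch: the main wrapper and the hard-coded file path are assumptions, not part of this change, while LoadCommandsFromCookFile and Validate are used exactly as defined below.

package main

import (
    "fmt"
    "os"

    "cook/utils"
)

func main() {
    // Hypothetical driver: read the cook file shown above and load its commands.
    data, err := os.ReadFile("test_surgical.yml")
    if err != nil {
        panic(err)
    }
    commands, err := utils.LoadCommandsFromCookFile(data)
    if err != nil {
        panic(err)
    }
    for i := range commands {
        // Validate rejects entries without a Lua script or target files
        // and defaults LogLevel to INFO when it is missing.
        if err := commands[i].Validate(); err != nil {
            fmt.Println("invalid command:", err)
            continue
        }
        fmt.Println("loaded command:", commands[i].Name)
    }
}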
1252
testfiles/OutpostItems.xml
Normal file
File diff suppressed because it is too large
1252
testfiles/OutpostItemsExpected.xml
Normal file
File diff suppressed because it is too large
@@ -1 +0,0 @@
<config><item><value>100</value></item></config>
157
utils/db.go
Normal file
@@ -0,0 +1,157 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
gormlogger "gorm.io/gorm/logger"
|
||||
)
|
||||
|
||||
// dbLogger is a scoped logger for the utils/db package.
|
||||
var dbLogger = logger.Default.WithPrefix("utils/db")
|
||||
|
||||
type DB interface {
|
||||
DB() *gorm.DB
|
||||
Raw(sql string, args ...any) *gorm.DB
|
||||
SaveFile(filePath string, fileData []byte) error
|
||||
GetFile(filePath string) ([]byte, error)
|
||||
GetAllFiles() ([]FileSnapshot, error)
|
||||
RemoveAllFiles() error
|
||||
}
|
||||
|
||||
type FileSnapshot struct {
|
||||
Date time.Time `gorm:"primaryKey"`
|
||||
FilePath string `gorm:"primaryKey"`
|
||||
FileData []byte `gorm:"type:blob"`
|
||||
}
|
||||
|
||||
type DBWrapper struct {
|
||||
db *gorm.DB
|
||||
}
|
||||
|
||||
var globalDB *DBWrapper
|
||||
|
||||
func GetDB() (DB, error) {
|
||||
getDBLogger := dbLogger.WithPrefix("GetDB")
|
||||
getDBLogger.Debug("Attempting to get database connection")
|
||||
var err error
|
||||
|
||||
dbFile := filepath.Join("data.sqlite")
|
||||
getDBLogger.Debug("Opening database file: %q", dbFile)
|
||||
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
|
||||
// SkipDefaultTransaction: true,
|
||||
PrepareStmt: true,
|
||||
Logger: gormlogger.Default.LogMode(gormlogger.Silent),
|
||||
})
|
||||
if err != nil {
|
||||
getDBLogger.Error("Failed to open database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getDBLogger.Debug("Database opened successfully, running auto migration")
|
||||
if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
|
||||
getDBLogger.Error("Auto migration failed: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getDBLogger.Debug("Auto migration completed")
|
||||
|
||||
globalDB = &DBWrapper{db: db}
|
||||
getDBLogger.Debug("Database wrapper initialized")
|
||||
return globalDB, nil
|
||||
}
|
||||
|
||||
// Just a wrapper
|
||||
func (db *DBWrapper) Raw(sql string, args ...any) *gorm.DB {
|
||||
rawLogger := dbLogger.WithPrefix("Raw").WithField("sql", sql)
|
||||
rawLogger.Debug("Executing raw SQL query with args: %v", args)
|
||||
return db.db.Raw(sql, args...)
|
||||
}
|
||||
|
||||
func (db *DBWrapper) DB() *gorm.DB {
|
||||
dbLogger.WithPrefix("DB").Debug("Returning GORM DB instance")
|
||||
return db.db
|
||||
}
|
||||
|
||||
func (db *DBWrapper) FileExists(filePath string) (bool, error) {
|
||||
fileExistsLogger := dbLogger.WithPrefix("FileExists").WithField("filePath", filePath)
|
||||
fileExistsLogger.Debug("Checking if file exists in database")
|
||||
var count int64
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).Count(&count).Error
|
||||
if err != nil {
|
||||
fileExistsLogger.Error("Error checking if file exists: %v", err)
|
||||
return false, err
|
||||
}
|
||||
fileExistsLogger.Debug("File exists: %t", count > 0)
|
||||
return count > 0, err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
|
||||
saveFileLogger := dbLogger.WithPrefix("SaveFile").WithField("filePath", filePath)
|
||||
saveFileLogger.Debug("Attempting to save file to database")
|
||||
saveFileLogger.Trace("File data length: %d", len(fileData))
|
||||
|
||||
exists, err := db.FileExists(filePath)
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Error checking if file exists: %v", err)
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
saveFileLogger.Debug("File already exists, skipping save")
|
||||
return nil
|
||||
}
|
||||
saveFileLogger.Debug("Creating new file snapshot in database")
|
||||
err = db.db.Create(&FileSnapshot{
|
||||
Date: time.Now(),
|
||||
FilePath: filePath,
|
||||
FileData: fileData,
|
||||
}).Error
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Failed to create file snapshot: %v", err)
|
||||
} else {
|
||||
saveFileLogger.Debug("File saved successfully to database")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetFile(filePath string) ([]byte, error) {
|
||||
getFileLogger := dbLogger.WithPrefix("GetFile").WithField("filePath", filePath)
|
||||
getFileLogger.Debug("Getting file from database")
|
||||
var fileSnapshot FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).First(&fileSnapshot).Error
|
||||
if err != nil {
|
||||
// Downgrade not-found to warning to avoid noisy errors during first run
|
||||
getFileLogger.Warning("Failed to get file from database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getFileLogger.Debug("File found in database")
|
||||
getFileLogger.Trace("Retrieved file data length: %d", len(fileSnapshot.FileData))
|
||||
return fileSnapshot.FileData, nil
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetAllFiles() ([]FileSnapshot, error) {
|
||||
getAllFilesLogger := dbLogger.WithPrefix("GetAllFiles")
|
||||
getAllFilesLogger.Debug("Getting all files from database")
|
||||
var fileSnapshots []FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Find(&fileSnapshots).Error
|
||||
if err != nil {
|
||||
getAllFilesLogger.Error("Failed to get all files from database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getAllFilesLogger.Debug("Found %d files in database", len(fileSnapshots))
|
||||
getAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
|
||||
return fileSnapshots, nil
|
||||
}
|
||||
|
||||
func (db *DBWrapper) RemoveAllFiles() error {
|
||||
removeAllFilesLogger := dbLogger.WithPrefix("RemoveAllFiles")
|
||||
removeAllFilesLogger.Debug("Removing all files from database")
|
||||
err := db.db.Exec("DELETE FROM file_snapshots").Error
|
||||
if err != nil {
|
||||
removeAllFilesLogger.Error("Failed to remove all files from database: %v", err)
|
||||
} else {
|
||||
removeAllFilesLogger.Debug("All files removed from database")
|
||||
}
|
||||
return err
|
||||
}
|
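As a quick illustration of the snapshot store above, the sketch below (an assumption about typical usage, not code from this change) opens the database and round-trips one file through SaveFile and GetFile.

package main

import (
    "fmt"

    "cook/utils"
)

func main() {
    db, err := utils.GetDB() // opens data.sqlite and migrates FileSnapshot
    if err != nil {
        panic(err)
    }
    // SaveFile stores a baseline snapshot once; a second call for the
    // same path is a no-op because the snapshot already exists.
    if err := db.SaveFile("example.xml", []byte("<config/>")); err != nil {
        panic(err)
    }
    original, err := db.GetFile("example.xml")
    if err != nil {
        panic(err)
    }
    fmt.Printf("restored %d bytes\n", len(original))
}

Because SaveFile only writes the first snapshot for a path, repeated runs keep the original baseline, which is what ResetWhereNecessary and ResetAllFiles in utils/file.go rely on when writing files back to disk.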
152
utils/file.go
Normal file
@@ -0,0 +1,152 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// fileLogger is a scoped logger for the utils/file package.
|
||||
var fileLogger = logger.Default.WithPrefix("utils/file")
|
||||
|
||||
func CleanPath(path string) string {
|
||||
cleanPathLogger := fileLogger.WithPrefix("CleanPath")
|
||||
cleanPathLogger.Debug("Cleaning path: %q", path)
|
||||
cleanPathLogger.Trace("Original path: %q", path)
|
||||
path = filepath.Clean(path)
|
||||
path = strings.ReplaceAll(path, "\\", "/")
|
||||
cleanPathLogger.Trace("Cleaned path result: %q", path)
|
||||
return path
|
||||
}
|
||||
|
||||
func ToAbs(path string) string {
|
||||
toAbsLogger := fileLogger.WithPrefix("ToAbs")
|
||||
toAbsLogger.Debug("Converting path to absolute: %q", path)
|
||||
toAbsLogger.Trace("Input path: %q", path)
|
||||
if filepath.IsAbs(path) {
|
||||
toAbsLogger.Debug("Path is already absolute, cleaning it.")
|
||||
cleanedPath := CleanPath(path)
|
||||
toAbsLogger.Trace("Already absolute path after cleaning: %q", cleanedPath)
|
||||
return cleanedPath
|
||||
}
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
toAbsLogger.Error("Error getting current working directory: %v", err)
|
||||
return CleanPath(path)
|
||||
}
|
||||
toAbsLogger.Trace("Current working directory: %q", cwd)
|
||||
cleanedPath := CleanPath(filepath.Join(cwd, path))
|
||||
toAbsLogger.Trace("Converted absolute path result: %q", cleanedPath)
|
||||
return cleanedPath
|
||||
}
|
||||
|
||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||
func LimitString(s string, maxLen int) string {
|
||||
limitStringLogger := fileLogger.WithPrefix("LimitString").WithField("originalLength", len(s)).WithField("maxLength", maxLen)
|
||||
limitStringLogger.Debug("Limiting string length")
|
||||
s = strings.ReplaceAll(s, "\n", "\\n")
|
||||
if len(s) <= maxLen {
|
||||
limitStringLogger.Trace("String length (%d) is within max length (%d), no truncation", len(s), maxLen)
|
||||
return s
|
||||
}
|
||||
limited := s[:maxLen-3] + "..."
|
||||
limitStringLogger.Trace("String truncated from %d to %d characters: %q", len(s), len(limited), limited)
|
||||
return limited
|
||||
}
|
||||
|
||||
// StrToFloat converts a string to a float64, returning 0 on error.
|
||||
func StrToFloat(s string) float64 {
|
||||
strToFloatLogger := fileLogger.WithPrefix("StrToFloat").WithField("inputString", s)
|
||||
strToFloatLogger.Debug("Attempting to convert string to float")
|
||||
f, err := strconv.ParseFloat(s, 64)
|
||||
if err != nil {
|
||||
strToFloatLogger.Warning("Failed to convert string %q to float, returning 0: %v", s, err)
|
||||
return 0
|
||||
}
|
||||
strToFloatLogger.Trace("Successfully converted %q to float: %f", s, f)
|
||||
return f
|
||||
}
|
||||
|
||||
func ResetWhereNecessary(associations map[string]FileCommandAssociation, db DB) error {
|
||||
resetWhereNecessaryLogger := fileLogger.WithPrefix("ResetWhereNecessary")
|
||||
resetWhereNecessaryLogger.Debug("Starting reset where necessary operation")
|
||||
resetWhereNecessaryLogger.Trace("File-command associations input: %v", associations)
|
||||
dirtyFiles := make(map[string]struct{})
|
||||
for _, association := range associations {
|
||||
resetWhereNecessaryLogger.Debug("Processing association for file: %q", association.File)
|
||||
for _, command := range association.Commands {
|
||||
resetWhereNecessaryLogger.Debug("Checking command %q for reset requirement", command.Name)
|
||||
resetWhereNecessaryLogger.Trace("Command details: %v", command)
|
||||
if command.Reset {
|
||||
resetWhereNecessaryLogger.Debug("Command %q requires reset for file %q, marking as dirty", command.Name, association.File)
|
||||
dirtyFiles[association.File] = struct{}{}
|
||||
}
|
||||
}
|
||||
for _, command := range association.IsolateCommands {
|
||||
resetWhereNecessaryLogger.Debug("Checking isolate command %q for reset requirement", command.Name)
|
||||
resetWhereNecessaryLogger.Trace("Isolate command details: %v", command)
|
||||
if command.Reset {
|
||||
resetWhereNecessaryLogger.Debug("Isolate command %q requires reset for file %q, marking as dirty", command.Name, association.File)
|
||||
dirtyFiles[association.File] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
resetWhereNecessaryLogger.Debug("Identified %d files that need to be reset", len(dirtyFiles))
|
||||
resetWhereNecessaryLogger.Trace("Dirty files identified: %v", dirtyFiles)
|
||||
|
||||
for file := range dirtyFiles {
|
||||
resetWhereNecessaryLogger.Debug("Resetting file %q", file)
|
||||
fileData, err := db.GetFile(file)
|
||||
if err != nil {
|
||||
resetWhereNecessaryLogger.Warning("Failed to get original content for file %q from database: %v", file, err)
|
||||
// Seed the snapshot from current disk content if missing, then use it as fallback
|
||||
currentData, readErr := os.ReadFile(file)
|
||||
if readErr != nil {
|
||||
resetWhereNecessaryLogger.Warning("Additionally failed to read current file content for %q: %v", file, readErr)
|
||||
continue
|
||||
}
|
||||
// Best-effort attempt to save baseline; ignore errors to avoid blocking reset
|
||||
if saveErr := db.SaveFile(file, currentData); saveErr != nil {
|
||||
resetWhereNecessaryLogger.Warning("Failed to seed baseline snapshot for %q: %v", file, saveErr)
|
||||
}
|
||||
fileData = currentData
|
||||
}
|
||||
resetWhereNecessaryLogger.Trace("Retrieved original file data length for %q: %d", file, len(fileData))
|
||||
resetWhereNecessaryLogger.Debug("Writing original content back to file %q", file)
|
||||
err = os.WriteFile(file, fileData, 0644)
|
||||
if err != nil {
|
||||
resetWhereNecessaryLogger.Warning("Failed to write original content back to file %q: %v", file, err)
|
||||
continue
|
||||
}
|
||||
resetWhereNecessaryLogger.Debug("Successfully reset file %q", file)
|
||||
}
|
||||
resetWhereNecessaryLogger.Debug("Finished reset where necessary operation")
|
||||
return nil
|
||||
}
|
||||
|
||||
func ResetAllFiles(db DB) error {
|
||||
resetAllFilesLogger := fileLogger.WithPrefix("ResetAllFiles")
|
||||
resetAllFilesLogger.Debug("Starting reset all files operation")
|
||||
fileSnapshots, err := db.GetAllFiles()
|
||||
if err != nil {
|
||||
resetAllFilesLogger.Error("Failed to get all file snapshots from database: %v", err)
|
||||
return err
|
||||
}
|
||||
resetAllFilesLogger.Debug("Found %d files in database to reset", len(fileSnapshots))
|
||||
resetAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
|
||||
|
||||
for _, fileSnapshot := range fileSnapshots {
|
||||
resetAllFilesLogger.Debug("Resetting file %q", fileSnapshot.FilePath)
|
||||
err = os.WriteFile(fileSnapshot.FilePath, fileSnapshot.FileData, 0644)
|
||||
if err != nil {
|
||||
resetAllFilesLogger.Warning("Failed to write file %q to disk: %v", fileSnapshot.FilePath, err)
|
||||
continue
|
||||
}
|
||||
resetAllFilesLogger.Debug("File %q written to disk successfully", fileSnapshot.FilePath)
|
||||
}
|
||||
resetAllFilesLogger.Debug("Finished reset all files operation")
|
||||
return nil
|
||||
}
|
21
utils/flags.go
Normal file
@@ -0,0 +1,21 @@
package utils

import (
    "flag"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

// flagsLogger is a scoped logger for the utils/flags package.
var flagsLogger = logger.Default.WithPrefix("utils/flags")

var (
    ParallelFiles = flag.Int("P", 100, "Number of files to process in parallel")
    Filter        = flag.String("f", "", "Filter commands before running them")
    JSON          = flag.Bool("json", false, "Enable JSON mode for processing JSON files")
)

func init() {
    flagsLogger.Debug("Initializing flags")
    flagsLogger.Trace("ParallelFiles initial value: %d, Filter initial value: %q, JSON initial value: %t", *ParallelFiles, *Filter, *JSON)
}
375
utils/modifycommand.go
Normal file
@@ -0,0 +1,375 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
// modifyCommandLogger is a scoped logger for the utils/modifycommand package.
|
||||
var modifyCommandLogger = logger.Default.WithPrefix("utils/modifycommand")
|
||||
|
||||
type ModifyCommand struct {
|
||||
Name string `yaml:"name,omitempty"`
|
||||
Regex string `yaml:"regex,omitempty"`
|
||||
Regexes []string `yaml:"regexes,omitempty"`
|
||||
Lua string `yaml:"lua,omitempty"`
|
||||
Files []string `yaml:"files,omitempty"`
|
||||
Reset bool `yaml:"reset,omitempty"`
|
||||
LogLevel string `yaml:"loglevel,omitempty"`
|
||||
Isolate bool `yaml:"isolate,omitempty"`
|
||||
NoDedup bool `yaml:"nodedup,omitempty"`
|
||||
Disabled bool `yaml:"disable,omitempty"`
|
||||
JSON bool `yaml:"json,omitempty"`
|
||||
Modifiers map[string]interface{} `yaml:"modifiers,omitempty"`
|
||||
}
|
||||
|
||||
type CookFile []ModifyCommand
|
||||
|
||||
func (c *ModifyCommand) Validate() error {
|
||||
validateLogger := modifyCommandLogger.WithPrefix("Validate").WithField("commandName", c.Name)
|
||||
validateLogger.Debug("Validating command")
|
||||
|
||||
// For JSON mode, regex patterns are not required
|
||||
if !c.JSON {
|
||||
if c.Regex == "" && len(c.Regexes) == 0 {
|
||||
validateLogger.Error("Validation failed: Regex pattern is required for non-JSON mode")
|
||||
return fmt.Errorf("pattern is required for non-JSON mode")
|
||||
}
|
||||
}
|
||||
|
||||
if c.Lua == "" {
|
||||
validateLogger.Error("Validation failed: Lua expression is required")
|
||||
return fmt.Errorf("lua expression is required")
|
||||
}
|
||||
if len(c.Files) == 0 {
|
||||
validateLogger.Error("Validation failed: At least one file is required")
|
||||
return fmt.Errorf("at least one file is required")
|
||||
}
|
||||
if c.LogLevel == "" {
|
||||
validateLogger.Debug("LogLevel not specified, defaulting to INFO")
|
||||
c.LogLevel = "INFO"
|
||||
}
|
||||
validateLogger.Debug("Command validated successfully")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Memoize glob match results; in practice the speedup turned out to be modest
|
||||
var matchesMemoTable map[string]bool = make(map[string]bool)
|
||||
|
||||
func Matches(path string, glob string) (bool, error) {
|
||||
matchesLogger := modifyCommandLogger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
|
||||
matchesLogger.Debug("Checking if path matches glob")
|
||||
key := fmt.Sprintf("%s:%s", path, glob)
|
||||
if matches, ok := matchesMemoTable[key]; ok {
|
||||
matchesLogger.Debug("Found match in memo table: %t", matches)
|
||||
return matches, nil
|
||||
}
|
||||
matches, err := doublestar.Match(glob, path)
|
||||
if err != nil {
|
||||
matchesLogger.Error("Failed to match glob: %v", err)
|
||||
return false, fmt.Errorf("failed to match glob %s with file %s: %w", glob, path, err)
|
||||
}
|
||||
matchesMemoTable[key] = matches
|
||||
matchesLogger.Debug("Match result: %t, storing in memo table", matches)
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func SplitPattern(pattern string) (string, string) {
|
||||
splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
|
||||
splitPatternLogger.Debug("Splitting pattern")
|
||||
splitPatternLogger.Trace("Original pattern: %q", pattern)
|
||||
static, pattern := doublestar.SplitPattern(pattern)
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
splitPatternLogger.Error("Error getting current working directory: %v", err)
|
||||
return "", ""
|
||||
}
|
||||
splitPatternLogger.Trace("Current working directory: %q", cwd)
|
||||
if static == "" {
|
||||
splitPatternLogger.Debug("Static part is empty, defaulting to current working directory")
|
||||
static = cwd
|
||||
}
|
||||
if !filepath.IsAbs(static) {
|
||||
splitPatternLogger.Debug("Static part is not absolute, joining with current working directory")
|
||||
static = filepath.Join(cwd, static)
|
||||
static = filepath.Clean(static)
|
||||
splitPatternLogger.Trace("Static path after joining and cleaning: %q", static)
|
||||
}
|
||||
static = strings.ReplaceAll(static, "\\", "/")
|
||||
splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
|
||||
return static, pattern
|
||||
}
|
||||
|
||||
type FileCommandAssociation struct {
|
||||
File string
|
||||
IsolateCommands []ModifyCommand
|
||||
Commands []ModifyCommand
|
||||
}
|
||||
|
||||
func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[string]FileCommandAssociation, error) {
|
||||
associateFilesLogger := modifyCommandLogger.WithPrefix("AssociateFilesWithCommands")
|
||||
associateFilesLogger.Debug("Associating files with commands")
|
||||
associateFilesLogger.Trace("Input files: %v", files)
|
||||
associateFilesLogger.Trace("Input commands: %v", commands)
|
||||
associationCount := 0
|
||||
fileCommands := make(map[string]FileCommandAssociation)
|
||||
|
||||
for _, file := range files {
|
||||
file = strings.ReplaceAll(file, "\\", "/")
|
||||
associateFilesLogger.Debug("Processing file: %q", file)
|
||||
fileCommands[file] = FileCommandAssociation{
|
||||
File: file,
|
||||
IsolateCommands: []ModifyCommand{},
|
||||
Commands: []ModifyCommand{},
|
||||
}
|
||||
for _, command := range commands {
|
||||
associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
|
||||
for _, glob := range command.Files {
|
||||
glob = strings.ReplaceAll(glob, "\\", "/")
|
||||
static, pattern := SplitPattern(glob)
|
||||
associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)
|
||||
|
||||
// Build absolute path for the current file to compare with static
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
associateFilesLogger.Warning("Failed to get CWD when matching %q for file %q: %v", glob, file, err)
|
||||
continue
|
||||
}
|
||||
var absFile string
|
||||
if filepath.IsAbs(file) {
|
||||
absFile = filepath.Clean(file)
|
||||
} else {
|
||||
absFile = filepath.Clean(filepath.Join(cwd, file))
|
||||
}
|
||||
absFile = strings.ReplaceAll(absFile, "\\", "/")
|
||||
associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)
|
||||
|
||||
// Only match if the file is under the static root
|
||||
if !(strings.HasPrefix(absFile, static+"/") || absFile == static) {
|
||||
associateFilesLogger.Trace("Skipping glob %q for file %q because file is outside static root %q", glob, file, static)
|
||||
continue
|
||||
}
|
||||
|
||||
patternFile := strings.TrimPrefix(absFile, static+`/`)
|
||||
associateFilesLogger.Trace("Pattern-relative path used for match: %q", patternFile)
|
||||
matches, err := Matches(patternFile, pattern)
|
||||
if err != nil {
|
||||
associateFilesLogger.Warning("Failed to match glob %q with file %q: %v", glob, file, err)
|
||||
continue
|
||||
}
|
||||
if matches {
|
||||
associateFilesLogger.Debug("File %q matches glob %q. Associating with command %q", file, glob, command.Name)
|
||||
association := fileCommands[file]
|
||||
|
||||
if command.Isolate {
|
||||
associateFilesLogger.Debug("Command %q is an isolate command, adding to isolate list", command.Name)
|
||||
association.IsolateCommands = append(association.IsolateCommands, command)
|
||||
} else {
|
||||
associateFilesLogger.Debug("Command %q is a regular command, adding to regular list", command.Name)
|
||||
association.Commands = append(association.Commands, command)
|
||||
}
|
||||
fileCommands[file] = association
|
||||
associationCount++
|
||||
} else {
|
||||
associateFilesLogger.Trace("File %q did not match glob %q (pattern=%q, rel=%q)", file, glob, pattern, patternFile)
|
||||
}
|
||||
}
|
||||
}
|
||||
currentFileCommands := fileCommands[file]
|
||||
associateFilesLogger.Debug("Finished processing file %q. Found %d regular commands and %d isolate commands", file, len(currentFileCommands.Commands), len(currentFileCommands.IsolateCommands))
|
||||
associateFilesLogger.Trace("Commands for file %q: %v", file, currentFileCommands.Commands)
|
||||
associateFilesLogger.Trace("Isolate commands for file %q: %v", file, currentFileCommands.IsolateCommands)
|
||||
}
|
||||
associateFilesLogger.Info("Completed association. Found %d total associations for %d files and %d commands", associationCount, len(files), len(commands))
|
||||
return fileCommands, nil
|
||||
}
|
||||
|
||||
func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
|
||||
aggregateGlobsLogger := modifyCommandLogger.WithPrefix("AggregateGlobs")
|
||||
aggregateGlobsLogger.Debug("Aggregating glob patterns from commands")
|
||||
aggregateGlobsLogger.Trace("Input commands for aggregation: %v", commands)
|
||||
globs := make(map[string]struct{})
|
||||
for _, command := range commands {
|
||||
aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
|
||||
for _, glob := range command.Files {
|
||||
resolvedGlob := strings.Replace(glob, "~", os.Getenv("HOME"), 1)
|
||||
resolvedGlob = strings.ReplaceAll(resolvedGlob, "\\", "/")
|
||||
aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q)", glob, resolvedGlob)
|
||||
globs[resolvedGlob] = struct{}{}
|
||||
}
|
||||
}
|
||||
aggregateGlobsLogger.Debug("Finished aggregating globs. Found %d unique glob patterns", len(globs))
|
||||
aggregateGlobsLogger.Trace("Aggregated unique globs: %v", globs)
|
||||
return globs
|
||||
}
|
||||
|
||||
func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
|
||||
expandGlobsLogger := modifyCommandLogger.WithPrefix("ExpandGLobs")
|
||||
expandGlobsLogger.Debug("Expanding glob patterns to actual files")
|
||||
expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
|
||||
var files []string
|
||||
filesMap := make(map[string]bool)
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
expandGlobsLogger.Error("Failed to get current working directory: %v", err)
|
||||
return nil, fmt.Errorf("failed to get current working directory: %w", err)
|
||||
}
|
||||
expandGlobsLogger.Debug("Current working directory: %q", cwd)
|
||||
|
||||
for pattern := range patterns {
|
||||
expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
|
||||
static, pattern := SplitPattern(pattern)
|
||||
matches, err := doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
|
||||
continue
|
||||
}
|
||||
expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
|
||||
expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
|
||||
for _, m := range matches {
|
||||
m = filepath.Join(static, m)
|
||||
info, err := os.Stat(m)
|
||||
if err != nil {
|
||||
expandGlobsLogger.Warning("Error getting file info for %q: %v", m, err)
|
||||
continue
|
||||
}
|
||||
if !info.IsDir() && !filesMap[m] {
|
||||
expandGlobsLogger.Trace("Adding unique file to list: %q", m)
|
||||
filesMap[m], files = true, append(files, m)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(files) > 0 {
|
||||
expandGlobsLogger.Debug("Finished expanding globs. Found %d unique files to process", len(files))
|
||||
expandGlobsLogger.Trace("Unique files to process: %v", files)
|
||||
} else {
|
||||
expandGlobsLogger.Warning("No files found after expanding glob patterns.")
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func LoadCommands(args []string) ([]ModifyCommand, error) {
|
||||
loadCommandsLogger := modifyCommandLogger.WithPrefix("LoadCommands")
|
||||
loadCommandsLogger.Debug("Loading commands from arguments (cook files or direct patterns)")
|
||||
loadCommandsLogger.Trace("Input arguments: %v", args)
|
||||
commands := []ModifyCommand{}
|
||||
|
||||
for _, arg := range args {
|
||||
loadCommandsLogger.Debug("Processing argument for commands: %q", arg)
|
||||
newCommands, err := LoadCommandsFromCookFiles(arg)
|
||||
if err != nil {
|
||||
loadCommandsLogger.Error("Failed to load commands from argument %q: %v", arg, err)
|
||||
return nil, fmt.Errorf("failed to load commands from cook files: %w", err)
|
||||
}
|
||||
loadCommandsLogger.Debug("Successfully loaded %d commands from %q", len(newCommands), arg)
|
||||
for _, cmd := range newCommands {
|
||||
if cmd.Disabled {
|
||||
loadCommandsLogger.Debug("Skipping disabled command: %q", cmd.Name)
|
||||
continue
|
||||
}
|
||||
commands = append(commands, cmd)
|
||||
loadCommandsLogger.Trace("Added command %q. Current total commands: %d", cmd.Name, len(commands))
|
||||
}
|
||||
}
|
||||
|
||||
loadCommandsLogger.Info("Finished loading commands. Total %d commands loaded", len(commands))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, error) {
|
||||
loadCookFilesLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFiles").WithField("pattern", pattern)
|
||||
loadCookFilesLogger.Debug("Loading commands from cook files based on pattern")
|
||||
loadCookFilesLogger.Trace("Input pattern: %q", pattern)
|
||||
static, pattern := SplitPattern(pattern)
|
||||
commands := []ModifyCommand{}
|
||||
cookFiles, err := doublestar.Glob(os.DirFS(static), pattern)
|
||||
if err != nil {
|
||||
loadCookFilesLogger.Error("Failed to glob cook files for pattern %q: %v", pattern, err)
|
||||
return nil, fmt.Errorf("failed to glob cook files: %w", err)
|
||||
}
|
||||
loadCookFilesLogger.Debug("Found %d cook files for pattern %q", len(cookFiles), pattern)
|
||||
loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)
|
||||
|
||||
for _, cookFile := range cookFiles {
|
||||
cookFile = filepath.Join(static, cookFile)
|
||||
cookFile = filepath.Clean(cookFile)
|
||||
cookFile = strings.ReplaceAll(cookFile, "\\", "/")
|
||||
loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)
|
||||
|
||||
cookFileData, err := os.ReadFile(cookFile)
|
||||
if err != nil {
|
||||
loadCookFilesLogger.Error("Failed to read cook file %q: %v", cookFile, err)
|
||||
return nil, fmt.Errorf("failed to read cook file: %w", err)
|
||||
}
|
||||
loadCookFilesLogger.Trace("Read %d bytes from cook file %q", len(cookFileData), cookFile)
|
||||
newCommands, err := LoadCommandsFromCookFile(cookFileData)
|
||||
if err != nil {
|
||||
loadCookFilesLogger.Error("Failed to load commands from cook file data for %q: %v", cookFile, err)
|
||||
return nil, fmt.Errorf("failed to load commands from cook file: %w", err)
|
||||
}
|
||||
commands = append(commands, newCommands...)
|
||||
loadCookFilesLogger.Debug("Added %d commands from cook file %q. Total commands now: %d", len(newCommands), cookFile, len(commands))
|
||||
}
|
||||
|
||||
loadCookFilesLogger.Debug("Finished loading commands from cook files. Total %d commands", len(commands))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
func LoadCommandsFromCookFile(cookFileData []byte) ([]ModifyCommand, error) {
|
||||
loadCommandLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFile")
|
||||
loadCommandLogger.Debug("Unmarshaling commands from cook file data")
|
||||
loadCommandLogger.Trace("Cook file data length: %d", len(cookFileData))
|
||||
commands := []ModifyCommand{}
|
||||
err := yaml.Unmarshal(cookFileData, &commands)
|
||||
if err != nil {
|
||||
loadCommandLogger.Error("Failed to unmarshal cook file data: %v", err)
|
||||
return nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
|
||||
}
|
||||
loadCommandLogger.Debug("Successfully unmarshaled %d commands", len(commands))
|
||||
loadCommandLogger.Trace("Unmarshaled commands: %v", commands)
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// CountGlobsBeforeDedup counts the total number of glob patterns across all commands before deduplication
|
||||
func CountGlobsBeforeDedup(commands []ModifyCommand) int {
|
||||
countGlobsLogger := modifyCommandLogger.WithPrefix("CountGlobsBeforeDedup")
|
||||
countGlobsLogger.Debug("Counting glob patterns before deduplication")
|
||||
count := 0
|
||||
for _, cmd := range commands {
|
||||
countGlobsLogger.Trace("Processing command %q, adding %d globs", cmd.Name, len(cmd.Files))
|
||||
count += len(cmd.Files)
|
||||
}
|
||||
countGlobsLogger.Debug("Total glob patterns before deduplication: %d", count)
|
||||
return count
|
||||
}
|
||||
|
||||
func FilterCommands(commands []ModifyCommand, filter string) []ModifyCommand {
|
||||
filterCommandsLogger := modifyCommandLogger.WithPrefix("FilterCommands").WithField("filter", filter)
|
||||
filterCommandsLogger.Debug("Filtering commands")
|
||||
filterCommandsLogger.Trace("Input commands: %v", commands)
|
||||
filteredCommands := []ModifyCommand{}
|
||||
filters := strings.Split(filter, ",")
|
||||
filterCommandsLogger.Trace("Split filters: %v", filters)
|
||||
for _, cmd := range commands {
|
||||
filterCommandsLogger.Debug("Checking command %q against filters", cmd.Name)
|
||||
for _, f := range filters {
|
||||
if strings.Contains(cmd.Name, f) {
|
||||
filterCommandsLogger.Debug("Command %q matches filter %q, adding to filtered list", cmd.Name, f)
|
||||
filteredCommands = append(filteredCommands, cmd)
|
||||
break // Command matches, no need to check other filters
|
||||
}
|
||||
}
|
||||
}
|
||||
filterCommandsLogger.Debug("Finished filtering commands. Found %d filtered commands", len(filteredCommands))
|
||||
filterCommandsLogger.Trace("Filtered commands: %v", filteredCommands)
|
||||
return filteredCommands
|
||||
}
|
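Putting the helpers in this file together, a plausible call sequence is sketched below (the main wrapper and the "*.yml" argument are assumptions about how the real entry point wires things up): load commands from cook files, aggregate and expand their globs, then associate each expanded file with the commands that target it.

package main

import (
    "fmt"

    "cook/utils"
)

func main() {
    // Load every command from the cook files passed on the command line.
    commands, err := utils.LoadCommands([]string{"*.yml"})
    if err != nil {
        panic(err)
    }
    // Collect the unique globs, expand them to concrete files,
    // then associate each file with the commands that target it.
    globs := utils.AggregateGlobs(commands)
    files, err := utils.ExpandGLobs(globs)
    if err != nil {
        panic(err)
    }
    associations, err := utils.AssociateFilesWithCommands(files, commands)
    if err != nil {
        panic(err)
    }
    for file, assoc := range associations {
        fmt.Printf("%s: %d commands, %d isolate commands\n",
            file, len(assoc.Commands), len(assoc.IsolateCommands))
    }
}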
1000
utils/modifycommand_test.go
Normal file
File diff suppressed because it is too large
79
utils/replacecommand.go
Normal file
@@ -0,0 +1,79 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// replaceCommandLogger is a scoped logger for the utils/replacecommand package.
|
||||
var replaceCommandLogger = logger.Default.WithPrefix("utils/replacecommand")
|
||||
|
||||
type ReplaceCommand struct {
|
||||
From int
|
||||
To int
|
||||
With string
|
||||
}
|
||||
|
||||
func ExecuteModifications(modifications []ReplaceCommand, fileData string) (string, int) {
|
||||
executeModificationsLogger := replaceCommandLogger.WithPrefix("ExecuteModifications")
|
||||
executeModificationsLogger.Debug("Executing a batch of text modifications")
|
||||
executeModificationsLogger.Trace("Number of modifications: %d, Original file data length: %d", len(modifications), len(fileData))
|
||||
var err error
|
||||
|
||||
sort.Slice(modifications, func(i, j int) bool {
|
||||
return modifications[i].From > modifications[j].From
|
||||
})
|
||||
executeModificationsLogger.Debug("Modifications sorted in reverse order for safe replacement")
|
||||
executeModificationsLogger.Trace("Sorted modifications: %v", modifications)
|
||||
|
||||
executed := 0
|
||||
for idx, modification := range modifications {
|
||||
executeModificationsLogger.Debug("Applying modification %d/%d", idx+1, len(modifications))
|
||||
executeModificationsLogger.Trace("Current modification details: From=%d, To=%d, With=%q", modification.From, modification.To, modification.With)
|
||||
fileData, err = modification.Execute(fileData)
|
||||
if err != nil {
|
||||
executeModificationsLogger.Error("Failed to execute replacement for modification %+v: %v", modification, err)
|
||||
continue
|
||||
}
|
||||
executed++
|
||||
executeModificationsLogger.Trace("File data length after modification: %d", len(fileData))
|
||||
}
|
||||
executeModificationsLogger.Info("Successfully applied %d text replacements", executed)
|
||||
return fileData, executed
|
||||
}
|
||||
|
||||
func (m *ReplaceCommand) Execute(fileDataStr string) (string, error) {
|
||||
executeLogger := replaceCommandLogger.WithPrefix("Execute").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With))
|
||||
executeLogger.Debug("Attempting to execute single replacement")
|
||||
err := m.Validate(len(fileDataStr))
|
||||
if err != nil {
|
||||
executeLogger.Error("Failed to validate modification: %v", err)
|
||||
return fileDataStr, fmt.Errorf("failed to validate modification: %v", err)
|
||||
}
|
||||
|
||||
executeLogger.Trace("Applying replacement: fileDataStr[:%d] + %q + fileDataStr[%d:]", m.From, m.With, m.To)
|
||||
result := fileDataStr[:m.From] + m.With + fileDataStr[m.To:]
|
||||
executeLogger.Trace("Replacement executed. Result length: %d", len(result))
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (m *ReplaceCommand) Validate(maxsize int) error {
|
||||
validateLogger := replaceCommandLogger.WithPrefix("Validate").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With)).WithField("maxSize", maxsize)
|
||||
validateLogger.Debug("Validating replacement command against max size")
|
||||
if m.To < m.From {
|
||||
validateLogger.Error("Validation failed: 'To' (%d) is less than 'From' (%d)", m.To, m.From)
|
||||
return fmt.Errorf("command to is less than from: %v", m)
|
||||
}
|
||||
if m.From > maxsize || m.To > maxsize {
|
||||
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is greater than max size (%d)", m.From, m.To, maxsize)
|
||||
return fmt.Errorf("command from or to is greater than replacement length: %v", m)
|
||||
}
|
||||
if m.From < 0 || m.To < 0 {
|
||||
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is less than 0", m.From, m.To)
|
||||
return fmt.Errorf("command from or to is less than 0: %v", m)
|
||||
}
|
||||
validateLogger.Debug("Modification command validated successfully")
|
||||
return nil
|
||||
}
|
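A small worked example of ExecuteModifications (illustrative only): both ReplaceCommand offsets refer to the original string, and because the commands are sorted and applied from the highest From down, applying one replacement never shifts the offsets of the ones still pending.

package main

import (
    "fmt"

    "cook/utils"
)

func main() {
    input := "value=100 other=200"
    mods := []utils.ReplaceCommand{
        {From: 6, To: 9, With: "150"},   // 100 -> 150
        {From: 16, To: 19, With: "400"}, // 200 -> 400
    }
    out, applied := utils.ExecuteModifications(mods, input)
    fmt.Println(out, applied) // prints: value=150 other=400 2
}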
504
utils/replacecommand_test.go
Normal file
@@ -0,0 +1,504 @@
package utils

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestReplaceCommandExecute(t *testing.T) {
	tests := []struct {
		name        string
		input       string
		command     ReplaceCommand
		expected    string
		shouldError bool
	}{
		{
			name:        "Simple replacement",
			input:       "This is a test string",
			command:     ReplaceCommand{From: 5, To: 7, With: "was"},
			expected:    "This was a test string",
			shouldError: false,
		},
		{
			name:        "Replace at beginning",
			input:       "Hello world",
			command:     ReplaceCommand{From: 0, To: 5, With: "Hi"},
			expected:    "Hi world",
			shouldError: false,
		},
		{
			name:        "Replace at end",
			input:       "Hello world",
			command:     ReplaceCommand{From: 6, To: 11, With: "everyone"},
			expected:    "Hello everyone",
			shouldError: false,
		},
		{
			name:        "Replace entire string",
			input:       "Hello world",
			command:     ReplaceCommand{From: 0, To: 11, With: "Goodbye!"},
			expected:    "Goodbye!",
			shouldError: false,
		},
		{
			name:        "Error: From > To",
			input:       "Test string",
			command:     ReplaceCommand{From: 7, To: 5, With: "fail"},
			expected:    "Test string",
			shouldError: true,
		},
		{
			name:        "Error: From > string length",
			input:       "Test",
			command:     ReplaceCommand{From: 10, To: 12, With: "fail"},
			expected:    "Test",
			shouldError: true,
		},
		{
			name:        "Error: To > string length",
			input:       "Test",
			command:     ReplaceCommand{From: 2, To: 10, With: "fail"},
			expected:    "Test",
			shouldError: true,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			result, err := tc.command.Execute(tc.input)

			if tc.shouldError {
				if err == nil {
					t.Errorf("Expected an error for command %+v but got none", tc.command)
				}
			} else {
				if err != nil {
					t.Errorf("Unexpected error: %v", err)
				}
				if result != tc.expected {
					t.Errorf("Expected %q, got %q", tc.expected, result)
				}
			}
		})
	}
}

func TestExecuteModifications(t *testing.T) {
	tests := []struct {
		name          string
		input         string
		modifications []ReplaceCommand
		expected      string
		expectedCount int
	}{
		{
			name:  "Single modification",
			input: "Hello world",
			modifications: []ReplaceCommand{
				{From: 0, To: 5, With: "Hi"},
			},
			expected:      "Hi world",
			expectedCount: 1,
		},
		{
			name:  "Multiple modifications",
			input: "This is a test string",
			modifications: []ReplaceCommand{
				{From: 0, To: 4, With: "That"},
				{From: 8, To: 14, With: "sample"},
			},
			expected:      "That is sample string",
			expectedCount: 2,
		},
		{
			name:  "Overlapping modifications",
			input: "ABCDEF",
			modifications: []ReplaceCommand{
				{From: 0, To: 3, With: "123"}, // ABC -> 123
				{From: 2, To: 5, With: "xyz"}, // CDE -> xyz
			},
			// The actual behavior of the current implementation: the command with the
			// higher From runs first ("ABCDEF" -> "ABxyzF"), then {0,3,"123"} turns
			// that into "123yzF".
			expected:      "123yzF",
			expectedCount: 2,
		},
		{
			name:  "Sequential modifications",
			input: "Hello world",
			modifications: []ReplaceCommand{
				{From: 0, To: 5, With: "Hi"},
				{From: 5, To: 6, With: ""}, // Remove the space
				{From: 6, To: 11, With: "everyone"},
			},
			expected:      "Hieveryone",
			expectedCount: 3,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// Make a copy of the modifications to avoid modifying the test case
			mods := make([]ReplaceCommand, len(tc.modifications))
			copy(mods, tc.modifications)

			result, count := ExecuteModifications(mods, tc.input)

			if count != tc.expectedCount {
				t.Errorf("Expected %d modifications, got %d", tc.expectedCount, count)
			}

			if result != tc.expected {
				t.Errorf("Expected %q, got %q", tc.expected, result)
			}
		})
	}
}
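The table-driven cases above, together with the reverse-order tests that follow, suggest that ExecuteModifications applies the commands from highest From to lowest, feeds each command the output of the previous one, and returns the modified string plus the number of commands that actually ran. A small hypothetical helper (not part of this change) that relies only on that return shape:

    // applyAll is a sketch, not project code: it runs a batch of commands and
    // reports whether every command was executed rather than skipped.
    func applyAll(mods []ReplaceCommand, data string) (string, bool) {
        result, executed := ExecuteModifications(mods, data)
        return result, executed == len(mods)
    }
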
func TestReverseOrderExecution(t *testing.T) {
	// This test verifies the current behavior of modification application
	input := "Original text with multiple sections"

	// Modifications at specific positions
	modifications := []ReplaceCommand{
		{From: 0, To: 8, With: "Modified"},  // Original -> Modified
		{From: 9, To: 13, With: "document"}, // text -> document
		{From: 14, To: 22, With: "without"}, // with -> without
		{From: 23, To: 31, With: "any"},     // multiple -> any
	}

	// The actual current behavior of the implementation: commands are applied from
	// highest From to lowest, and the last two ranges deliberately overrun the words
	// named in their comments ("with" ends at index 18, "multiple" at 27), so the
	// result is intentionally mangled.
	expected := "Modified document withouttanytions"

	result, count := ExecuteModifications(modifications, input)

	if count != 4 {
		t.Errorf("Expected 4 modifications, got %d", count)
	}

	if result != expected {
		t.Errorf("Expected %q, got %q", expected, result)
	}
}

// Replace text in the middle of a string with new content
func TestReplaceCommandExecute_ReplacesTextInMiddle(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello replaced, how are you?", result)
}

// Replace with empty string (deletion)
func TestReplaceCommandExecute_DeletesText(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello , how are you?", result)
}

// Replace with longer string than original segment
func TestReplaceCommandExecute_WithLongerString(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "longerreplacement",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello longerreplacement, how are you?", result)
}

// From and To values are the same (zero-length replacement)
func TestReplaceCommandExecute_ZeroLengthReplacement(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 5,
		To:   5,
		With: "inserted",
	}

	fileContent := "Hello world"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Helloinserted world", result)
}

// From value is greater than To value
func TestReplaceCommandExecute_FromGreaterThanTo(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 10,
		To:   5,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world, how are you?", result)
}

// From or To values exceed string length
func TestReplaceCommandExecute_FromOrToExceedsLength(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 5,
		To:   50, // Exceeds the length of the fileContent
		With: "replaced",
	}

	fileContent := "Hello world"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world", result)
}

// From or To values are negative
func TestReplaceCommandExecute_NegativeFromOrTo(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: -1,
		To:   10,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world, how are you?", result)
}

// Modifications are applied in reverse order (from highest to lowest 'From' value)
func TestExecuteModificationsAppliesInReverseOrder(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{
		{From: 0, To: 4, With: "That"},
		{From: 10, To: 14, With: "sample"},
		{From: 26, To: 38, With: "modifications"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "That is a sample string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// One or more modifications fail but others succeed
func TestExecuteModificationsWithPartialFailures(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	// Create a ReplaceCommand that will fail validation
	failingCommand := ReplaceCommand{
		From: 15,
		To:   10, // Invalid range (To < From) to cause failure
		With: "will fail",
	}

	// Valid commands
	validCommand1 := ReplaceCommand{
		From: 0,
		To:   4,
		With: "That",
	}

	validCommand2 := ReplaceCommand{
		From: 26,
		To:   38,
		With: "modifications",
	}

	modifications := []ReplaceCommand{failingCommand, validCommand1, validCommand2}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "That is a test string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	// Only 2 out of 3 modifications should succeed
	if executed != 2 {
		t.Errorf("Expected 2 modifications to be executed successfully, but got %d", executed)
	}
}
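Because invalid commands are skipped rather than aborting the batch, a caller that needs all-or-nothing semantics has to check up front. A hypothetical pre-flight fragment (mods, fileData, and the surrounding error-returning function are assumptions for illustration, not taken from this diff), using the Validate method shown earlier:

    // Validate every command against the current content length so that a bad
    // range is reported instead of being silently skipped by ExecuteModifications.
    for i, m := range mods {
        if err := m.Validate(len(fileData)); err != nil {
            return fmt.Errorf("replace command %d is invalid: %w", i, err)
        }
    }
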
// All valid modifications are executed and the modified string is returned
func TestExecuteModificationsAllValid(t *testing.T) {
	// Setup test data
	fileData := "Hello world, this is a test"

	modifications := []ReplaceCommand{
		{From: 0, To: 5, With: "Hi"},
		{From: 18, To: 20, With: "was"},
		{From: 21, To: 27, With: "an example"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "Hi world, this was an example"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// The count of successfully executed modifications is returned
func TestExecuteModificationsReturnsCorrectCount(t *testing.T) {
	// Setup test data
	fileData := "Initial text for testing"

	modifications := []ReplaceCommand{
		{From: 0, To: 7, With: "Final"},
		{From: 12, To: 16, With: "example"},
		{From: 17, To: 24, With: "process"},
	}

	// Execute the function
	_, executed := ExecuteModifications(modifications, fileData)

	// Verify the count of executed modifications
	expectedExecuted := 3
	if executed != expectedExecuted {
		t.Errorf("Expected %d modifications to be executed, but got %d", expectedExecuted, executed)
	}
}

// Empty modifications list returns the original string with zero executed count
func TestExecuteModificationsWithEmptyList(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	if result != fileData {
		t.Errorf("Expected result to be %q, but got %q", fileData, result)
	}

	if executed != 0 {
		t.Errorf("Expected 0 modifications to be executed, but got %d", executed)
	}
}

// Modifications with identical 'From' values
func TestExecuteModificationsWithIdenticalFromValues(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{
		{From: 10, To: 14, With: "sample"},
		{From: 10, To: 14, With: "example"},
		{From: 26, To: 38, With: "modifications"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	// Yes, the result is mangled, and yes, that is intentional.
	// Each command operates on the output of the previous one. The highest-From
	// command runs first ("replacements" -> "modifications"), then "test" -> "sample".
	// By the time "example" is applied, its [10:14) range points into the freshly
	// inserted "sample": it replaces "samp" and leaves "le" behind, giving "examplele".
	// Which of the two identical-From commands runs first is irrelevant to us; it
	// just so happens that "sample" goes first, hence "examplele".
	expectedResult := "This is a examplele string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// Modifications that would affect each other if not sorted properly
func TestExecuteModificationsHandlesOverlappingRanges(t *testing.T) {
	// Setup test data
	fileData := "The quick brown fox jumps over the lazy dog"

	modifications := []ReplaceCommand{
		{From: 4, To: 9, With: "slow"},
		{From: 10, To: 15, With: "red"},
		{From: 16, To: 19, With: "cat"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "The slow red cat jumps over the lazy dog"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}