15 Commits

35 changed files with 5677 additions and 7528 deletions

.gitignore vendored (2 changed lines)

@@ -1,3 +1 @@
 *.exe
-.qodo
-*.sqlite

.vscode/launch.json vendored (93 changed lines)

@@ -5,99 +5,12 @@
"version": "0.2.0", "version": "0.2.0",
"configurations": [ "configurations": [
{ {
"name": "Launch Package (Barotrauma)", "name": "Launch Package",
"type": "go", "type": "go",
"request": "launch", "request": "launch",
"mode": "auto", "mode": "auto",
"program": "${workspaceFolder}", "program": "${fileDirname}",
"cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma", "args": []
"args": [
"-loglevel",
"trace",
"-cook",
"*.yml",
]
},
{
"name": "Launch Package (Payday 2)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Payday2",
"args": [
"-loglevel",
"trace",
"*.yml",
]
},
{
"name": "Launch Package (Barotrauma cookfile)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
"args": [
"-loglevel",
"trace",
"-cook",
"cookassistant.yml",
]
},
{
"name": "Launch Package (Quasimorph cookfile)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Quasimorph",
"args": [
"cook.yml",
]
},
{
"name": "Launch Package (Rimworld cookfile)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Rimworld/294100",
"args": [
"cookVehicles.yml",
]
},
{
"name": "Launch Package (Workspace)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"args": [
"tester.yml",
]
},
{
"name": "Launch Package (Avorion)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Avorion/Avorion",
"args": [
"*.yml",
]
},
{
"name": "Launch Package (Minecraft)",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}",
"cwd": "C:/Users/Administrator/Seafile/Games-Minecraft",
"args": [
"cook_tacz.yml",
]
} }
] ]
} }

README.md (deleted, 116 lines)

@@ -1,116 +0,0 @@
# Big Chef
A Go-based tool for modifying XML, JSON, and text documents using XPath/JSONPath/Regex expressions and Lua transformations.
## Features
- **Multi-Format Processing**:
  - XML (XPath)
  - JSON (JSONPath)
  - Text (Regex)
- **Node Value Modification**: Update text values in XML elements, JSON properties or text matches
- **Attribute Manipulation**: Modify XML attributes, JSON object keys or regex capture groups
- **Conditional Logic**: Apply transformations based on document content
- **Complex Operations**:
  - Mathematical calculations
  - String manipulations
  - Date conversions
  - Structural changes
  - Full Lua environment
- **Error Handling**: Comprehensive error detection for:
  - Invalid XML/JSON
  - Malformed XPath/JSONPath
  - Lua syntax errors
## Usage Examples
### 1. Basic field modification
```xml
<!-- Input -->
<price>44.95</price>
<!-- Command -->
chef -xml "//price" "v=v*2" input.xml
<!-- Output -->
<price>89.9</price>
```
### 2. Supports glob patterns
```bash
chef -xml "//price" "v=v*2" data/**.xml
```
### 3. Attribute Update
```xml
<!-- Input -->
<item price="10.50"/>
<!-- Command -->
chef -xml "//item/@price" "v=v*2" input.xml
<!-- Output -->
<item price="21"/>
```
### 4. JSONPath Transformation
```json
// Input
{
  "products": [
    {"name": "Widget", "price": 19.99},
    {"name": "Gadget", "price": 29.99}
  ]
}
// Command
chef -json "$.products[*].price" "v=v*0.75" input.json
// Output
{
  "products": [
    {"name": "Widget", "price": 14.99},
    {"name": "Gadget", "price": 22.49}
  ]
}
```
### 5. Regex Text Replacement
Regex works slightly differently: up to 12 match groups are provided as v1..v12 and s1..s12, for numbers and strings respectively.
A special shorthand "!num" is also provided that simply expands to `(\d*\.?\d+)`.
```xml
<!-- Input -->
<description>Price: $15.00 Special Offer</description>
<!-- Command -->
chef "Price: $!num Special Offer" "v1 = v1 * 0.92" input.xml
<!-- Output -->
<description>Price: $13.80 Special Offer</description>
```
### 6. Conditional Transformation
```xml
<!-- Input -->
<item stock="5" price="10.00"/>
<!-- Command -->
chef -xml "//item" "if tonumber(v.stock) > 0 then v.price = v.price * 0.8 end" input.xml
<!-- Output -->
<item stock="5" price="8.00"/>
```
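### 7. String manipulation
The feature list above also mentions string manipulations. The following is a minimal sketch of what that could look like, assuming the embedded Lua environment exposes the standard `string` library (the earlier examples only exercise arithmetic):
```xml
<!-- Input -->
<name>widget</name>
<!-- Command -->
chef -xml "//name" "v=string.upper(v)" input.xml
<!-- Output -->
<name>WIDGET</name>
```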
## Installation
```bash
go build -o chef main.go
```
```bash
# Process XML file
./chef -xml "//price" "v=v*1.2" input.xml
# Process JSON file
./chef -json "$.prices[*]" "v=v*0.9" input.json
```

TalentsMechanic.xml (new file, 651 lines)

@@ -0,0 +1,651 @@
<?xml version="1.0" encoding="utf-8"?>
<Talents>
<Talent identifier="powerarmor">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="5,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.powerarmor">
<Replace tag="[bonusmovement]" value="25" color="gui.green"/>
</Description>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.exosuit" color="gui.orange"/>
</Description>
<AbilityGroupInterval interval="0.9">
<Conditions>
<AbilityConditionHasItem tags="deepdivinglarge" />
</Conditions>
<Abilities>
<CharacterAbilityModifyStat stattype="MovementSpeed" value="0.25" />
</Abilities>
</AbilityGroupInterval>
<AddedRecipe itemidentifier="exosuit"/>
</Talent>
<Talent identifier="foolhardy">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="4,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.lowhealthstatboost">
<Replace tag="[health]" value="50" color="gui.green"/>
</Description>
<Description tag="talentdescription.additionalstattype">
<Replace tag="[amount]" value="20" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.physicalresistance" color="gui.orange"/>
</Description>
<AbilityGroupInterval interval="0.9">
<Conditions>
<AbilityConditionAboveVitality invert="true" vitalitypercentage="0.5"/>
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
<Affliction identifier="foolhardy" amount="1.0"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="berserker">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.lowhealthstatboost">
<Replace tag="[health]" value="50" color="gui.green"/>
</Description>
<Description tag="talentdescription.additionalstattype">
<Replace tag="[amount]" value="20" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.meleedamagebonus" color="gui.orange"/>
</Description>
<AbilityGroupInterval interval="0.9">
<Conditions>
<AbilityConditionAboveVitality invert="true" vitalitypercentage="0.5"/>
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
<Affliction identifier="berserker" amount="1.0"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="mudraptorwrestler">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="2,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.mudraptorwrestler">
<Replace tag="[amount]" value="50" color="gui.green"/>
</Description>
<Description tag="talentdescription.additionalstattypeself">
<Replace tag="[amount]" value="10" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.physicalresistance" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnAttack">
<Conditions>
<AbilityConditionAttackData weapontype="NoWeapon,Melee" />
<AbilityConditionCharacter>
<Conditional group="eq mudraptor" />
</AbilityConditionCharacter>
</Conditions>
<Abilities>
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveResistance resistanceid="damage" multiplier="0.9"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="heavylifting">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="1,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.heavylifting">
<Replace tag="[amount]" value="20" color="gui.green"/>
</Description>
<AbilityGroupInterval interval="0.9">
<Conditions>
<AbilityConditionHoldingItem tags="alienartifact,crate"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyStat stattype="MovementSpeed" value="0.2"/>
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="iamthatguy">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="0,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.iamthatguy">
<Replace tag="[amount]" value="20" color="gui.green"/>
</Description>
<Description tag="talentdescription.skillbonus">
<Replace tag="[amount]" value="20" color="gui.green"/>
<Replace tag="[skillname]" value="stattypenames.weaponsskillbonus" color="gui.orange"/>
</Description>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.heavywrench" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="WeaponsSkillBonus" value="20"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnAddDamageAffliction">
<Abilities>
<CharacterAbilityModifyAffliction afflictionidentifiers="blunttrauma" addedmultiplier="0.2" />
</Abilities>
</AbilityGroupEffect>
<AddedRecipe itemidentifier="heavywrench"/>
</Talent>
<Talent identifier="robotics">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,7" sheetelementsize="128,128"/>
<Description tag="talentdescription.robotics"/>
<Description tag="talentdescription.roboticsreminder">
<Replace tag="[amount]" value="2" color="gui.green"/>
</Description>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.defensebotspawner,entityname.defensebotammobox" color="gui.orange"/>
</Description>
<AddedRecipe itemidentifier="defensebotspawner"/>
<AddedRecipe itemidentifier="defensebotammobox"/>
</Talent>
<Talent identifier="ironstorm">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="7,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.ironstorm">
<Replace tag="[chance]" value="10" color="gui.green"/>
</Description>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.scrapcannon" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilitySetMetadataInt identifier="tiermodifieroverride" value="3"/>
</Abilities>
</AbilityGroupEffect>
<AddedRecipe itemidentifier="scrapcannon"/>
</Talent>
<Talent identifier="residualwaste">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.residualwaste">
<Replace tag="[chance]" value="20" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
<Conditions>
<AbilityConditionServerRandom randomChance="0.2"/>
<!-- don't allow duplicating genetic materials, and prevent infinite FPGA circuits -->
<AbilityConditionItem tags="geneticmaterial,unidentifiedgeneticmaterial,circuitboxcomponent,lightcomponent" invert="true"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyValue multiplyvalue="2"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="massproduction">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,1" sheetelementsize="128,128"/>
<Description tag="talentdescription.massproduction">
<Replace tag="[chance]" value="40" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnItemFabricatedIngredients">
<Conditions>
<AbilityConditionServerRandom randomChance="0.4" />
</Conditions>
<Abilities>
<CharacterAbilityRemoveRandomIngredient>
<AbilityConditionItem category="Material"/>
</CharacterAbilityRemoveRandomIngredient>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="toolmaintenance">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="5,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.toolmaintenance">
<Replace tag="[amount]" value="1" color="gui.green"/>
</Description>
<!-- Give once when unlocking the talent -->
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGivePermanentStat statidentifier="tool~toolmaintenance" stattype="IncreaseFabricationQuality" value="1" targetallies="true" setvalue="true"/>
</Abilities>
</AbilityGroupEffect>
<!-- Give every 60 seconds for late comers -->
<AbilityGroupInterval interval="60">
<Abilities>
<CharacterAbilityGivePermanentStat statidentifier="tool~toolmaintenance" stattype="IncreaseFabricationQuality" value="1" targetallies="true" setvalue="true"/>
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="miner">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="2,3" sheetelementsize="428,428"/>
<Description tag="talentdescription.miner">
<Replace tag="[probability]" value="320" color="gui.green"/>
</Description>
<Description tag="talentdescription.gainoredetachspeed">
<Replace tag="[amount]" value="1600" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="RepairToolDeattachTimeMultiplier" value="1"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
<Conditions>
<AbilityConditionServerRandom randomchance="12.8"/>
<AbilityConditionItem tags="ore"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyValue multiplyvalue="2"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="retrofit">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.retrofit" />
<Description tag="talentdescription.doesnotstack" />
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilitySetMetadataInt identifier="tiermodifiers.increasewallhealth" value="1"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="ironman">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.ironhelmet,entityname.makeshiftarmor" color="gui.orange"/>
</Description>
<AddedRecipe itemidentifier="ironhelmet"/>
<AddedRecipe itemidentifier="makeshiftarmor"/>
</Talent>
<Talent identifier="oiledmachinery">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="4,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.oiledmachinery">
<Replace tag="[amount]" value="50" color="gui.green"/>
</Description>
<Description tag="talentdescription.doesnotstack" />
<AbilityGroupInterval interval="60">
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="fabricator" stattype="FabricationSpeed" value="1.5" />
<CharacterAbilityGiveItemStatToTags tags="deconstructor" stattype="DeconstructorSpeed" value="1.5" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="fabricator" stattype="FabricationSpeed" value="1.5" />
<CharacterAbilityGiveItemStatToTags tags="deconstructor" stattype="DeconstructorSpeed" value="1.5" />
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="pumpndump">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="1,7" sheetelementsize="128,128"/>
<Description tag="talentdescription.pumpndump">
<Replace tag="[amount]" value="10" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.maxflow" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<conditions>
<AbilityConditionItem tags="pump"/>
</conditions>
<Abilities>
<CharacterAbilityGiveItemStat stattype="PumpSpeed" value="1.1"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="ballastdenizen">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="7,6" sheetelementsize="128,128"/>
<Description tag="talentdescription.ballastdenizen">
<Replace tag="[amount]" value="50" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="HoldBreathMultiplier" value="0.5"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="engineengineer">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="2,5" sheetelementsize="128,128"/>
<Description tag="talentdescription.engineengineer">
<Replace tag="[amount]" value="2.5" color="gui.green"/>
<Replace tag="[max]" value="5" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.maxspeed" color="gui.orange"/>
</Description>
<Description tag="talentdescription.doesnotstack" />
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="1" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.025" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="2" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.05" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="3" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.075" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="4" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.1" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="5" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.125" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="6" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.15" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel levelequals="7" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.175" />
</Abilities>
</AbilityGroupInterval>
<AbilityGroupInterval interval="60">
<Conditions>
<AbilityConditionHasLevel minlevel="8" />
</Conditions>
<Abilities>
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.2" />
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="multifunctional">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,1" sheetelementsize="128,128"/>
<Description tag="talentdescription.multifunctional">
<Replace tag="[powerincrease]" value="50" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnAttack">
<Conditions>
<AbilityConditionAttackData tags="wrenchitem"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnAttack">
<Conditions>
<AbilityConditionAttackData tags="crowbaritem"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="salvagecrew">
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="0,7" sheetelementsize="128,128"/>
<Description tag="talentdescription.bonusxponmission">
<Replace tag="[xpbonus]" value="30" color="gui.green"/>
<Replace tag="[missiontype]" value="missiontype.salvage" color="gui.orange"/>
</Description>
<Description tag="talentdescription.salvagecrew">
<Replace tag="[swimbonus]" value="50" color="gui.green"/>
<Replace tag="[resistanceamount]" value="10" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnGainMissionExperience">
<Conditions>
<AbilityConditionMission missiontype="Salvage"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyValue multiplyvalue="1.3"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupInterval interval="0.9">
<Conditions>
<AbilityConditionInSubmarine submarinetype="Wreck" />
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="This" disabledeltatime="true">
<Affliction identifier="salvagecrew" amount="1.0"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupInterval>
</Talent>
<Talent identifier="machinemaniac" trackedstat="machinemaniac_counter" trackedmax="100">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="3,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.machinemaniac">
<Replace tag="[bonus]" value="80" color="gui.green"/>
<Replace tag="[amount]" value="3" color="gui.orange"/>
</Description>
<Description tag="talentdescription.machinemaniac.30">
<Replace tag="[requirement]" value="12" color="gui.green"/>
<Replace tag="[amount]" value="10" color="gui.green"/>
<Replace tag="[skill]" value="stattypenames.mechanicalskillbonus" color="gui.orange"/>
<Replace tag="[xpamount]" value="500" color="gui.green"/>
</Description>
<Description tag="talentdescription.machinemaniac.50">
<Replace tag="[requirement]" value="20" color="gui.green"/>
<Replace tag="[level]" value="1" color="gui.green"/>
</Description>
<Description tag="talentdescription.machinemaniac.100">
<Replace tag="[requirement]" value="40" color="gui.green"/>
<Replace tag="[amount]" value="50" color="gui.green"/>
</Description>
<!-- Give the player stats that tracks if the rewards should be given -->
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_30" value="1" maxvalue="1" setvalue="true" />
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_50" value="1" maxvalue="1" setvalue="true" />
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_100" value="1" maxvalue="1" setvalue="true" />
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
</Conditions>
<Abilities>
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_counter" value="1" removeondeath="false" />
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_30" min="1"/>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="12"/>
</Conditions>
<Abilities>
<CharacterAbilityGiveExperience amount="2000"/>
<CharacterAbilityGivePermanentStat stattype="MechanicalSkillBonus" statidentifier="machinemaniac" value="10" setvalue="true" removeondeath="false" />
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_30" />
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_50" min="1"/>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="20"/>
</Conditions>
<Abilities>
<CharacterAbilityUpgradeSubmarine upgradeprefab="increasemaxpumpflow" upgradecategory="pumps" level="1" />
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_50" />
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_100" min="1"/>
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="40"/>
</Conditions>
<Abilities>
<CharacterAbilityGivePermanentStat stattype="MechanicalRepairSpeed" statidentifier="machinemaniac" value="0.5" setvalue="true" removeondeath="false" />
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_100" />
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="tinkerer">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,1" sheetelementsize="128,128"/>
<Description tag="talentdescription.increasemaxrepairmechanical">
<Replace tag="[percentage]" value="40" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="MaxRepairConditionMultiplierMechanical" value="0.4"/>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="modularrepairs">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,1" sheetelementsize="128,128"/>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.repairpack" color="gui.orange"/>
</Description>
<Description tag="talentdescription.freeupgrade">
<Replace tag="[level]" value="1" color="gui.green"/>
<Replace tag="[upgradename]" value="upgradename.decreaselowskillfixduration" color="gui.orange"/>
</Description>
<AddedRecipe itemidentifier="repairpack"/>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityUpgradeSubmarine upgradeprefab="decreaselowskillfixduration" upgradecategory="electricaldevices" level="1" />
<CharacterAbilityUpgradeSubmarine upgradeprefab="decreaselowskillfixduration" upgradecategory="mechanicaldevices" level="1" />
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="hullfixer">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="0,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.fixfoamgrenade,entityname.handheldstatusmonitor" color="gui.orange"/>
</Description>
<Description tag="talentdescription.additionalstattype">
<Replace tag="[amount]" value="25" color="gui.green"/>
<Replace tag="[stattype]" value="stattypenames.repairtoolstructurerepairmultiplier" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="RepairToolStructureRepairMultiplier" value="0.25"/>
</Abilities>
</AbilityGroupEffect>
<AddedRecipe itemidentifier="fixfoamgrenade"/>
<AddedRecipe itemidentifier="handheldstatusmonitor"/>
</Talent>
<Talent identifier="letitdrain">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="1,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.letitdrain"/>
<Description tag="talentdescription.letitdrainreminder">
<Replace tag="[itemcount]" value="2" color="gui.green"/>
</Description>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.portablepump" color="gui.orange"/>
</Description>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGivePermanentStat statidentifier="portablepump" stattype="MaxAttachableCount" value="2" />
</Abilities>
</AbilityGroupEffect>
<AddedRecipe itemidentifier="portablepump"/>
</Talent>
<Talent identifier="quickfixer">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.quickfixer">
<Replace tag="[amount]" value="20" color="gui.green"/>
<Replace tag="[duration]" value="10" color="gui.green"/>
</Description>
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="1"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
<Affliction identifier="quickfixer" amount="10.0"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="scrapsavant">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,3" sheetelementsize="128,128"/>
<Description tag="talentdescription.doublescrapoutput" />
<Description tag="talentdescription.findadditionalscrap">
<Replace tag="[probability]" value="20" color="gui.green"/>
</Description>
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
<Conditions>
<AbilityConditionItem tags="scrap"/>
</Conditions>
<Abilities>
<CharacterAbilityModifyValue multiplyvalue="2"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnOpenItemContainer">
<Conditions>
<AbilityConditionItemInSubmarine submarinetype="Wreck"/>
<AbilityConditionItem tags="container"/>
</Conditions>
<Abilities>
<CharacterAbilitySpawnItemsToContainer randomchance="0.2" oncepercontainer="true">
<StatusEffects>
<StatusEffect type="OnAbility" target="UseTarget" >
<SpawnItem identifiers="scrap" spawnposition="ThisInventory" spawnifcantbecontained="false" />
</StatusEffect>
</StatusEffects>
</CharacterAbilitySpawnItemsToContainer>
</Abilities>
</AbilityGroupEffect>
</Talent>
<Talent identifier="safetyfirst">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.unlockrecipe">
<Replace tag="[itemname]" value="entityname.safetyharness" color="gui.orange"/>
</Description>
<AddedRecipe itemidentifier="safetyharness"/>
</Talent>
</Talents>

(deleted file, name not shown)

@@ -1,28 +0,0 @@
package main

import (
	"time"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

func main() {
	// Initialize logger with DEBUG level
	logger.Init(logger.LevelDebug)

	// Test different log levels
	logger.Info("This is an info message")
	logger.Debug("This is a debug message")
	logger.Warning("This is a warning message")
	logger.Error("This is an error message")
	logger.Trace("This is a trace message (not visible at DEBUG level)")

	// Test with a goroutine
	logger.SafeGo(func() {
		time.Sleep(10 * time.Millisecond)
		logger.Info("Message from goroutine")
	})

	// Wait for goroutine to complete
	time.Sleep(20 * time.Millisecond)
}

(modified file, name not shown)

@@ -1,7 +1,6 @@
 package main
 
 import (
-	"cook/utils"
 	"os"
 	"path/filepath"
 	"testing"
@@ -77,14 +76,9 @@ func TestGlobExpansion(t *testing.T) {
 	for _, tc := range tests {
 		t.Run(tc.name, func(t *testing.T) {
-			// Convert string patterns to map[string]struct{} for ExpandGLobs
-			patternMap := make(map[string]struct{})
-			for _, pattern := range tc.patterns {
-				patternMap[pattern] = struct{}{}
-			}
-			files, err := utils.ExpandGLobs(patternMap)
+			files, err := expandFilePatterns(tc.patterns)
 			if err != nil {
-				t.Fatalf("ExpandGLobs failed: %v", err)
+				t.Fatalf("expandFilePatterns failed: %v", err)
 			}
 			if len(files) != tc.expected {

go.mod (34 changed lines)

@@ -1,32 +1,20 @@
-module cook
+module modify
 
-go 1.23.2
+go 1.24.1
 
 require (
-	git.site.quack-lab.dev/dave/cylogger v1.3.0
+	github.com/antchfx/xmlquery v1.4.4
 	github.com/bmatcuk/doublestar/v4 v4.8.1
-	github.com/stretchr/testify v1.10.0
 	github.com/yuin/gopher-lua v1.1.1
-	gopkg.in/yaml.v3 v3.0.1
-	gorm.io/gorm v1.30.0
 )
 
 require (
-	github.com/davecgh/go-spew v1.1.1 // indirect
-	github.com/google/go-cmp v0.6.0 // indirect
-	github.com/hexops/valast v1.5.0 // indirect
-	github.com/jinzhu/inflection v1.0.0 // indirect
-	github.com/jinzhu/now v1.1.5 // indirect
-	github.com/kr/pretty v0.3.1 // indirect
-	github.com/mattn/go-sqlite3 v1.14.22 // indirect
-	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/rogpeppe/go-internal v1.14.1 // indirect
-	golang.org/x/mod v0.21.0 // indirect
-	golang.org/x/sync v0.11.0 // indirect
-	golang.org/x/text v0.22.0 // indirect
-	golang.org/x/tools v0.26.0 // indirect
-	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
-	mvdan.cc/gofumpt v0.4.0 // indirect
+	github.com/PaesslerAG/gval v1.0.0 // indirect
+	github.com/PaesslerAG/jsonpath v0.1.1 // indirect
+	github.com/antchfx/xpath v1.3.3 // indirect
+	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
+	github.com/sergi/go-diff v1.3.1 // indirect
+	github.com/stretchr/testify v1.10.0 // indirect
+	golang.org/x/net v0.33.0 // indirect
+	golang.org/x/text v0.21.0 // indirect
 )
-
-require gorm.io/driver/sqlite v1.6.0

go.sum (133 changed lines)

@@ -1,59 +1,98 @@
git.site.quack-lab.dev/dave/cylogger v1.3.0 h1:eTWPUD+ThVi8kGIsRcE0XDeoH3yFb5miFEODyKUdWJw= github.com/PaesslerAG/gval v1.0.0 h1:GEKnRwkWDdf9dOmKcNrar9EA1bz1z9DqPIO1+iLzhd8=
git.site.quack-lab.dev/dave/cylogger v1.3.0/go.mod h1:wctgZplMvroA4X6p8f4B/LaCKtiBcT1Pp+L14kcS8jk= github.com/PaesslerAG/gval v1.0.0/go.mod h1:y/nm5yEyTeX6av0OfKJNp9rBNj2XrGhAf5+v24IBN1I=
github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8=
github.com/PaesslerAG/jsonpath v0.1.1 h1:c1/AToHQMVsduPAa4Vh6xp2U0evy4t8SWp8imEsylIk=
github.com/PaesslerAG/jsonpath v0.1.1/go.mod h1:lVboNxFGal/VwW6d9JzIy56bUsYAP6tH/x80vjnCseY=
github.com/antchfx/xmlquery v1.4.4 h1:mxMEkdYP3pjKSftxss4nUHfjBhnMk4imGoR96FRY2dg=
github.com/antchfx/xmlquery v1.4.4/go.mod h1:AEPEEPYE9GnA2mj5Ur2L5Q5/2PycJ0N9Fusrx9b12fc=
github.com/antchfx/xpath v1.3.3 h1:tmuPQa1Uye0Ym1Zn65vxPgfltWb/Lxu2jeqIGteJSRs=
github.com/antchfx/xpath v1.3.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38= github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/hexops/autogold v0.8.1 h1:wvyd/bAJ+Dy+DcE09BoLk6r4Fa5R5W+O+GUzmR985WM= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/hexops/autogold v0.8.1/go.mod h1:97HLDXyG23akzAoRYJh/2OBs3kd80eHyKPvZw0S5ZBY=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/hexops/valast v1.5.0 h1:FBTuvVi0wjTngtXJRZXMbkN/Dn6DgsUsBwch2DUJU8Y=
github.com/hexops/valast v1.5.0/go.mod h1:Jcy1pNH7LNraVaAZDLyv21hHg2WBv9Nf9FL6fGxU7o4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM=
mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ=

main.go (669 changed lines)

@@ -1,52 +1,66 @@
package main package main
import ( import (
"errors"
"flag" "flag"
"fmt" "fmt"
"log"
"os" "os"
"sort"
"sync" "sync"
"sync/atomic"
"time"
"cook/processor" "github.com/bmatcuk/doublestar/v4"
"cook/utils"
"gopkg.in/yaml.v3" "modify/processor"
logger "git.site.quack-lab.dev/dave/cylogger"
) )
// mainLogger is a scoped logger for the main package.
var mainLogger = logger.Default.WithPrefix("main")
type GlobalStats struct { type GlobalStats struct {
TotalMatches int64 TotalMatches int
TotalModifications int64 TotalModifications int
ProcessedFiles int64 ProcessedFiles int
FailedFiles int64 FailedFiles int
ModificationsPerCommand sync.Map
} }
var ( type FileMode string
stats GlobalStats = GlobalStats{
ModificationsPerCommand: sync.Map{}, const (
} ModeRegex FileMode = "regex"
ModeXML FileMode = "xml"
ModeJSON FileMode = "json"
) )
var stats GlobalStats
var logger *log.Logger
var (
fileModeFlag = flag.String("mode", "regex", "Processing mode: regex, xml, json")
verboseFlag = flag.Bool("verbose", false, "Enable verbose output")
)
func init() {
log.SetFlags(log.Lmicroseconds | log.Lshortfile)
logger = log.New(os.Stdout, "", log.Lmicroseconds|log.Lshortfile)
stats = GlobalStats{}
}
func main() { func main() {
flag.Usage = func() { flag.Usage = func() {
CreateExampleConfig()
fmt.Fprintf(os.Stderr, "Usage: %s [options] <pattern> <lua_expression> <...files_or_globs>\n", os.Args[0]) fmt.Fprintf(os.Stderr, "Usage: %s [options] <pattern> <lua_expression> <...files_or_globs>\n", os.Args[0])
fmt.Fprintf(os.Stderr, "\nOptions:\n") fmt.Fprintf(os.Stderr, "\nOptions:\n")
fmt.Fprintf(os.Stderr, " -reset\n") fmt.Fprintf(os.Stderr, " -mode string\n")
fmt.Fprintf(os.Stderr, " Reset files to their original state\n") fmt.Fprintf(os.Stderr, " Processing mode: regex, xml, json (default \"regex\")\n")
fmt.Fprintf(os.Stderr, " -loglevel string\n") fmt.Fprintf(os.Stderr, " -xpath string\n")
fmt.Fprintf(os.Stderr, " Set logging level: ERROR, WARNING, INFO, DEBUG, TRACE (default \"INFO\")\n") fmt.Fprintf(os.Stderr, " XPath expression (for XML mode)\n")
fmt.Fprintf(os.Stderr, " -jsonpath string\n")
fmt.Fprintf(os.Stderr, " JSONPath expression (for JSON mode)\n")
fmt.Fprintf(os.Stderr, " -verbose\n")
fmt.Fprintf(os.Stderr, " Enable verbose output\n")
fmt.Fprintf(os.Stderr, "\nExamples:\n") fmt.Fprintf(os.Stderr, "\nExamples:\n")
fmt.Fprintf(os.Stderr, " Regex mode (default):\n") fmt.Fprintf(os.Stderr, " Regex mode (default):\n")
fmt.Fprintf(os.Stderr, " %s \"<value>(\\\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0]) fmt.Fprintf(os.Stderr, " %s \"<value>(\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
fmt.Fprintf(os.Stderr, " XML mode:\n")
fmt.Fprintf(os.Stderr, " %s -mode=xml -xpath=\"//value\" \"*1.5\" data.xml\n", os.Args[0])
fmt.Fprintf(os.Stderr, " JSON mode:\n")
fmt.Fprintf(os.Stderr, " %s -mode=json -jsonpath=\"$.items[*].value\" \"*1.5\" data.json\n", os.Args[0])
fmt.Fprintf(os.Stderr, "\nNote: v1, v2, etc. are used to refer to capture groups as numbers.\n") fmt.Fprintf(os.Stderr, "\nNote: v1, v2, etc. are used to refer to capture groups as numbers.\n")
fmt.Fprintf(os.Stderr, " s1, s2, etc. are used to refer to capture groups as strings.\n") fmt.Fprintf(os.Stderr, " s1, s2, etc. are used to refer to capture groups as strings.\n")
fmt.Fprintf(os.Stderr, " Helper functions: num(str) converts string to number, str(num) converts number to string\n") fmt.Fprintf(os.Stderr, " Helper functions: num(str) converts string to number, str(num) converts number to string\n")
@@ -55,557 +69,114 @@ func main() {
fmt.Fprintf(os.Stderr, " You can use any valid Lua code, including if statements, loops, etc.\n") fmt.Fprintf(os.Stderr, " You can use any valid Lua code, including if statements, loops, etc.\n")
fmt.Fprintf(os.Stderr, " Glob patterns are supported for file selection (*.xml, data/**.xml, etc.)\n") fmt.Fprintf(os.Stderr, " Glob patterns are supported for file selection (*.xml, data/**.xml, etc.)\n")
} }
// TODO: Fix bed shitting when doing *.yml in barotrauma directory
flag.Parse() flag.Parse()
args := flag.Args() args := flag.Args()
logger.InitFlag() if len(args) < 3 {
mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String()) fmt.Fprintf(os.Stderr, "%s mode requires %d arguments minimum\n", *fileModeFlag, 3)
mainLogger.Trace("Full argv: %v", os.Args)
if flag.NArg() == 0 {
flag.Usage() flag.Usage()
return return
} }
mainLogger.Debug("Getting database connection") // Get the appropriate pattern and expression based on mode
db, err := utils.GetDB() var pattern, luaExpr string
var filePatterns []string
if *fileModeFlag == "regex" {
pattern = args[0]
luaExpr = args[1]
filePatterns = args[2:]
} else {
// For XML/JSON modes, pattern comes from flags
luaExpr = args[0]
filePatterns = args[1:]
}
// Prepare the Lua expression
originalLuaExpr := luaExpr
luaExpr = processor.BuildLuaScript(luaExpr)
if originalLuaExpr != luaExpr {
logger.Printf("Transformed Lua expression from %q to %q", originalLuaExpr, luaExpr)
}
// Expand file patterns with glob support
files, err := expandFilePatterns(filePatterns)
if err != nil { if err != nil {
mainLogger.Error("Failed to get database: %v", err) fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
return
}
mainLogger.Debug("Database connection established")
workdone, err := HandleSpecialArgs(args, err, db)
if err != nil {
mainLogger.Error("Failed to handle special args: %v", err)
return
}
if workdone {
mainLogger.Info("Special arguments handled, exiting.")
return return
} }
// The plan is: if len(files) == 0 {
// Load all commands fmt.Fprintf(os.Stderr, "No files found matching the specified patterns\n")
mainLogger.Debug("Loading commands from arguments")
mainLogger.Trace("Arguments: %v", args)
commands, err := utils.LoadCommands(args)
if err != nil || len(commands) == 0 {
mainLogger.Error("Failed to load commands: %v", err)
flag.Usage()
return return
} }
// Collect global modifiers from special entries and filter them out
vars := map[string]interface{}{}
filtered := make([]utils.ModifyCommand, 0, len(commands))
for _, c := range commands {
if len(c.Modifiers) > 0 && c.Name == "" && c.Regex == "" && len(c.Regexes) == 0 && c.Lua == "" && len(c.Files) == 0 {
for k, v := range c.Modifiers {
vars[k] = v
}
continue
}
filtered = append(filtered, c)
}
if len(vars) > 0 {
mainLogger.Info("Loaded %d global modifiers", len(vars))
processor.SetVariables(vars)
}
commands = filtered
mainLogger.Info("Loaded %d commands", len(commands))
if *utils.Filter != "" { // Create the processor based on mode
mainLogger.Info("Filtering commands by name: %s", *utils.Filter) var proc processor.Processor
commands = utils.FilterCommands(commands, *utils.Filter) switch *fileModeFlag {
mainLogger.Info("Filtered %d commands", len(commands)) case "regex":
proc = &processor.RegexProcessor{}
logger.Printf("Starting regex modifier with pattern %q, expression %q on %d files",
pattern, luaExpr, len(files))
// case "xml":
// proc = &processor.XMLProcessor{}
// pattern = *xpathFlag
// logger.Printf("Starting XML modifier with XPath %q, expression %q on %d files",
// pattern, luaExpr, len(files))
// case "json":
// proc = &processor.JSONProcessor{}
// pattern = *jsonpathFlag
// logger.Printf("Starting JSON modifier with JSONPath %q, expression %q on %d files",
// pattern, luaExpr, len(files))
} }
// Then aggregate all the globs and deduplicate them var wg sync.WaitGroup
mainLogger.Debug("Aggregating globs and deduplicating") // Process each file
globs := utils.AggregateGlobs(commands) for _, file := range files {
mainLogger.Debug("Aggregated %d globs before deduplication", utils.CountGlobsBeforeDedup(commands))
for _, command := range commands {
mainLogger.Trace("Command: %s", command.Name)
if len(command.Regexes) > 0 {
mainLogger.Trace("Regexes: %v", command.Regexes)
} else {
mainLogger.Trace("Regex: %s", command.Regex)
}
mainLogger.Trace("Files: %v", command.Files)
mainLogger.Trace("Lua: %s", command.Lua)
mainLogger.Trace("Reset: %t", command.Reset)
mainLogger.Trace("Isolate: %t", command.Isolate)
mainLogger.Trace("LogLevel: %s", command.LogLevel)
}
// Resolve all the files for all the globs
mainLogger.Info("Found %d unique file patterns", len(globs))
mainLogger.Debug("Expanding glob patterns to files")
files, err := utils.ExpandGLobs(globs)
if err != nil {
mainLogger.Error("Failed to expand file patterns: %v", err)
return
}
mainLogger.Info("Found %d files to process", len(files))
mainLogger.Trace("Files to process: %v", files)
// Somehow connect files to commands via globs..
// For each file check every glob of every command
// Maybe memoize this part
// That way we know what commands affect what files
mainLogger.Debug("Associating files with commands")
associations, err := utils.AssociateFilesWithCommands(files, commands)
if err != nil {
mainLogger.Error("Failed to associate files with commands: %v", err)
return
}
mainLogger.Debug("Files associated with commands")
mainLogger.Trace("File-command associations: %v", associations)
// Per-file association summary for better visibility when debugging
for file, assoc := range associations {
cmdNames := make([]string, 0, len(assoc.Commands))
for _, c := range assoc.Commands {
cmdNames = append(cmdNames, c.Name)
}
isoNames := make([]string, 0, len(assoc.IsolateCommands))
for _, c := range assoc.IsolateCommands {
isoNames = append(isoNames, c.Name)
}
mainLogger.Debug("File %q has %d regular and %d isolate commands", file, len(assoc.Commands), len(assoc.IsolateCommands))
mainLogger.Trace("\tRegular: %v", cmdNames)
mainLogger.Trace("\tIsolate: %v", isoNames)
}
mainLogger.Debug("Resetting files where necessary")
err = utils.ResetWhereNecessary(associations, db)
if err != nil {
mainLogger.Error("Failed to reset files where necessary: %v", err)
return
}
mainLogger.Debug("Files reset where necessary")
// Then for each file run all commands associated with the file
workers := make(chan struct{}, *utils.ParallelFiles)
wg := sync.WaitGroup{}
mainLogger.Debug("Starting file processing with %d parallel workers", *utils.ParallelFiles)
// Add performance tracking
startTime := time.Now()
// Create a map to store loggers for each command
commandLoggers := make(map[string]*logger.Logger)
for _, command := range commands {
// Create a named logger for each command
cmdName := command.Name
if cmdName == "" {
// If no name is provided, use a short version of the regex pattern
if len(command.Regex) > 20 {
cmdName = command.Regex[:17] + "..."
} else {
cmdName = command.Regex
}
}
// Parse the log level for this specific command
cmdLogLevel := logger.ParseLevel(command.LogLevel)
// Create a logger with the command name as a field
commandLoggers[command.Name] = logger.Default.WithField("command", cmdName)
commandLoggers[command.Name].SetLevel(cmdLogLevel)
mainLogger.Debug("Created logger for command %q with log level %s", cmdName, cmdLogLevel.String())
}
for file, association := range associations {
workers <- struct{}{}
wg.Add(1)
logger.SafeGoWithArgs(func(args ...interface{}) {
defer func() { <-workers }()
defer wg.Done()
// Track per-file processing time
fileStartTime := time.Now()
mainLogger.Debug("Reading file %q", file)
fileData, err := os.ReadFile(file)
if err != nil {
mainLogger.Error("Failed to read file %q: %v", file, err)
atomic.AddInt64(&stats.FailedFiles, 1)
return
}
fileDataStr := string(fileData)
mainLogger.Trace("File %q content: %s", file, utils.LimitString(fileDataStr, 500))
isChanged := false
mainLogger.Debug("Running isolate commands for file %q", file)
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr)
if err != nil && err != NothingToDo {
mainLogger.Error("Failed to run isolate commands for file %q: %v", file, err)
atomic.AddInt64(&stats.FailedFiles, 1)
return
}
if err != NothingToDo {
isChanged = true
}
mainLogger.Debug("Running other commands for file %q", file)
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers)
if err != nil && err != NothingToDo {
mainLogger.Error("Failed to run other commands for file %q: %v", file, err)
atomic.AddInt64(&stats.FailedFiles, 1)
return
}
if err != NothingToDo {
isChanged = true
}
if isChanged {
mainLogger.Debug("Saving file %q to database", file)
err = db.SaveFile(file, fileData)
if err != nil {
mainLogger.Error("Failed to save file %q to database: %v", file, err)
atomic.AddInt64(&stats.FailedFiles, 1)
return
}
mainLogger.Debug("File %q saved to database", file)
}
mainLogger.Debug("Writing file %q", file)
err = os.WriteFile(file, []byte(fileDataStr), 0644)
if err != nil {
mainLogger.Error("Failed to write file %q: %v", file, err)
atomic.AddInt64(&stats.FailedFiles, 1)
return
}
mainLogger.Debug("File %q written", file)
// Only increment ProcessedFiles once per file, after all processing is complete
atomic.AddInt64(&stats.ProcessedFiles, 1)
mainLogger.Debug("File %q processed in %v", file, time.Since(fileStartTime))
}, file, commands)
}
wg.Wait()
processingTime := time.Since(startTime)
mainLogger.Info("Processing completed in %v", processingTime)
processedFiles := atomic.LoadInt64(&stats.ProcessedFiles)
if processedFiles > 0 {
mainLogger.Info("Average time per file: %v", processingTime/time.Duration(processedFiles))
}
// TODO: Also give each command its own logger, maybe prefix it with something... Maybe give commands a name?
// Do that with logger.WithField("loglevel", level.String())
// Since each command also has its own log level
// TODO: Maybe even figure out how to run individual commands...?
// TODO: What to do with git? Figure it out ....
// if *gitFlag {
// mainLogger.Info("Git integration enabled, setting up git repository")
// err := setupGit()
// if err != nil {
// mainLogger.Error("Failed to setup git: %v", err)
// fmt.Fprintf(os.Stderr, "Error setting up git: %v\n", err)
// return
// }
// }
// mainLogger.Debug("Expanding file patterns")
// files, err := expandFilePatterns(filePatterns)
// if err != nil {
// mainLogger.Error("Failed to expand file patterns: %v", err)
// fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
// return
// }
// if *gitFlag {
// mainLogger.Info("Cleaning up git files before processing")
// err := cleanupGitFiles(files)
// if err != nil {
// mainLogger.Error("Failed to cleanup git files: %v", err)
// fmt.Fprintf(os.Stderr, "Error cleaning up git files: %v\n", err)
// return
// }
// }
// if *resetFlag {
// mainLogger.Info("Files reset to their original state, nothing more to do")
// log.Printf("Files reset to their original state, nothing more to do")
// return
// }
// Print summary
totalModifications := atomic.LoadInt64(&stats.TotalModifications)
if totalModifications == 0 {
mainLogger.Warning("No modifications were made in any files")
} else {
failedFiles := atomic.LoadInt64(&stats.FailedFiles)
mainLogger.Info("Operation complete! Modified %d values in %d/%d files",
totalModifications, processedFiles, processedFiles+failedFiles)
sortedCommands := []string{}
stats.ModificationsPerCommand.Range(func(key, value interface{}) bool {
sortedCommands = append(sortedCommands, key.(string))
return true
})
sort.Strings(sortedCommands)
for _, command := range sortedCommands {
count, _ := stats.ModificationsPerCommand.Load(command)
if count.(int) > 0 {
mainLogger.Info("\tCommand %q made %d modifications", command, count)
} else {
mainLogger.Warning("\tCommand %q made no modifications", command)
}
}
}
}
func HandleSpecialArgs(args []string, err error, db utils.DB) (bool, error) {
handleSpecialArgsLogger := logger.Default.WithPrefix("HandleSpecialArgs")
handleSpecialArgsLogger.Debug("Handling special arguments: %v", args)
switch args[0] {
case "reset":
handleSpecialArgsLogger.Info("Resetting all files")
err = utils.ResetAllFiles(db)
if err != nil {
handleSpecialArgsLogger.Error("Failed to reset all files: %v", err)
return true, err
}
handleSpecialArgsLogger.Info("All files reset")
return true, nil
case "dump":
handleSpecialArgsLogger.Info("Dumping all files from database")
err = db.RemoveAllFiles()
if err != nil {
handleSpecialArgsLogger.Error("Failed to remove all files from database: %v", err)
return true, err
}
handleSpecialArgsLogger.Info("All files removed from database")
return true, nil
}
handleSpecialArgsLogger.Debug("No special arguments handled, returning false")
return false, nil
}
func CreateExampleConfig() {
createExampleConfigLogger := logger.Default.WithPrefix("CreateExampleConfig")
createExampleConfigLogger.Debug("Creating example configuration file")
commands := []utils.ModifyCommand{
// Global modifiers only entry (no name/regex/lua/files)
{
Modifiers: map[string]interface{}{
"foobar": 4,
"multiply": 1.5,
"prefix": "NEW_",
"enabled": true,
},
},
// Multi-regex example using $variable in Lua
{
Name: "RFToolsMultiply",
Regexes: []string{"generatePerTick = !num", "ticksPer\\w+ = !num", "generatorRFPerTick = !num"},
Lua: "* $foobar",
Files: []string{"polymc/instances/**/rftools*.toml", `polymc\\instances\\**\\rftools*.toml`},
Reset: true,
// LogLevel defaults to INFO
},
// Named capture groups with arithmetic and string ops
{
Name: "UpdateAmountsAndItems",
Regex: `(?P<amount>!num)\s+units\s+of\s+(?P<item>[A-Za-z_\-]+)`,
Lua: `amount = amount * $multiply; item = upper(item); return true`,
Files: []string{"data/**/*.txt"},
// INFO log level
},
// Full replacement via Lua 'replacement' variable
{
Name: "BumpMinorVersion",
Regex: `version\s*=\s*"(?P<major>!num)\.(?P<minor>!num)\.(?P<patch>!num)"`,
Lua: `replacement = format("version=\"%s.%s.%s\"", major, num(minor)+1, 0); return true`,
Files: []string{"config/*.ini", "config/*.cfg"},
},
// Multiline regex example (DOTALL is auto-enabled). Captures numeric in nested XML.
{
Name: "XMLNestedValueMultiply",
Regex: `<item>\s*\s*<name>!any<\/name>\s*\s*<value>(!num)<\/value>\s*\s*<\/item>`,
Lua: `* $multiply`,
Files: []string{"data/**/*.xml"},
// Demonstrates multiline regex in YAML
},
// Multiline regexES array, with different patterns handled by same Lua
{
Name: "MultiLinePatterns",
Regexes: []string{
`<entry>\s*\n\s*<id>(?P<id>!num)</id>\s*\n\s*<score>(?P<score>!num)</score>\s*\n\s*</entry>`,
`\[block\]\nkey=(?P<key>[A-Za-z_]+)\nvalue=(?P<val>!num)`,
},
Lua: `if is_number(score) then score = score * 2 end; if is_number(val) then val = val * 3 end; return true`,
Files: []string{"examples/**/*.*"},
LogLevel: "DEBUG",
},
// Use equals operator shorthand and boolean variable
{
Name: "EnableFlags",
Regex: `enabled\s*=\s*(true|false)`,
Lua: `= $enabled`,
Files: []string{"**/*.toml"},
},
// Demonstrate NoDedup to allow overlapping replacements
{
Name: "OverlappingGroups",
Regex: `(?P<a>!num)(?P<b>!num)`,
Lua: `a = num(a) + 1; b = num(b) + 1; return true`,
Files: []string{"overlap/**/*.txt"},
NoDedup: true,
},
// Isolate command example operating on entire matched block
{
Name: "IsolateUppercaseBlock",
Regex: `BEGIN\n(?P<block>!any)\nEND`,
Lua: `block = upper(block); return true`,
Files: []string{"logs/**/*.log"},
Isolate: true,
LogLevel: "TRACE",
},
// Using !rep placeholder and arrays of files
{
Name: "RepeatPlaceholderExample",
Regex: `name: (.*) !rep(, .* , 2)`,
Lua: `-- no-op, just demonstrate placeholder; return false`,
Files: []string{"lists/**/*.yml", "lists/**/*.yaml"},
},
// Using string variable in Lua expression
{
Name: "PrefixKeys",
Regex: `(?P<key>[A-Za-z0-9_]+)\s*=`,
Lua: `key = $prefix .. key; return true`,
Files: []string{"**/*.properties"},
},
}
data, err := yaml.Marshal(commands)
if err != nil {
createExampleConfigLogger.Error("Failed to marshal example config: %v", err)
return
}
createExampleConfigLogger.Debug("Writing example_cook.yml")
err = os.WriteFile("example_cook.yml", data, 0644)
if err != nil {
createExampleConfigLogger.Error("Failed to write example_cook.yml: %v", err)
return
}
createExampleConfigLogger.Info("Wrote example_cook.yml")
}
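A cookfile produced this way can be read back with the same YAML machinery. The sketch below is illustrative only; the `cook/utils` import path and the `gopkg.in/yaml.v3` package are assumptions inferred from imports visible elsewhere in this diff, not confirmed project dependencies.

```go
// Illustrative sketch: load a generated cookfile back into ModifyCommand values.
package main

import (
	"fmt"
	"os"

	"cook/utils" // assumed module path, matching imports seen elsewhere in this diff

	"gopkg.in/yaml.v3" // assumed YAML library; the project may pin a different one
)

func main() {
	data, err := os.ReadFile("example_cook.yml")
	if err != nil {
		panic(err)
	}
	var commands []utils.ModifyCommand
	if err := yaml.Unmarshal(data, &commands); err != nil {
		panic(err)
	}
	for _, c := range commands {
		fmt.Printf("command %q targets %v\n", c.Name, c.Files)
	}
}
```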
var NothingToDo = errors.New("nothing to do")
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger) (string, error) {
runOtherCommandsLogger := mainLogger.WithPrefix("RunOtherCommands").WithField("file", file)
runOtherCommandsLogger.Debug("Running other commands for file")
runOtherCommandsLogger.Trace("File data before modifications: %s", utils.LimitString(fileDataStr, 200))
// Aggregate all the modifications and execute them
modifications := []utils.ReplaceCommand{}
numCommandsConsidered := 0
for _, command := range association.Commands {
// Use command-specific logger if available, otherwise fall back to default logger
cmdLogger := logger.Default
if cmdLog, ok := commandLoggers[command.Name]; ok {
cmdLogger = cmdLog
}
patterns := command.Regexes
if len(patterns) == 0 {
patterns = []string{command.Regex}
}
for idx, pattern := range patterns {
tmpCmd := command
tmpCmd.Regex = pattern
cmdLogger.Debug("Begin processing file with command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
numCommandsConsidered++
newModifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
if err != nil {
runOtherCommandsLogger.Error("Failed to process file with command %q: %v", command.Name, err)
continue
}
modifications = append(modifications, newModifications...)
count, ok := stats.ModificationsPerCommand.Load(command.Name)
if !ok {
count = 0
}
stats.ModificationsPerCommand.Store(command.Name, count.(int)+len(newModifications))
cmdLogger.Debug("Command %q generated %d modifications (pattern %d/%d)", command.Name, len(newModifications), idx+1, len(patterns))
cmdLogger.Trace("Modifications generated by command %q: %v", command.Name, newModifications)
if len(newModifications) == 0 {
cmdLogger.Debug("No modifications yielded by command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
}
}
}
runOtherCommandsLogger.Debug("Aggregated %d modifications from %d command-pattern runs", len(modifications), numCommandsConsidered)
runOtherCommandsLogger.Trace("All aggregated modifications: %v", modifications)
if len(modifications) == 0 {
runOtherCommandsLogger.Warning("No modifications found for file")
return fileDataStr, NothingToDo
}
runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))
// Sort commands in reverse order for safe replacements
var count int
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
runOtherCommandsLogger.Trace("File data after modifications: %s", utils.LimitString(fileDataStr, 200))
atomic.AddInt64(&stats.TotalModifications, int64(count))
runOtherCommandsLogger.Info("Executed %d modifications for file", count)
return fileDataStr, nil
}
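The comment above about sorting in reverse order is the key invariant behind ExecuteModifications: applying replacements from the highest byte offset down keeps the earlier offsets valid. A self-contained sketch of that idea follows; the `span` struct is hypothetical and stands in for the real utils.ReplaceCommand, whose fields are not shown in this diff.

```go
// Hypothetical sketch of reverse-order replacement; field names are illustrative only.
package main

import (
	"fmt"
	"sort"
)

type span struct {
	Start, End int    // byte offsets into the original string
	With       string // replacement text
}

func applyAll(s string, mods []span) string {
	// Apply from the highest offset down so earlier offsets stay valid.
	sort.Slice(mods, func(i, j int) bool { return mods[i].Start > mods[j].Start })
	for _, m := range mods {
		s = s[:m.Start] + m.With + s[m.End:]
	}
	return s
}

func main() {
	fmt.Println(applyAll("price=10 price=20", []span{{6, 8, "15"}, {15, 17, "25"}}))
	// Output: price=15 price=25
}
```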
func RunIsolateCommands(association utils.FileCommandAssociation, file string, fileDataStr string) (string, error) {
runIsolateCommandsLogger := mainLogger.WithPrefix("RunIsolateCommands").WithField("file", file)
runIsolateCommandsLogger.Debug("Running isolate commands for file")
runIsolateCommandsLogger.Trace("File data before isolate modifications: %s", utils.LimitString(fileDataStr, 200))
anythingDone := false
for _, isolateCommand := range association.IsolateCommands {
runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q", isolateCommand.Regex)
patterns := isolateCommand.Regexes
if len(patterns) == 0 {
patterns = []string{isolateCommand.Regex}
}
for idx, pattern := range patterns {
tmpCmd := isolateCommand
tmpCmd.Regex = pattern
modifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
if err != nil {
runIsolateCommandsLogger.Error("Failed to process file with isolate command %q (pattern %d/%d): %v", isolateCommand.Name, idx+1, len(patterns), err)
continue
}
if len(modifications) == 0 {
runIsolateCommandsLogger.Debug("Isolate command %q produced no modifications (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
continue
}
anythingDone = true
runIsolateCommandsLogger.Debug("Executing %d isolate modifications for file", len(modifications))
runIsolateCommandsLogger.Trace("Isolate modifications: %v", modifications)
var count int
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(fileDataStr, 200))
atomic.AddInt64(&stats.TotalModifications, int64(count))
runIsolateCommandsLogger.Info("Executed %d isolate modifications for file", count)
}
}
if !anythingDone {
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
return fileDataStr, NothingToDo
}
return fileDataStr, nil
} }

174
processor/json.go Normal file
View File

@@ -0,0 +1,174 @@
package processor
import (
"encoding/json"
"fmt"
"modify/processor/jsonpath"
"os"
"path/filepath"
"strings"
lua "github.com/yuin/gopher-lua"
)
// JSONProcessor implements the Processor interface for JSON documents
type JSONProcessor struct{}
// Process implements the Processor interface for JSONProcessor
func (p *JSONProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
// Read file content
cwd, err := os.Getwd()
if err != nil {
return 0, 0, fmt.Errorf("error getting current working directory: %v", err)
}
fullPath := filepath.Join(cwd, filename)
content, err := os.ReadFile(fullPath)
if err != nil {
return 0, 0, fmt.Errorf("error reading file: %v", err)
}
fileContent := string(content)
// Process the content
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
if err != nil {
return 0, 0, err
}
// If we made modifications, save the file
if modCount > 0 {
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
if err != nil {
return 0, 0, fmt.Errorf("error writing file: %v", err)
}
}
return modCount, matchCount, nil
}
// ProcessContent implements the Processor interface for JSONProcessor
func (p *JSONProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
// Parse JSON document
var jsonData interface{}
err := json.Unmarshal([]byte(content), &jsonData)
if err != nil {
return content, 0, 0, fmt.Errorf("error parsing JSON: %v", err)
}
// Find nodes matching the JSONPath pattern
nodes, err := jsonpath.Get(jsonData, pattern)
if err != nil {
return content, 0, 0, fmt.Errorf("error getting nodes: %v", err)
}
matchCount := len(nodes)
if matchCount == 0 {
return content, 0, 0, nil
}
// Initialize Lua
L, err := NewLuaState()
if err != nil {
return content, len(nodes), 0, fmt.Errorf("error creating Lua state: %v", err)
}
defer L.Close()
err = p.ToLua(L, nodes)
if err != nil {
return content, len(nodes), 0, fmt.Errorf("error converting to Lua: %v", err)
}
// Execute Lua script
if err := L.DoString(luaExpr); err != nil {
return content, len(nodes), 0, fmt.Errorf("error executing Lua %s: %v", luaExpr, err)
}
// Get modified value
result, err := p.FromLua(L)
if err != nil {
return content, len(nodes), 0, fmt.Errorf("error getting result from Lua: %v", err)
}
// Apply the modification to the JSON data
err = p.updateJSONValue(jsonData, pattern, result)
if err != nil {
return content, len(nodes), 0, fmt.Errorf("error updating JSON: %v", err)
}
// Convert the modified JSON back to a string, preserving the original indentation where possible
var jsonBytes []byte
indent, indentErr := detectJsonIndentation(content)
if indentErr != nil || indent == "" {
// Fall back to a standard 2-space indent when none can be detected
indent = "  "
}
jsonBytes, err = json.MarshalIndent(jsonData, "", indent)
if err != nil {
return content, len(nodes), 0, fmt.Errorf("error marshaling JSON: %v", err)
}
// Every matched node is counted as modified; updateJSONValue does not report per-node results
return string(jsonBytes), len(nodes), len(nodes), nil
}
// detectJsonIndentation tries to determine the indentation used in the original JSON
func detectJsonIndentation(content string) (string, error) {
lines := strings.Split(content, "\n")
if len(lines) < 2 {
return "", fmt.Errorf("not enough lines to detect indentation")
}
// Look for the first indented line
for i := 1; i < len(lines); i++ {
line := lines[i]
trimmed := strings.TrimSpace(line)
if trimmed == "" {
continue
}
// Calculate leading whitespace
indent := line[:len(line)-len(trimmed)]
if len(indent) > 0 {
return indent, nil
}
}
return "", fmt.Errorf("no indentation detected")
}
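In other words, the detected indent is simply the leading whitespace of the first indented, non-blank line, so both space- and tab-indented documents round-trip. A small sketch of the expected behaviour, assuming the surrounding package context (not part of the original file):

```go
// Editor's sketch: expected results from detectJsonIndentation above.
func exampleDetectJsonIndentation() {
	indent, _ := detectJsonIndentation("{\n    \"a\": 1\n}")
	fmt.Printf("%q\n", indent) // "    " — four spaces, copied from the first indented line
	indent, _ = detectJsonIndentation("{\n\t\"a\": 1\n}")
	fmt.Printf("%q\n", indent) // "\t" — tabs are preserved as-is
}
```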
// / Selects from the root node
// // Selects nodes in the document from the current node that match the selection no matter where they are
// . Selects the current node
// @ Selects attributes
// /bookstore/* Selects all the child element nodes of the bookstore element
// //* Selects all elements in the document
// /bookstore/book[1] Selects the first book element that is the child of the bookstore element.
// /bookstore/book[last()] Selects the last book element that is the child of the bookstore element
// /bookstore/book[last()-1] Selects the last but one book element that is the child of the bookstore element
// /bookstore/book[position()<3] Selects the first two book elements that are children of the bookstore element
// //title[@lang] Selects all the title elements that have an attribute named lang
// //title[@lang='en'] Selects all the title elements that have a "lang" attribute with a value of "en"
// /bookstore/book[price>35.00] Selects all the book elements of the bookstore element that have a price element with a value greater than 35.00
// /bookstore/book[price>35.00]/title Selects all the title elements of the book elements of the bookstore element that have a price element with a value greater than 35.00
// updateJSONValue updates a value in the JSON structure based on its JSONPath
func (p *JSONProcessor) updateJSONValue(jsonData interface{}, path string, newValue interface{}) error {
return nil
}
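As it stands, updateJSONValue is a stub, so ProcessContent re-serializes the document without applying the Lua result. One plausible completion, shown only as a sketch, is to delegate to the jsonpath package added in this same change set; propagating distinct per-node results would additionally need the JSONNode.Path values returned by Get.

```go
// Sketch only, not the original implementation: write the new value to every
// node matched by the path, reusing jsonpath.SetAll from this change set.
func (p *JSONProcessor) updateJSONValue(jsonData interface{}, path string, newValue interface{}) error {
	return jsonpath.SetAll(jsonData, path, newValue)
}
```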
// ToLua converts JSON values to Lua variables
func (p *JSONProcessor) ToLua(L *lua.LState, data interface{}) error {
table, err := ToLuaTable(L, data)
if err != nil {
return err
}
L.SetGlobal("v", table)
return nil
}
// FromLua retrieves values from Lua
func (p *JSONProcessor) FromLua(L *lua.LState) (interface{}, error) {
luaValue := L.GetGlobal("v")
return FromLuaTable(L, luaValue.(*lua.LTable))
}

1019
processor/json_test.go Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,495 @@
package jsonpath
import (
"fmt"
"strconv"
)
// JSONStep represents a single step in a JSONPath query
type JSONStep struct {
Type StepType
Key string // For Child/RecursiveDescent
Index int // For Index (use -1 for wildcard "*")
}
// JSONNode represents a value in the JSON data with its path
type JSONNode struct {
Value interface{} // The value found at the path
Path string // The exact JSONPath where the value was found
}
// StepType defines the types of steps in a JSONPath
type StepType int
const (
RootStep StepType = iota // $ - The root element
ChildStep // .key - Direct child access
RecursiveDescentStep // ..key - Recursive search for key
WildcardStep // .* - All children of an object
IndexStep // [n] - Array index access (or [*] for all elements)
)
// TraversalMode determines how the traversal behaves
type TraversalMode int
const (
CollectMode TraversalMode = iota // Just collect matched nodes
ModifyFirstMode // Modify first matching node
ModifyAllMode // Modify all matching nodes
)
// ParseJSONPath parses a JSONPath string into a sequence of steps
func ParseJSONPath(path string) ([]JSONStep, error) {
if len(path) == 0 || path[0] != '$' {
return nil, fmt.Errorf("path must start with $; received: %q", path)
}
steps := []JSONStep{}
i := 0
for i < len(path) {
switch path[i] {
case '$':
steps = append(steps, JSONStep{Type: RootStep})
i++
case '.':
i++
if i < len(path) && path[i] == '.' {
// Recursive descent
i++
key, nextPos := readKey(path, i)
steps = append(steps, JSONStep{Type: RecursiveDescentStep, Key: key})
i = nextPos
} else {
// Child step or wildcard
key, nextPos := readKey(path, i)
if key == "*" {
steps = append(steps, JSONStep{Type: WildcardStep})
} else {
steps = append(steps, JSONStep{Type: ChildStep, Key: key})
}
i = nextPos
}
case '[':
// Index step
i++
indexStr, nextPos := readIndex(path, i)
if indexStr == "*" {
steps = append(steps, JSONStep{Type: IndexStep, Index: -1})
} else {
index, err := strconv.Atoi(indexStr)
if err != nil {
return nil, fmt.Errorf("invalid index: %s; error: %w", indexStr, err)
}
steps = append(steps, JSONStep{Type: IndexStep, Index: index})
}
i = nextPos + 1 // Skip closing ]
default:
return nil, fmt.Errorf("unexpected character: %c at position %d; path: %q", path[i], i, path)
}
}
return steps, nil
}
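For example, a typical path flattens into the following step sequence; this mirrors the expectations in TestParser further below, and the wrapper function is only illustrative, relying on the surrounding package.

```go
// Editor's sketch: ParseJSONPath("$.store.book[0].title") yields, in order:
//   {Type: RootStep}
//   {Type: ChildStep, Key: "store"}
//   {Type: ChildStep, Key: "book"}
//   {Type: IndexStep, Index: 0}
//   {Type: ChildStep, Key: "title"}
func exampleParseJSONPath() {
	steps, err := ParseJSONPath("$.store.book[0].title")
	fmt.Println(len(steps), err) // 5 <nil>
}
```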
// readKey extracts a key name from the path
func readKey(path string, start int) (string, int) {
i := start
for ; i < len(path); i++ {
if path[i] == '.' || path[i] == '[' {
break
}
}
return path[start:i], i
}
// readIndex extracts an array index or wildcard from the path
func readIndex(path string, start int) (string, int) {
i := start
for ; i < len(path); i++ {
if path[i] == ']' {
break
}
}
return path[start:i], i
}
// Get retrieves values with their paths from data at the specified JSONPath
// Each returned JSONNode contains both the value and its exact path in the data structure
func Get(data interface{}, path string) ([]JSONNode, error) {
steps, err := ParseJSONPath(path)
if err != nil {
return nil, fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
}
results := []JSONNode{}
err = traverseWithPaths(data, steps, &results, "$")
if err != nil {
return nil, fmt.Errorf("failed to traverse JSONPath %q: %w", path, err)
}
return results, nil
}
// Set updates the value at the specified JSONPath in the original data structure.
// It only modifies the first matching node.
func Set(data interface{}, path string, value interface{}) error {
steps, err := ParseJSONPath(path)
if err != nil {
return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
}
if len(steps) <= 1 {
return fmt.Errorf("cannot set root node; the provided path %q is invalid", path)
}
success := false
err = setWithPath(data, steps, &success, value, "$", ModifyFirstMode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
}
return nil
}
// SetAll updates all matching values at the specified JSONPath.
func SetAll(data interface{}, path string, value interface{}) error {
steps, err := ParseJSONPath(path)
if err != nil {
return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
}
if len(steps) <= 1 {
return fmt.Errorf("cannot set root node; the provided path %q is invalid", path)
}
success := false
err = setWithPath(data, steps, &success, value, "$", ModifyAllMode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
}
return nil
}
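Set and SetAll only differ when a path matches more than one node (wildcards or recursive descent): Set stops after the first write, SetAll keeps going. A short usage sketch, mirroring the tests below and assuming the surrounding package context:

```go
// Editor's sketch: reading and writing through wildcard paths.
func exampleSetAll() {
	data := map[string]interface{}{
		"users": []interface{}{
			map[string]interface{}{"active": true},
			map[string]interface{}{"active": true},
		},
	}
	_ = Set(data, "$.users[*].active", false)    // only users[0] is changed
	_ = SetAll(data, "$.users[*].active", false) // every match is changed

	nodes, _ := Get(data, "$.users[*].active")
	for _, n := range nodes {
		fmt.Println(n.Path, n.Value) // $.users[0].active false, then $.users[1].active false
	}
}
```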
// setWithPath modifies values while tracking paths
func setWithPath(node interface{}, steps []JSONStep, success *bool, value interface{}, currentPath string, mode TraversalMode) error {
if node == nil || *success && mode == ModifyFirstMode {
return nil
}
// Skip root step
actualSteps := steps
if len(steps) > 0 && steps[0].Type == RootStep {
if len(steps) == 1 {
return fmt.Errorf("cannot set root node; the provided path %q is invalid", currentPath)
}
actualSteps = steps[1:]
}
// Process the first step
if len(actualSteps) == 0 {
return fmt.Errorf("cannot set root node; no steps provided for path %q", currentPath)
}
step := actualSteps[0]
remainingSteps := actualSteps[1:]
isLastStep := len(remainingSteps) == 0
switch step.Type {
case ChildStep:
m, ok := node.(map[string]interface{})
if !ok {
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
}
childPath := currentPath + "." + step.Key
if isLastStep {
// We've reached the target, set the value
m[step.Key] = value
*success = true
return nil
}
// Create intermediate nodes if necessary
child, exists := m[step.Key]
if !exists {
// Create missing intermediate node
if len(remainingSteps) > 0 && remainingSteps[0].Type == IndexStep {
child = []interface{}{}
} else {
child = map[string]interface{}{}
}
m[step.Key] = child
}
err := setWithPath(child, remainingSteps, success, value, childPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
}
case IndexStep:
arr, ok := node.([]interface{})
if !ok {
return fmt.Errorf("node at path %q is not an array; actual type: %T", currentPath, node)
}
// Handle wildcard index
if step.Index == -1 {
for i, item := range arr {
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
if isLastStep {
arr[i] = value
*success = true
if mode == ModifyFirstMode {
return nil
}
} else {
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
}
if *success && mode == ModifyFirstMode {
return nil
}
}
}
return nil
}
// Handle specific index
if step.Index >= 0 && step.Index < len(arr) {
item := arr[step.Index]
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
if isLastStep {
arr[step.Index] = value
*success = true
} else {
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
}
}
}
case RecursiveDescentStep:
// For recursive descent, first check direct match at this level
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
if val, exists := m[step.Key]; exists {
directPath := currentPath + "." + step.Key
if isLastStep {
m[step.Key] = value
*success = true
if mode == ModifyFirstMode {
return nil
}
} else {
err := setWithPath(val, remainingSteps, success, value, directPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", directPath, err)
}
if *success && mode == ModifyFirstMode {
return nil
}
}
}
}
// Then continue recursion to all children
switch n := node.(type) {
case map[string]interface{}:
for k, v := range n {
childPath := currentPath + "." + k
// Skip keys we've already processed directly
if step.Key != "*" && k == step.Key {
continue
}
err := setWithPath(v, steps, success, value, childPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
}
if *success && mode == ModifyFirstMode {
return nil
}
}
case []interface{}:
for i, v := range n {
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
err := setWithPath(v, steps, success, value, childPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
}
if *success && mode == ModifyFirstMode {
return nil
}
}
}
case WildcardStep:
m, ok := node.(map[string]interface{})
if !ok {
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
}
for k, v := range m {
childPath := currentPath + "." + k
if isLastStep {
m[k] = value
*success = true
if mode == ModifyFirstMode {
return nil
}
} else {
err := setWithPath(v, remainingSteps, success, value, childPath, mode)
if err != nil {
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
}
if *success && mode == ModifyFirstMode {
return nil
}
}
}
}
return nil
}
// traverseWithPaths tracks both nodes and their paths during traversal
func traverseWithPaths(node interface{}, steps []JSONStep, results *[]JSONNode, currentPath string) error {
if len(steps) == 0 || node == nil {
return fmt.Errorf("cannot traverse with empty steps or nil node; steps length: %d, node: %v", len(steps), node)
}
// Skip root step
actualSteps := steps
if steps[0].Type == RootStep {
if len(steps) == 1 {
*results = append(*results, JSONNode{Value: node, Path: currentPath})
return nil
}
actualSteps = steps[1:]
}
// Process the first step
step := actualSteps[0]
remainingSteps := actualSteps[1:]
isLastStep := len(remainingSteps) == 0
switch step.Type {
case ChildStep:
m, ok := node.(map[string]interface{})
if !ok {
return fmt.Errorf("node is not a map; actual type: %T", node)
}
child, exists := m[step.Key]
if !exists {
return fmt.Errorf("key not found: %s in node at path: %s", step.Key, currentPath)
}
childPath := currentPath + "." + step.Key
if isLastStep {
*results = append(*results, JSONNode{Value: child, Path: childPath})
} else {
err := traverseWithPaths(child, remainingSteps, results, childPath)
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
}
}
case IndexStep:
arr, ok := node.([]interface{})
if !ok {
return fmt.Errorf("node is not an array; actual type: %T", node)
}
// Handle wildcard index
if step.Index == -1 {
for i, item := range arr {
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
if isLastStep {
*results = append(*results, JSONNode{Value: item, Path: itemPath})
} else {
err := traverseWithPaths(item, remainingSteps, results, itemPath)
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
}
}
}
return nil
}
// Handle specific index
if step.Index >= 0 && step.Index < len(arr) {
item := arr[step.Index]
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
if isLastStep {
*results = append(*results, JSONNode{Value: item, Path: itemPath})
} else {
err := traverseWithPaths(item, remainingSteps, results, itemPath)
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
}
}
} else {
return fmt.Errorf("index %d out of bounds for array at path: %s", step.Index, currentPath)
}
case RecursiveDescentStep:
// For recursive descent, first check direct match at this level
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
if val, exists := m[step.Key]; exists {
directPath := currentPath + "." + step.Key
if isLastStep {
*results = append(*results, JSONNode{Value: val, Path: directPath})
} else {
err := traverseWithPaths(val, remainingSteps, results, directPath)
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", directPath, err)
}
}
}
}
// For wildcard, collect this node
if step.Key == "*" && isLastStep {
*results = append(*results, JSONNode{Value: node, Path: currentPath})
}
// Then continue recursion to all children
switch n := node.(type) {
case map[string]interface{}:
for k, v := range n {
childPath := currentPath + "." + k
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
}
}
case []interface{}:
for i, v := range n {
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
}
}
}
case WildcardStep:
m, ok := node.(map[string]interface{})
if !ok {
return fmt.Errorf("node is not a map; actual type: %T", node)
}
for k, v := range m {
childPath := currentPath + "." + k
if isLastStep {
*results = append(*results, JSONNode{Value: v, Path: childPath})
} else {
err := traverseWithPaths(v, remainingSteps, results, childPath)
if err != nil {
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
}
}
}
}
return nil
}

View File

@@ -0,0 +1,577 @@
package jsonpath
import (
"reflect"
"testing"
)
func TestGetWithPathsBasic(t *testing.T) {
tests := []struct {
name string
data map[string]interface{}
path string
expected []JSONNode
error bool
}{
{
name: "simple property",
data: map[string]interface{}{
"name": "John",
"age": 30,
},
path: "$.name",
expected: []JSONNode{
{Value: "John", Path: "$.name"},
},
},
{
name: "nested property",
data: map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"age": 30,
},
},
path: "$.user.name",
expected: []JSONNode{
{Value: "John", Path: "$.user.name"},
},
},
{
name: "array access",
data: map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"name": "John", "age": 30},
map[string]interface{}{"name": "Jane", "age": 25},
},
},
path: "$.users[1].name",
expected: []JSONNode{
{Value: "Jane", Path: "$.users[1].name"},
},
},
{
name: "wildcard",
data: map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"name": "John", "age": 30},
map[string]interface{}{"name": "Jane", "age": 25},
},
},
path: "$.users[*].name",
expected: []JSONNode{
{Value: "John", Path: "$.users[0].name"},
{Value: "Jane", Path: "$.users[1].name"},
},
},
{
name: "recursive descent",
data: map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"profile": map[string]interface{}{
"email": "john@example.com",
},
},
"admin": map[string]interface{}{
"email": "admin@example.com",
},
},
path: "$..email",
expected: []JSONNode{
{Value: "john@example.com", Path: "$.user.profile.email"},
{Value: "admin@example.com", Path: "$.admin.email"},
},
},
{
name: "nonexistent path",
data: map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
},
},
path: "$.user.email",
expected: []JSONNode{},
error: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := Get(tt.data, tt.path)
if err != nil {
if !tt.error {
t.Errorf("GetWithPaths() returned error: %v", err)
}
return
}
// For nonexistent path, we expect empty slice
if tt.name == "nonexistent path" {
if len(result) > 0 {
t.Errorf("GetWithPaths() returned %v, expected empty result", result)
}
return
}
// Check if lengths match
if len(result) != len(tt.expected) {
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
return
}
// For wildcard results, we need to check containment rather than exact order
if tt.name == "wildcard" || tt.name == "recursive descent" {
// For each expected item, check if it exists in the results by both value and path
for _, expected := range tt.expected {
found := false
for _, r := range result {
if reflect.DeepEqual(r.Value, expected.Value) && r.Path == expected.Path {
found = true
break
}
}
if !found {
t.Errorf("GetWithPaths() missing expected value: %v with path: %s", expected.Value, expected.Path)
}
}
} else {
// Otherwise check exact equality of both values and paths
for i, expected := range tt.expected {
if !reflect.DeepEqual(result[i].Value, expected.Value) {
t.Errorf("GetWithPaths() value at [%d] = %v, expected %v", i, result[i].Value, expected.Value)
}
if result[i].Path != expected.Path {
t.Errorf("GetWithPaths() path at [%d] = %s, expected %s", i, result[i].Path, expected.Path)
}
}
}
})
}
}
func TestSet(t *testing.T) {
t.Run("simple property", func(t *testing.T) {
data := map[string]interface{}{
"name": "John",
"age": 30,
}
err := Set(data, "$.name", "Jane")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
if data["name"] != "Jane" {
t.Errorf("Set() failed: expected name to be 'Jane', got %v", data["name"])
}
})
t.Run("nested property", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"age": 30,
},
}
err := Set(data, "$.user.name", "Jane")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
user, ok := data["user"].(map[string]interface{})
if !ok {
t.Fatalf("User is not a map")
}
if user["name"] != "Jane" {
t.Errorf("Set() failed: expected user.name to be 'Jane', got %v", user["name"])
}
})
t.Run("array element", func(t *testing.T) {
data := map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"name": "John", "age": 30},
map[string]interface{}{"name": "Jane", "age": 25},
},
}
err := Set(data, "$.users[0].name", "Bob")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
users, ok := data["users"].([]interface{})
if !ok {
t.Fatalf("Users is not a slice")
}
user0, ok := users[0].(map[string]interface{})
if !ok {
t.Fatalf("User is not a map")
}
if user0["name"] != "Bob" {
t.Errorf("Set() failed: expected users[0].name to be 'Bob', got %v", user0["name"])
}
})
t.Run("complex value", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"profile": map[string]interface{}{
"email": "john@example.com",
},
},
}
newProfile := map[string]interface{}{
"email": "john.doe@example.com",
"phone": "123-456-7890",
}
err := Set(data, "$.user.profile", newProfile)
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
userMap, ok := data["user"].(map[string]interface{})
if !ok {
t.Fatalf("User is not a map")
}
profile, ok := userMap["profile"].(map[string]interface{})
if !ok {
t.Fatalf("Profile is not a map")
}
if profile["email"] != "john.doe@example.com" || profile["phone"] != "123-456-7890" {
t.Errorf("Set() failed: expected profile to be updated with new values")
}
})
t.Run("create new property", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
},
}
err := Set(data, "$.user.email", "john@example.com")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
userMap, ok := data["user"].(map[string]interface{})
if !ok {
t.Fatalf("User is not a map")
}
if email, exists := userMap["email"]; !exists || email != "john@example.com" {
t.Errorf("Set() failed: expected user.email to be 'john@example.com', got %v", userMap["email"])
}
})
t.Run("create nested properties", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
},
}
err := Set(data, "$.user.contact.email", "john@example.com")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
userMap, ok := data["user"].(map[string]interface{})
if !ok {
t.Fatalf("User is not a map")
}
contact, ok := userMap["contact"].(map[string]interface{})
if !ok {
t.Fatalf("Contact is not a map")
}
if email, exists := contact["email"]; !exists || email != "john@example.com" {
t.Errorf("Set() failed: expected user.contact.email to be 'john@example.com', got %v", contact["email"])
}
})
t.Run("create array and element", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
},
}
// This should create an empty addresses array, but won't be able to set index 0
// since the array is empty
err := Set(data, "$.user.addresses[0].street", "123 Main St")
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
})
t.Run("multiple targets (should only update first)", func(t *testing.T) {
data := map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"active": true},
map[string]interface{}{"active": true},
},
}
err := Set(data, "$.users[*].active", false)
if err != nil {
t.Errorf("Set() returned error: %v", err)
return
}
users, ok := data["users"].([]interface{})
if !ok {
t.Fatalf("Users is not a slice")
}
user0, ok := users[0].(map[string]interface{})
if !ok {
t.Fatalf("User0 is not a map")
}
user1, ok := users[1].(map[string]interface{})
if !ok {
t.Fatalf("User1 is not a map")
}
// Only the first one should be changed
if active, exists := user0["active"]; !exists || active != false {
t.Errorf("Set() failed: expected users[0].active to be false, got %v", user0["active"])
}
// The second one should remain unchanged
if active, exists := user1["active"]; !exists || active != true {
t.Errorf("Set() incorrectly modified users[1].active: expected true, got %v", user1["active"])
}
})
t.Run("setting on root should fail", func(t *testing.T) {
data := map[string]interface{}{
"name": "John",
}
err := Set(data, "$", "Jane")
if err == nil {
t.Errorf("Set() returned no error, expected error for setting on root")
return
}
// Data should be unchanged
if data["name"] != "John" {
t.Errorf("Data was modified when setting on root")
}
})
}
func TestSetAll(t *testing.T) {
t.Run("simple property", func(t *testing.T) {
data := map[string]interface{}{
"name": "John",
"age": 30,
}
err := SetAll(data, "$.name", "Jane")
if err != nil {
t.Errorf("SetAll() returned error: %v", err)
return
}
if data["name"] != "Jane" {
t.Errorf("SetAll() failed: expected name to be 'Jane', got %v", data["name"])
}
})
t.Run("all array elements", func(t *testing.T) {
data := map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"active": true},
map[string]interface{}{"active": true},
},
}
err := SetAll(data, "$.users[*].active", false)
if err != nil {
t.Errorf("SetAll() returned error: %v", err)
return
}
users, ok := data["users"].([]interface{})
if !ok {
t.Fatalf("Users is not a slice")
}
// Both elements should be updated
for i, user := range users {
userMap, ok := user.(map[string]interface{})
if !ok {
t.Fatalf("User%d is not a map", i)
}
if active, exists := userMap["active"]; !exists || active != false {
t.Errorf("SetAll() failed: expected users[%d].active to be false, got %v", i, userMap["active"])
}
}
})
t.Run("recursive descent", func(t *testing.T) {
data := map[string]interface{}{
"user": map[string]interface{}{
"profile": map[string]interface{}{
"active": true,
},
},
"admin": map[string]interface{}{
"profile": map[string]interface{}{
"active": true,
},
},
}
err := SetAll(data, "$..active", false)
if err != nil {
t.Errorf("SetAll() returned error: %v", err)
return
}
// Check user profile
userProfile, ok := data["user"].(map[string]interface{})["profile"].(map[string]interface{})
if !ok {
t.Fatalf("Failed to access user.profile")
}
if active, exists := userProfile["active"]; !exists || active != false {
t.Errorf("SetAll() didn't update user.profile.active, got: %v", active)
}
// Check admin profile
adminProfile, ok := data["admin"].(map[string]interface{})["profile"].(map[string]interface{})
if !ok {
t.Fatalf("Failed to access admin.profile")
}
if active, exists := adminProfile["active"]; !exists || active != false {
t.Errorf("SetAll() didn't update admin.profile.active, got: %v", active)
}
})
}
func TestGetWithPathsExtended(t *testing.T) {
tests := []struct {
name string
data map[string]interface{}
path string
expected []JSONNode
}{
{
name: "simple property",
data: map[string]interface{}{
"name": "John",
"age": 30,
},
path: "$.name",
expected: []JSONNode{
{Value: "John", Path: "$.name"},
},
},
{
name: "nested property",
data: map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"age": 30,
},
},
path: "$.user.name",
expected: []JSONNode{
{Value: "John", Path: "$.user.name"},
},
},
{
name: "array access",
data: map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"name": "John", "age": 30},
map[string]interface{}{"name": "Jane", "age": 25},
},
},
path: "$.users[1].name",
expected: []JSONNode{
{Value: "Jane", Path: "$.users[1].name"},
},
},
{
name: "wildcard",
data: map[string]interface{}{
"users": []interface{}{
map[string]interface{}{"name": "John", "age": 30},
map[string]interface{}{"name": "Jane", "age": 25},
},
},
path: "$.users[*].name",
expected: []JSONNode{
{Value: "John", Path: "$.users[0].name"},
{Value: "Jane", Path: "$.users[1].name"},
},
},
{
name: "recursive descent",
data: map[string]interface{}{
"user": map[string]interface{}{
"name": "John",
"profile": map[string]interface{}{
"email": "john@example.com",
},
},
"admin": map[string]interface{}{
"email": "admin@example.com",
},
},
path: "$..email",
expected: []JSONNode{
{Value: "john@example.com", Path: "$.user.profile.email"},
{Value: "admin@example.com", Path: "$.admin.email"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := Get(tt.data, tt.path)
if err != nil {
t.Errorf("GetWithPaths() returned error: %v", err)
return
}
// Check if lengths match
if len(result) != len(tt.expected) {
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
return
}
// For each expected item, find its match in the results and verify both value and path
for _, expected := range tt.expected {
found := false
for _, r := range result {
// Check if value matches
if reflect.DeepEqual(r.Value, expected.Value) {
found = true
// Check if path matches
if r.Path != expected.Path {
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
}
break
}
}
if !found {
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
}
}
})
}
}

View File

@@ -0,0 +1,318 @@
package jsonpath
import (
"reflect"
"testing"
)
var testData = map[string]interface{}{
"store": map[string]interface{}{
"book": []interface{}{
map[string]interface{}{
"title": "The Fellowship of the Ring",
"price": 22.99,
},
map[string]interface{}{
"title": "The Two Towers",
"price": 23.45,
},
},
"bicycle": map[string]interface{}{
"color": "red",
"price": 199.95,
},
},
}
func TestParser(t *testing.T) {
tests := []struct {
path string
steps []JSONStep
wantErr bool
}{
{
path: "$.store.bicycle.color",
steps: []JSONStep{
{Type: RootStep},
{Type: ChildStep, Key: "store"},
{Type: ChildStep, Key: "bicycle"},
{Type: ChildStep, Key: "color"},
},
},
{
path: "$..price",
steps: []JSONStep{
{Type: RootStep},
{Type: RecursiveDescentStep, Key: "price"},
},
},
{
path: "$.store.book[*].title",
steps: []JSONStep{
{Type: RootStep},
{Type: ChildStep, Key: "store"},
{Type: ChildStep, Key: "book"},
{Type: IndexStep, Index: -1}, // Wildcard
{Type: ChildStep, Key: "title"},
},
},
{
path: "$.store.book[0]",
steps: []JSONStep{
{Type: RootStep},
{Type: ChildStep, Key: "store"},
{Type: ChildStep, Key: "book"},
{Type: IndexStep, Index: 0},
},
},
{
path: "invalid.path",
wantErr: true,
},
{
path: "$.store.book[abc]",
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
steps, err := ParseJSONPath(tt.path)
if (err != nil) != tt.wantErr {
t.Fatalf("ParseJSONPath() error = %v, wantErr %v", err, tt.wantErr)
}
if !tt.wantErr && !reflect.DeepEqual(steps, tt.steps) {
t.Errorf("ParseJSONPath() steps = %+v, want %+v", steps, tt.steps)
}
})
}
}
func TestEvaluator(t *testing.T) {
tests := []struct {
name string
path string
expected []JSONNode
error bool
}{
{
name: "simple_property_access",
path: "$.store.bicycle.color",
expected: []JSONNode{
{Value: "red", Path: "$.store.bicycle.color"},
},
},
{
name: "array_index_access",
path: "$.store.book[0].title",
expected: []JSONNode{
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
},
},
{
name: "wildcard_array_access",
path: "$.store.book[*].title",
expected: []JSONNode{
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
{Value: "The Two Towers", Path: "$.store.book[1].title"},
},
},
{
name: "recursive_price_search",
path: "$..price",
expected: []JSONNode{
{Value: 22.99, Path: "$.store.book[0].price"},
{Value: 23.45, Path: "$.store.book[1].price"},
{Value: 199.95, Path: "$.store.bicycle.price"},
},
},
{
name: "wildcard_recursive",
path: "$..*",
expected: []JSONNode{
// These will be compared by value only, paths will be validated separately
{Value: testData["store"].(map[string]interface{})["book"]},
{Value: testData["store"].(map[string]interface{})["bicycle"]},
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[0]},
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[1]},
{Value: "The Fellowship of the Ring"},
{Value: 22.99},
{Value: "The Two Towers"},
{Value: 23.45},
{Value: "red"},
{Value: 199.95},
},
},
{
name: "invalid_index",
path: "$.store.book[5]",
expected: []JSONNode{},
error: true,
},
{
name: "nonexistent_property",
path: "$.store.nonexistent",
expected: []JSONNode{},
error: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Use GetWithPaths directly
result, err := Get(testData, tt.path)
if err != nil {
if !tt.error {
t.Errorf("Get() returned error: %v", err)
}
return
}
// Special handling for wildcard recursive test
if tt.name == "wildcard_recursive" {
// Skip length check for wildcard recursive since it might vary
// Just verify that each expected item is in the results
// Validate values match and paths are filled in
for _, e := range tt.expected {
found := false
for _, r := range result {
if reflect.DeepEqual(r.Value, e.Value) {
found = true
break
}
}
if !found {
t.Errorf("Expected value %v not found in results", e.Value)
}
}
return
}
if len(result) != len(tt.expected) {
t.Errorf("Expected %d items, got %d", len(tt.expected), len(result))
}
// Validate both values and paths
for i, e := range tt.expected {
if i < len(result) {
if !reflect.DeepEqual(result[i].Value, e.Value) {
t.Errorf("Value at [%d]: got %v, expected %v", i, result[i].Value, e.Value)
}
if result[i].Path != e.Path {
t.Errorf("Path at [%d]: got %s, expected %s", i, result[i].Path, e.Path)
}
}
}
})
}
}
func TestEdgeCases(t *testing.T) {
t.Run("empty_data", func(t *testing.T) {
result, err := Get(nil, "$.a.b")
if err == nil {
t.Errorf("Expected error for empty data")
return
}
if len(result) > 0 {
t.Errorf("Expected empty result, got %v", result)
}
})
t.Run("empty_path", func(t *testing.T) {
_, err := ParseJSONPath("")
if err == nil {
t.Error("Expected error for empty path")
}
})
t.Run("numeric_keys", func(t *testing.T) {
data := map[string]interface{}{
"42": "answer",
}
result, err := Get(data, "$.42")
if err != nil {
t.Errorf("Get() returned error: %v", err)
return
}
if len(result) == 0 || result[0].Value != "answer" {
t.Errorf("Expected 'answer', got %v", result)
}
})
}
func TestGetWithPaths(t *testing.T) {
tests := []struct {
name string
path string
expected []JSONNode
}{
{
name: "simple_property_access",
path: "$.store.bicycle.color",
expected: []JSONNode{
{Value: "red", Path: "$.store.bicycle.color"},
},
},
{
name: "array_index_access",
path: "$.store.book[0].title",
expected: []JSONNode{
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
},
},
{
name: "wildcard_array_access",
path: "$.store.book[*].title",
expected: []JSONNode{
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
{Value: "The Two Towers", Path: "$.store.book[1].title"},
},
},
{
name: "recursive_price_search",
path: "$..price",
expected: []JSONNode{
{Value: 22.99, Path: "$.store.book[0].price"},
{Value: 23.45, Path: "$.store.book[1].price"},
{Value: 199.95, Path: "$.store.bicycle.price"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := Get(testData, tt.path)
if err != nil {
t.Errorf("Get() returned error: %v", err)
return
}
// Check if lengths match
if len(result) != len(tt.expected) {
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
return
}
// For each expected item, find its match in the results and verify both value and path
for _, expected := range tt.expected {
found := false
for _, r := range result {
// First verify the value matches
if reflect.DeepEqual(r.Value, expected.Value) {
found = true
// Then verify the path matches
if r.Path != expected.Path {
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
}
break
}
}
if !found {
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
}
}
})
}
}

View File

@@ -2,210 +2,128 @@ package processor
import (
"fmt"
"io"
"net/http"
"regexp"
"strings"
"cook/utils"
logger "git.site.quack-lab.dev/dave/cylogger"
lua "github.com/yuin/gopher-lua"
)
// processorLogger is a scoped logger for the processor package.
var processorLogger = logger.Default.WithPrefix("processor")
// Maybe we make this an interface again for the shits and giggles
// We will see, it could easily be...
var globalVariables = map[string]interface{}{}
func SetVariables(vars map[string]interface{}) {
for k, v := range vars {
globalVariables[k] = v
}
}
func NewLuaState() (*lua.LState, error) {
newLStateLogger := processorLogger.WithPrefix("NewLuaState")
newLStateLogger.Debug("Creating new Lua state")
L := lua.NewState()
// defer L.Close()
// Load math library
newLStateLogger.Debug("Loading Lua math library")
L.Push(L.GetGlobal("require"))
L.Push(lua.LString("math"))
if err := L.PCall(1, 1, nil); err != nil {
newLStateLogger.Error("Failed to load Lua math library: %v", err)
return nil, fmt.Errorf("error loading Lua math library: %v", err)
}
newLStateLogger.Debug("Lua math library loaded")
// Initialize helper functions
newLStateLogger.Debug("Initializing Lua helper functions")
if err := InitLuaHelpers(L); err != nil {
newLStateLogger.Error("Failed to initialize Lua helper functions: %v", err)
return nil, err
}
newLStateLogger.Debug("Lua helper functions initialized")
// Inject global variables
if len(globalVariables) > 0 {
newLStateLogger.Debug("Injecting %d global variables into Lua state", len(globalVariables))
for k, v := range globalVariables {
switch val := v.(type) {
case int:
L.SetGlobal(k, lua.LNumber(float64(val)))
case int64:
L.SetGlobal(k, lua.LNumber(float64(val)))
case float32:
L.SetGlobal(k, lua.LNumber(float64(val)))
case float64:
L.SetGlobal(k, lua.LNumber(val))
case string:
L.SetGlobal(k, lua.LString(val))
case bool:
if val {
L.SetGlobal(k, lua.LTrue)
} else {
L.SetGlobal(k, lua.LFalse)
}
default:
// Fallback to string representation
L.SetGlobal(k, lua.LString(fmt.Sprintf("%v", val)))
}
}
}
newLStateLogger.Debug("New Lua state created successfully")
return L, nil
}
// FromLua converts a Lua table to a struct or map recursively
func FromLua(L *lua.LState, luaValue lua.LValue) (interface{}, error) {
fromLuaLogger := processorLogger.WithPrefix("FromLua").WithField("luaType", luaValue.Type().String())
fromLuaLogger.Debug("Converting Lua value to Go interface")
switch v := luaValue.(type) {
case *lua.LTable:
fromLuaLogger.Debug("Processing Lua table")
isArray, err := IsLuaTableArray(L, v)
if err != nil {
fromLuaLogger.Error("Failed to determine if Lua table is array: %v", err)
return nil, err
}
fromLuaLogger.Debug("Lua table is array: %t", isArray)
if isArray {
fromLuaLogger.Debug("Converting Lua table to Go array")
result := make([]interface{}, 0)
v.ForEach(func(key lua.LValue, value lua.LValue) {
converted, _ := FromLua(L, value)
result = append(result, converted)
})
fromLuaLogger.Trace("Converted Go array: %v", result)
return result, nil
} else {
fromLuaLogger.Debug("Converting Lua table to Go map")
result := make(map[string]interface{})
v.ForEach(func(key lua.LValue, value lua.LValue) {
converted, _ := FromLua(L, value)
result[key.String()] = converted
})
fromLuaLogger.Trace("Converted Go map: %v", result)
return result, nil
}
case lua.LString:
fromLuaLogger.Debug("Converting Lua string to Go string")
fromLuaLogger.Trace("Lua string: %q", string(v))
return string(v), nil
case lua.LBool:
fromLuaLogger.Debug("Converting Lua boolean to Go boolean")
fromLuaLogger.Trace("Lua boolean: %t", bool(v))
return bool(v), nil
case lua.LNumber:
fromLuaLogger.Debug("Converting Lua number to Go float64")
fromLuaLogger.Trace("Lua number: %f", float64(v))
return float64(v), nil
default:
fromLuaLogger.Debug("Unsupported Lua type, returning nil")
return nil, nil
}
}
func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
isLuaTableArrayLogger := processorLogger.WithPrefix("IsLuaTableArray")
isLuaTableArrayLogger.Debug("Checking if Lua table is an array")
isLuaTableArrayLogger.Trace("Lua table input: %v", v)
L.SetGlobal("table_to_check", v)
// Use our predefined helper function from InitLuaHelpers
err := L.DoString(`is_array = isArray(table_to_check)`)
if err != nil {
isLuaTableArrayLogger.Error("Error determining if table is an array: %v", err)
return false, fmt.Errorf("error determining if table is array: %w", err)
}
default:
result[key.String()] = nil
}
})
// Check the result of our Lua function
isArray := L.GetGlobal("is_array")
// LVIsFalse returns true if the given LValue is nil or false, and false otherwise.
result := !lua.LVIsFalse(isArray)
isLuaTableArrayLogger.Debug("Lua table is array: %t", result)
isLuaTableArrayLogger.Trace("isArray result Lua value: %v", isArray)
return result, nil return result, nil
} }
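The array-versus-map decision is the crux of both conversion paths above. A simplified standalone sketch (the inline `isArray` below is an illustrative Go reimplementation, not the package helper):

```go
package main

import (
	"fmt"

	lua "github.com/yuin/gopher-lua"
)

// isArray is a condensed restatement of the check above: every key must be a
// positive integer and the highest key must equal the number of entries.
func isArray(t *lua.LTable) bool {
	max, count, ok := 0, 0, true
	t.ForEach(func(k, _ lua.LValue) {
		n, isNum := k.(lua.LNumber)
		if !isNum || float64(n) != float64(int(n)) || int(n) < 1 {
			ok = false
			return
		}
		if int(n) > max {
			max = int(n)
		}
		count++
	})
	return ok && max == count
}

func main() {
	L := lua.NewState()
	defer L.Close()
	if err := L.DoString(`arr = {1, 2, 3}; obj = {price = 19.99, name = "Widget"}`); err != nil {
		panic(err)
	}
	fmt.Println(isArray(L.GetGlobal("arr").(*lua.LTable))) // true  -> becomes []interface{}
	fmt.Println(isArray(L.GetGlobal("obj").(*lua.LTable))) // false -> becomes map[string]interface{}
}
```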
// InitLuaHelpers initializes common Lua helper functions // InitLuaHelpers initializes common Lua helper functions
func InitLuaHelpers(L *lua.LState) error { func InitLuaHelpers(L *lua.LState) error {
initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
initLuaHelpersLogger.Debug("Loading Lua helper functions")
helperScript := ` helperScript := `
-- Custom Lua helpers for math operations -- Custom Lua helpers for math operations
function min(a, b) return math.min(a, b) end function min(a, b) return math.min(a, b) end
function max(a, b) return math.max(a, b) end function max(a, b) return math.max(a, b) end
function round(x, n) function round(x) return math.floor(x + 0.5) end
if n == nil then n = 0 end
return math.floor(x * 10^n + 0.5) / 10^n
end
function floor(x) return math.floor(x) end function floor(x) return math.floor(x) end
function ceil(x) return math.ceil(x) end function ceil(x) return math.ceil(x) end
function upper(s) return string.upper(s) end function upper(s) return string.upper(s) end
function lower(s) return string.lower(s) end function lower(s) return string.lower(s) end
function format(s, ...) return string.format(s, ...) end
function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end
-- String split helper
function strsplit(inputstr, sep)
if sep == nil then
sep = "%s"
end
local t = {}
for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
table.insert(t, str)
end
return t
end
---@param table table
---@param depth number?
function DumpTable(table, depth)
if depth == nil then
depth = 0
end
if (depth > 200) then
print("Error: Depth > 200 in dumpTable()")
return
end
for k, v in pairs(table) do
if (type(v) == "table") then
print(string.rep(" ", depth) .. k .. ":")
DumpTable(v, depth + 1)
else
print(string.rep(" ", depth) .. k .. ": ", v)
end
end
end
-- String to number conversion helper -- String to number conversion helper
function num(str) function num(str)
@@ -221,39 +139,26 @@ end
function is_number(str) function is_number(str)
return tonumber(str) ~= nil return tonumber(str) ~= nil
end end
function isArray(t)
if type(t) ~= "table" then return false end
local max = 0
local count = 0
for k, _ in pairs(t) do
if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then
return false
end
max = math.max(max, k)
count = count + 1
end
return max == count
end
modified = false
` `
if err := L.DoString(helperScript); err != nil { if err := L.DoString(helperScript); err != nil {
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
return fmt.Errorf("error loading helper functions: %v", err) return fmt.Errorf("error loading helper functions: %v", err)
} }
initLuaHelpersLogger.Debug("Lua helper functions loaded")
initLuaHelpersLogger.Debug("Setting up Lua print function to Go")
L.SetGlobal("print", L.NewFunction(printToGo))
L.SetGlobal("fetch", L.NewFunction(fetch))
initLuaHelpersLogger.Debug("Lua print and fetch functions bound to Go")
return nil return nil
} }
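To show what these helpers look like from the expression side, a runnable sketch that loads just `round` (with the optional precision argument) and `strsplit` from the script above into a fresh state; everything else in the snippet is made up for illustration.

```go
package main

import (
	lua "github.com/yuin/gopher-lua"
)

const helpers = `
function round(x, n)
    if n == nil then n = 0 end
    return math.floor(x * 10^n + 0.5) / 10^n
end
function strsplit(inputstr, sep)
    if sep == nil then sep = "%s" end
    local t = {}
    for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
        table.insert(t, str)
    end
    return t
end
`

func main() {
	L := lua.NewState()
	defer L.Close()
	if err := L.DoString(helpers); err != nil {
		panic(err)
	}
	if err := L.DoString(`
		print(round(44.957, 2))                          -- 44.96
		local parts = strsplit("sale,discount,new", ",")
		print(#parts, parts[1])                          -- 3	sale
	`); err != nil {
		panic(err)
	}
}
```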
func PrependLuaAssignment(luaExpr string) string { // Helper utility functions
prependLuaAssignmentLogger := processorLogger.WithPrefix("PrependLuaAssignment").WithField("originalLuaExpr", luaExpr)
prependLuaAssignmentLogger.Debug("Prepending Lua assignment if necessary") // LimitString truncates a string to maxLen and adds "..." if truncated
func LimitString(s string, maxLen int) string {
s = strings.ReplaceAll(s, "\n", "\\n")
if len(s) <= maxLen {
return s
}
return s[:maxLen-3] + "..."
}
// BuildLuaScript prepares a Lua expression from shorthand notation
func BuildLuaScript(luaExpr string) string {
// Auto-prepend v1 for expressions starting with operators // Auto-prepend v1 for expressions starting with operators
if strings.HasPrefix(luaExpr, "*") || if strings.HasPrefix(luaExpr, "*") ||
strings.HasPrefix(luaExpr, "/") || strings.HasPrefix(luaExpr, "/") ||
@@ -262,202 +167,31 @@ func PrependLuaAssignment(luaExpr string) string {
strings.HasPrefix(luaExpr, "^") || strings.HasPrefix(luaExpr, "^") ||
strings.HasPrefix(luaExpr, "%") { strings.HasPrefix(luaExpr, "%") {
luaExpr = "v1 = v1" + luaExpr luaExpr = "v1 = v1" + luaExpr
prependLuaAssignmentLogger.Debug("Prepended 'v1 = v1' due to operator prefix")
} else if strings.HasPrefix(luaExpr, "=") { } else if strings.HasPrefix(luaExpr, "=") {
// Handle direct assignment with = operator // Handle direct assignment with = operator
luaExpr = "v1 " + luaExpr luaExpr = "v1 " + luaExpr
prependLuaAssignmentLogger.Debug("Prepended 'v1' due to direct assignment operator")
} }
// Add assignment if needed // Add assignment if needed
if !strings.Contains(luaExpr, "=") { if !strings.Contains(luaExpr, "=") {
luaExpr = "v1 = " + luaExpr luaExpr = "v1 = " + luaExpr
prependLuaAssignmentLogger.Debug("Prepended 'v1 =' as no assignment was found")
} }
prependLuaAssignmentLogger.Trace("Final Lua expression after prepending: %q", luaExpr)
return luaExpr return luaExpr
} }
// BuildLuaScript prepares a Lua expression from shorthand notation // Max returns the maximum of two integers
func BuildLuaScript(luaExpr string) string { func Max(a, b int) int {
buildLuaScriptLogger := processorLogger.WithPrefix("BuildLuaScript").WithField("inputLuaExpr", luaExpr) if a > b {
buildLuaScriptLogger.Debug("Building full Lua script from expression") return a
}
// Perform $var substitutions from globalVariables return b
luaExpr = replaceVariables(luaExpr)
luaExpr = PrependLuaAssignment(luaExpr)
fullScript := fmt.Sprintf(`
function run()
%s
end
local res = run()
modified = res == nil or res
`, luaExpr)
buildLuaScriptLogger.Trace("Generated full Lua script: %q", utils.LimitString(fullScript, 200))
return fullScript
} }
func replaceVariables(expr string) string { // Min returns the minimum of two integers
// $varName -> literal value func Min(a, b int) int {
varNameRe := regexp.MustCompile(`\$(\w+)`) if a < b {
return varNameRe.ReplaceAllStringFunc(expr, func(m string) string { return a
name := varNameRe.FindStringSubmatch(m)[1] }
if v, ok := globalVariables[name]; ok { return b
switch val := v.(type) {
case int, int64, float32, float64:
return fmt.Sprintf("%v", val)
case bool:
if val {
return "true"
} else {
return "false"
}
case string:
// Quote strings for Lua literal
return fmt.Sprintf("%q", val)
default:
return fmt.Sprintf("%q", fmt.Sprintf("%v", val))
}
}
return m
})
}
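A condensed sketch of the shorthand expansion described above; `expandShorthand` is a hypothetical stand-in for PrependLuaAssignment (the operator list is abbreviated to the prefixes visible in this hunk), and the real BuildLuaScript additionally substitutes $name variables and wraps the result in the run() wrapper that drives the modified flag.

```go
package main

import (
	"fmt"
	"strings"
)

// expandShorthand condenses the rules above: an operator prefix gets
// "v1 = v1" prepended, a leading "=" gets "v1", and an expression with no
// assignment at all becomes an assignment to v1.
func expandShorthand(expr string) string {
	switch {
	case strings.HasPrefix(expr, "*"), strings.HasPrefix(expr, "/"),
		strings.HasPrefix(expr, "^"), strings.HasPrefix(expr, "%"):
		expr = "v1 = v1" + expr
	case strings.HasPrefix(expr, "="):
		expr = "v1 " + expr
	}
	if !strings.Contains(expr, "=") {
		expr = "v1 = " + expr
	}
	return expr
}

func main() {
	for _, e := range []string{"*2", "= 5", "round(v1*1.5, 2)"} {
		fmt.Printf("%-20q -> %q\n", e, expandShorthand(e))
	}
	// "*2"                 -> "v1 = v1*2"
	// "= 5"                -> "v1 = 5"
	// "round(v1*1.5, 2)"   -> "v1 = round(v1*1.5, 2)"
}
```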
func printToGo(L *lua.LState) int {
printToGoLogger := processorLogger.WithPrefix("printToGo")
printToGoLogger.Debug("Lua print function called, redirecting to Go logger")
top := L.GetTop()
args := make([]interface{}, top)
for i := 1; i <= top; i++ {
args[i-1] = L.Get(i)
}
// Format the message with proper spacing between arguments
var parts []string
for _, arg := range args {
parts = append(parts, fmt.Sprintf("%v", arg))
}
message := strings.Join(parts, " ")
printToGoLogger.Trace("Lua print message: %q", message)
// Use the LUA log level with a script tag
logger.Lua("%s", message)
printToGoLogger.Debug("Message logged from Lua")
return 0
}
func fetch(L *lua.LState) int {
fetchLogger := processorLogger.WithPrefix("fetch")
fetchLogger.Debug("Lua fetch function called")
// Get URL from first argument
url := L.ToString(1)
if url == "" {
fetchLogger.Error("Fetch failed: URL is required")
L.Push(lua.LNil)
L.Push(lua.LString("URL is required"))
return 2
}
fetchLogger.Debug("Fetching URL: %q", url)
// Get options from second argument if provided
var method string = "GET"
var headers map[string]string = make(map[string]string)
var body string = ""
if L.GetTop() > 1 {
options := L.ToTable(2)
if options != nil {
fetchLogger.Debug("Processing fetch options")
// Get method
if methodVal := options.RawGetString("method"); methodVal != lua.LNil {
method = methodVal.String()
fetchLogger.Trace("Method from options: %q", method)
}
// Get headers
if headersVal := options.RawGetString("headers"); headersVal != lua.LNil {
if headersTable, ok := headersVal.(*lua.LTable); ok {
fetchLogger.Trace("Processing headers table")
headersTable.ForEach(func(key lua.LValue, value lua.LValue) {
headers[key.String()] = value.String()
fetchLogger.Trace("Header: %q = %q", key.String(), value.String())
})
}
fetchLogger.Trace("All headers: %v", headers)
}
// Get body
if bodyVal := options.RawGetString("body"); bodyVal != lua.LNil {
body = bodyVal.String()
fetchLogger.Trace("Body from options: %q", utils.LimitString(body, 100))
}
}
}
fetchLogger.Debug("Fetch request details: Method=%q, URL=%q, BodyLength=%d, Headers=%v", method, url, len(body), headers)
// Create HTTP request
req, err := http.NewRequest(method, url, strings.NewReader(body))
if err != nil {
fetchLogger.Error("Error creating HTTP request: %v", err)
L.Push(lua.LNil)
L.Push(lua.LString(fmt.Sprintf("Error creating request: %v", err)))
return 2
}
// Set headers
for key, value := range headers {
req.Header.Set(key, value)
}
fetchLogger.Debug("HTTP request created and headers set")
fetchLogger.Trace("HTTP Request: %+v", req)
// Make request
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
fetchLogger.Error("Error making HTTP request: %v", err)
L.Push(lua.LNil)
L.Push(lua.LString(fmt.Sprintf("Error making request: %v", err)))
return 2
}
defer func() {
fetchLogger.Debug("Closing HTTP response body")
resp.Body.Close()
}()
fetchLogger.Debug("HTTP request executed. Status Code: %d", resp.StatusCode)
// Read response body
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
fetchLogger.Error("Error reading response body: %v", err)
L.Push(lua.LNil)
L.Push(lua.LString(fmt.Sprintf("Error reading response: %v", err)))
return 2
}
fetchLogger.Trace("Response body length: %d", len(bodyBytes))
// Create response table
responseTable := L.NewTable()
responseTable.RawSetString("status", lua.LNumber(resp.StatusCode))
responseTable.RawSetString("statusText", lua.LString(resp.Status))
responseTable.RawSetString("ok", lua.LBool(resp.StatusCode >= 200 && resp.StatusCode < 300))
responseTable.RawSetString("body", lua.LString(string(bodyBytes)))
fetchLogger.Debug("Created Lua response table")
// Set headers in response
headersTable := L.NewTable()
for key, values := range resp.Header {
headersTable.RawSetString(key, lua.LString(values[0]))
fetchLogger.Trace("Response header: %q = %q", key, values[0])
}
responseTable.RawSetString("headers", headersTable)
fetchLogger.Trace("Full response table: %v", responseTable)
L.Push(responseTable)
fetchLogger.Debug("Pushed response table to Lua stack")
return 1
} }
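fetch is the one binding scripts use to reach outside the file being cooked. The standalone sketch below registers a trimmed-down GET-only version against a local httptest server, just to show the call shape and the response table fields (status, ok, body); the handler and everything else here are made-up stand-ins, not the binding above.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"

	lua "github.com/yuin/gopher-lua"
)

func main() {
	// Throwaway local server so the example runs without network access.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, `{"ok":true}`)
	}))
	defer srv.Close()

	L := lua.NewState()
	defer L.Close()

	// GET-only stand-in for the real fetch binding; no method, headers or body options.
	L.SetGlobal("fetch", L.NewFunction(func(L *lua.LState) int {
		resp, err := http.Get(L.ToString(1))
		if err != nil {
			L.Push(lua.LNil)
			L.Push(lua.LString(err.Error()))
			return 2
		}
		defer resp.Body.Close()
		body, _ := io.ReadAll(resp.Body)
		t := L.NewTable()
		t.RawSetString("status", lua.LNumber(resp.StatusCode))
		t.RawSetString("ok", lua.LBool(resp.StatusCode >= 200 && resp.StatusCode < 300))
		t.RawSetString("body", lua.LString(body))
		L.Push(t)
		return 1
	}))

	script := fmt.Sprintf(`
		local resp, err = fetch(%q)
		if resp and resp.ok then
			print(resp.status, resp.body)
		else
			print("fetch failed:", err)
		end
	`, srv.URL)
	if err := L.DoString(script); err != nil {
		panic(err)
	}
}
```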

View File

@@ -1,108 +1,134 @@
package processor package processor
import ( import (
"cook/utils"
"fmt" "fmt"
"os"
"path/filepath"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
"time"
logger "git.site.quack-lab.dev/dave/cylogger"
lua "github.com/yuin/gopher-lua" lua "github.com/yuin/gopher-lua"
) )
// regexLogger is a scoped logger for the processor/regex package. // RegexProcessor implements the Processor interface using regex patterns
var regexLogger = logger.Default.WithPrefix("processor/regex") type RegexProcessor struct{}
type CaptureGroup struct { // Process implements the Processor interface for RegexProcessor
Name string func (p *RegexProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
Value string // Read file content
Updated string fullPath := filepath.Join(".", filename)
Range [2]int content, err := os.ReadFile(fullPath)
if err != nil {
return 0, 0, fmt.Errorf("error reading file: %v", err)
}
fileContent := string(content)
// Process the content
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
if err != nil {
return 0, 0, err
}
// If we made modifications, save the file
if modCount > 0 {
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
if err != nil {
return 0, 0, fmt.Errorf("error writing file: %v", err)
}
}
return modCount, matchCount, nil
}
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
func (p *RegexProcessor) ToLua(L *lua.LState, data interface{}) error {
captures, ok := data.([]string)
if !ok {
return fmt.Errorf("expected []string for captures, got %T", data)
}
// Set variables for each capture group, starting from v1/s1 for the first capture
for i := 0; i < len(captures); i++ {
// Set string version (always available as s1, s2, etc.)
L.SetGlobal(fmt.Sprintf("s%d", i+1), lua.LString(captures[i]))
// Try to convert to number and set v1, v2, etc.
if val, err := strconv.ParseFloat(captures[i], 64); err == nil {
L.SetGlobal(fmt.Sprintf("v%d", i+1), lua.LNumber(val))
}
}
return nil
}
// FromLua implements the Processor interface for RegexProcessor
func (p *RegexProcessor) FromLua(L *lua.LState) (interface{}, error) {
// Get the modified values after Lua execution
modifications := make(map[int]string)
// Check for modifications to v1-v12 and s1-s12
for i := 0; i < 12; i++ {
// Check both v and s variables to see if any were modified
vVarName := fmt.Sprintf("v%d", i+1)
sVarName := fmt.Sprintf("s%d", i+1)
vLuaVal := L.GetGlobal(vVarName)
sLuaVal := L.GetGlobal(sVarName)
// If our value is a number then it's very likely we want it to be a number
// And not a string
// If we do want it to be a string we will cast it into a string in lua
// wait that wouldn't work... Casting v to a string would not load it here
if vLuaVal.Type() == lua.LTNumber {
modifications[i] = vLuaVal.String()
continue
}
if sLuaVal.Type() == lua.LTString {
modifications[i] = sLuaVal.String()
continue
}
}
return modifications, nil
} }
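End to end, the convention those two methods implement looks like this: captures become s1, s2, … (and v1, v2, … when they parse as numbers), the expression runs, and numeric globals are read back. A simplified standalone round trip, not the processor itself:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"

	lua "github.com/yuin/gopher-lua"
)

func main() {
	content := `<item price="10.50"/>`
	re := regexp.MustCompile(`price="(\d+\.?\d*)"`)
	captures := re.FindStringSubmatch(content)[1:] // ["10.50"]

	L := lua.NewState()
	defer L.Close()

	// ToLua: s1 always, v1 only when the capture parses as a number.
	for i, c := range captures {
		L.SetGlobal(fmt.Sprintf("s%d", i+1), lua.LString(c))
		if f, err := strconv.ParseFloat(c, 64); err == nil {
			L.SetGlobal(fmt.Sprintf("v%d", i+1), lua.LNumber(f))
		}
	}

	// The user's expression, already expanded to a full assignment.
	if err := L.DoString(`v1 = v1 * 2`); err != nil {
		panic(err)
	}

	// FromLua: numbers win over strings when both were set.
	if v := L.GetGlobal("v1"); v.Type() == lua.LTNumber {
		fmt.Println("new value:", v.String()) // new value: 21
	}
}
```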
// ProcessContent applies regex replacement with Lua processing // ProcessContent applies regex replacement with Lua processing
// The filename here exists ONLY so we can pass it to the lua environment func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
// It's not used for anything else // Handle special pattern modifications
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) { if !strings.HasPrefix(pattern, "(?s)") {
processRegexLogger := regexLogger.WithPrefix("ProcessRegex").WithField("commandName", command.Name).WithField("file", filename) pattern = "(?s)" + pattern
processRegexLogger.Debug("Starting regex processing for file") }
processRegexLogger.Trace("Initial file content length: %d", len(content))
processRegexLogger.Trace("Command details: %+v", command)
var commands []utils.ReplaceCommand
// Start timing the regex processing
startTime := time.Now()
// We don't HAVE to do this multiple times for a pattern
// But it's quick enough for us to not care
pattern := resolveRegexPlaceholders(command.Regex)
processRegexLogger.Debug("Resolved regex placeholders. Pattern: %s", pattern)
// I'm not too happy about having to trim regex, we could have meaningful whitespace or newlines
// But it's a compromise that allows us to use | in yaml
// Otherwise we would have to escape every god damn pair of quotation marks
// And a bunch of other shit
pattern = strings.TrimSpace(pattern)
processRegexLogger.Debug("Trimmed regex pattern: %s", pattern)
patternCompileStart := time.Now()
compiledPattern, err := regexp.Compile(pattern) compiledPattern, err := regexp.Compile(pattern)
if err != nil { if err != nil {
processRegexLogger.Error("Error compiling pattern %q: %v", pattern, err) return "", 0, 0, fmt.Errorf("error compiling pattern: %v", err)
return commands, fmt.Errorf("error compiling pattern: %v", err)
} }
processRegexLogger.Debug("Compiled pattern successfully in %v", time.Since(patternCompileStart))
// Same here, it's just string concatenation, it won't kill us previous := luaExpr
// More important is that we don't fuck up the command luaExpr = BuildLuaScript(luaExpr)
// But we shouldn't be able to since it's passed by value fmt.Printf("Changing Lua expression from: %s to: %s\n", previous, luaExpr)
previousLuaExpr := command.Lua
luaExpr := BuildLuaScript(command.Lua) L, err := NewLuaState()
processRegexLogger.Debug("Transformed Lua expression: %q → %q", previousLuaExpr, luaExpr) if err != nil {
processRegexLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200)) return "", 0, 0, fmt.Errorf("error creating Lua state: %v", err)
}
defer L.Close()
// Initialize Lua environment
modificationCount := 0
// Process all regex matches // Process all regex matches
matchFindStart := time.Now() result := content
indices := compiledPattern.FindAllStringSubmatchIndex(content, -1) indices := compiledPattern.FindAllStringSubmatchIndex(content, -1)
matchFindDuration := time.Since(matchFindStart)
processRegexLogger.Debug("Found %d matches in content of length %d (search took %v)",
len(indices), len(content), matchFindDuration)
processRegexLogger.Trace("Match indices: %v", indices)
// Log pattern complexity metrics
patternComplexity := estimatePatternComplexity(pattern)
processRegexLogger.Debug("Pattern complexity estimate: %d", patternComplexity)
if len(indices) == 0 {
processRegexLogger.Warning("No matches found for regex: %q", pattern)
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
return commands, nil
}
// We walk backwards because we're replacing something with something else that might be longer // We walk backwards because we're replacing something with something else that might be longer
// And in the case it is longer than the original all indices past that change will be fucked up // And in the case it is longer than the original all indices past that change will be fucked up
// By going backwards we fuck up all the indices to the end of the file that we don't care about // By going backwards we fuck up all the indices to the end of the file that we don't care about
// Because there either aren't any (last match) or they're already modified (subsequent matches) // Because there either aren't any (last match) or they're already modified (subsequent matches)
for i, matchIndices := range indices { for i := len(indices) - 1; i >= 0; i-- {
matchLogger := processRegexLogger.WithField("matchNum", i+1) matchIndices := indices[i]
matchLogger.Debug("Processing match %d of %d", i+1, len(indices))
matchLogger.Trace("Match indices: %v (match position %d-%d)", matchIndices, matchIndices[0], matchIndices[1])
L, err := NewLuaState()
if err != nil {
matchLogger.Error("Error creating Lua state: %v", err)
return commands, fmt.Errorf("error creating Lua state: %v", err)
}
L.SetGlobal("file", lua.LString(filename))
// Hmm... Maybe we don't want to defer this..
// Maybe we want to close them every iteration
// We'll leave it as is for now
defer L.Close()
matchLogger.Trace("Lua state created successfully for match %d", i+1)
// Why we're doing this whole song and dance of indices is to properly handle empty matches // Why we're doing this whole song and dance of indices is to properly handle empty matches
// Plus it's a little cleaner to surgically replace our matches // Plus it's a little cleaner to surgically replace our matches
// If we were to use string.replace and encountered an empty match there'd be nothing to replace // If we were to use string.replace and encountered an empty match there'd be nothing to replace
@@ -110,390 +136,61 @@ func ProcessRegex(content string, command utils.ModifyCommand, filename string)
// So when we're cutting open the array we say 0:7 + modified + 7:end // So when we're cutting open the array we say 0:7 + modified + 7:end
// As if concatenating in the middle of the array // As if concatenating in the middle of the array
// Plus it supports lookarounds // Plus it supports lookarounds
matchContent := content[matchIndices[0]:matchIndices[1]] match := content[matchIndices[0]:matchIndices[1]]
matchPreview := utils.LimitString(matchContent, 50)
matchLogger.Trace("Matched content: %q (length: %d)", matchPreview, len(matchContent))
groups := matchIndices[2:] groups := matchIndices[2:]
if len(groups) <= 0 { if len(groups) <= 0 {
matchLogger.Warning("No capture groups found for match %q and regex %q", matchPreview, pattern) fmt.Println("No capture groups for lua to chew on")
continue continue
} }
if len(groups)%2 == 1 { if len(groups)%2 == 1 {
matchLogger.Warning("Invalid number of group indices (%d), should be even: %v", len(groups), groups) fmt.Println("Odd number of indices of groups, what the fuck?")
continue continue
} }
// Count how many valid groups we have captures := make([]string, 0, len(groups)/2)
validGroups := 0
for j := 0; j < len(groups); j += 2 { for j := 0; j < len(groups); j += 2 {
if groups[j] != -1 && groups[j+1] != -1 { captures = append(captures, content[groups[j]:groups[j+1]])
validGroups++
}
}
matchLogger.Debug("Found %d valid capture groups in match", validGroups)
for _, index := range groups {
if index == -1 {
matchLogger.Warning("Negative index encountered in match indices %v. This may indicate an issue with the regex pattern or an empty/optional capture group.", matchIndices)
continue
}
} }
// We have to use array to preserve order if err := p.ToLua(L, captures); err != nil {
// Very important for the reconstruction step fmt.Println("Error setting Lua variables:", err)
// Because we must overwrite the values in reverse order
// See comments a few dozen lines above for more details
captureGroups := make([]*CaptureGroup, 0, len(groups)/2)
groupNames := compiledPattern.SubexpNames()[1:]
for i, name := range groupNames {
start := groups[i*2]
end := groups[i*2+1]
if start == -1 || end == -1 {
matchLogger.Debug("Skipping empty or unmatched capture group #%d (name: %q)", i+1, name)
continue
}
value := content[start:end]
captureGroups = append(captureGroups, &CaptureGroup{
Name: name,
Value: value,
Range: [2]int{start, end},
})
// Include name info in log if available
if name != "" {
matchLogger.Trace("Capture group '%s': %q (pos %d-%d)", name, value, start, end)
} else {
matchLogger.Trace("Capture group #%d: %q (pos %d-%d)", i+1, value, start, end)
}
}
// Use the DeduplicateGroups flag to control whether to deduplicate capture groups
if !command.NoDedup {
matchLogger.Debug("Deduplicating capture groups as specified in command settings")
captureGroups = deduplicateGroups(captureGroups)
matchLogger.Trace("Capture groups after deduplication: %v", captureGroups)
} else {
matchLogger.Debug("Skipping deduplication of capture groups (NoDedup is true)")
}
if err := toLua(L, captureGroups); err != nil {
matchLogger.Error("Failed to set Lua variables for capture groups: %v", err)
continue continue
} }
matchLogger.Debug("Set %d capture groups as Lua variables", len(captureGroups))
matchLogger.Trace("Lua globals set for capture groups")
if err := L.DoString(luaExpr); err != nil { if err := L.DoString(luaExpr); err != nil {
matchLogger.Error("Lua script execution failed: %v\nScript: %s\nCapture Groups: %+v", fmt.Printf("Error executing Lua code %s for group %s: %v", luaExpr, captures, err)
err, utils.LimitString(luaExpr, 200), captureGroups)
continue continue
} }
matchLogger.Debug("Lua script executed successfully")
// Get modifications from Lua // Get modifications from Lua
updatedCaptureGroups, err := fromLua(L, captureGroups) modResult, err := p.FromLua(L)
if err != nil { if err != nil {
matchLogger.Error("Failed to retrieve modifications from Lua: %v", err) fmt.Println("Error getting modifications:", err)
continue
}
matchLogger.Debug("Retrieved updated values from Lua")
matchLogger.Trace("Updated capture groups from Lua: %v", updatedCaptureGroups)
replacement := ""
replacementVar := L.GetGlobal("replacement")
if replacementVar.Type() != lua.LTNil {
replacement = replacementVar.String()
matchLogger.Debug("Using global replacement variable from Lua: %q", replacement)
}
// Check if modification flag is set
modifiedVal := L.GetGlobal("modified")
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
matchLogger.Debug("Skipping match - no modifications indicated by Lua script")
continue continue
} }
if replacement == "" { // Apply modifications to the matched text
// Apply the modifications to the original match modsMap, ok := modResult.(map[int]string)
replacement = matchContent if !ok || len(modsMap) == 0 {
fmt.Println("No modifications to apply")
// Count groups that were actually modified
modifiedGroupsCount := 0
for _, capture := range updatedCaptureGroups {
if capture.Value != capture.Updated {
modifiedGroupsCount++
}
}
matchLogger.Info("%d of %d capture groups identified for modification", modifiedGroupsCount, len(updatedCaptureGroups))
for _, capture := range updatedCaptureGroups {
if capture.Value == capture.Updated {
matchLogger.Debug("Capture group unchanged: %s", utils.LimitString(capture.Value, 50))
continue
}
// Log what changed with context
matchLogger.Debug("Capture group %q scheduled for modification: %q → %q",
capture.Name, utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
// Indices of the group are relative to content
// To relate them to match we have to subtract the match start index
// replacement = replacement[:groupStart] + newVal + replacement[groupEnd:]
commands = append(commands, utils.ReplaceCommand{
From: capture.Range[0],
To: capture.Range[1],
With: capture.Updated,
})
matchLogger.Trace("Added replacement command: %+v", commands[len(commands)-1])
}
} else {
matchLogger.Debug("Using full replacement string from Lua: %q", utils.LimitString(replacement, 50))
commands = append(commands, utils.ReplaceCommand{
From: matchIndices[0],
To: matchIndices[1],
With: replacement,
})
matchLogger.Trace("Added full replacement command: %+v", commands[len(commands)-1])
}
}
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
processRegexLogger.Debug("Generated %d total modifications", len(commands))
return commands, nil
}
func deduplicateGroups(captureGroups []*CaptureGroup) []*CaptureGroup {
deduplicateGroupsLogger := regexLogger.WithPrefix("deduplicateGroups")
deduplicateGroupsLogger.Debug("Starting deduplication of capture groups")
deduplicateGroupsLogger.Trace("Input capture groups: %v", captureGroups)
// Preserve input order and drop any group that overlaps with an already accepted group
accepted := make([]*CaptureGroup, 0, len(captureGroups))
for _, group := range captureGroups {
groupLogger := deduplicateGroupsLogger.WithField("groupName", group.Name).WithField("groupRange", group.Range)
groupLogger.Debug("Processing capture group")
overlaps := false
for _, kept := range accepted {
// Overlap if start < keptEnd and end > keptStart (adjacent is allowed)
if group.Range[0] < kept.Range[1] && group.Range[1] > kept.Range[0] {
overlaps = true
break
}
}
if overlaps {
groupLogger.Warning("Overlapping capture group detected and skipped.")
continue continue
} }
groupLogger.Debug("Capture group does not overlap with previously accepted groups. Adding.") // Apply the modifications to the original match
accepted = append(accepted, group) replacement := match
for i := len(modsMap) - 1; i >= 0; i-- {
newVal := modsMap[i]
// Indices of the group are relative to content
// To relate them to match we have to subtract the match start index
groupStart := groups[i*2] - matchIndices[0]
groupEnd := groups[i*2+1] - matchIndices[0]
replacement = replacement[:groupStart] + newVal + replacement[groupEnd:]
}
modificationCount++
result = result[:matchIndices[0]] + replacement + result[matchIndices[1]:]
} }
deduplicateGroupsLogger.Debug("Finished deduplication. Original %d groups, %d deduplicated.", len(captureGroups), len(accepted)) return result, modificationCount, len(indices), nil
deduplicateGroupsLogger.Trace("Deduplicated groups: %v", accepted)
return accepted
}
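The overlap rule above ("start < keptEnd and end > keptStart, adjacent allowed") in isolation, with made-up ranges:

```go
package main

import "fmt"

// overlaps reports whether [aStart,aEnd) and [bStart,bEnd) share any positions.
// Touching ranges (aEnd == bStart) are not considered overlapping.
func overlaps(aStart, aEnd, bStart, bEnd int) bool {
	return aStart < bEnd && aEnd > bStart
}

func main() {
	fmt.Println(overlaps(0, 5, 5, 9))  // false: adjacent is allowed
	fmt.Println(overlaps(0, 5, 3, 9))  // true:  [3,5) is shared
	fmt.Println(overlaps(2, 4, 0, 10)) // true:  fully contained
}
```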
// The order of these replaces is important
// The first one handles !num placeholders inside named capture groups
// If it were not here, a !num inside a named capture group would
// expand into a nested capture group inside that group
// We really only want the one (named) capture group
func resolveRegexPlaceholders(pattern string) string {
resolveLogger := regexLogger.WithPrefix("resolveRegexPlaceholders").WithField("originalPattern", utils.LimitString(pattern, 100))
resolveLogger.Debug("Resolving regex placeholders in pattern")
// Handle special pattern modifications
if !strings.HasPrefix(pattern, "(?s)") {
pattern = "(?s)" + pattern
resolveLogger.Debug("Prepended '(?s)' to pattern for single-line mode")
}
namedGroupNum := regexp.MustCompile(`(?:(\?<[^>]+>)(!num))`)
pattern = namedGroupNum.ReplaceAllStringFunc(pattern, func(match string) string {
funcLogger := resolveLogger.WithPrefix("namedGroupNumReplace").WithField("match", utils.LimitString(match, 50))
funcLogger.Debug("Processing named group !num placeholder")
parts := namedGroupNum.FindStringSubmatch(match)
if len(parts) != 3 {
funcLogger.Warning("Unexpected number of submatches for namedGroupNum: %d. Returning original match.", len(parts))
return match
}
replacement := `-?\d*\.?\d+`
funcLogger.Trace("Replacing !num in named group with: %q", replacement)
return parts[1] + replacement
})
resolveLogger.Debug("Handled named group !num placeholders")
pattern = strings.ReplaceAll(pattern, "!num", `(-?\d*\.?\d+)`)
resolveLogger.Debug("Replaced !num with numeric capture group")
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
// !rep(pattern, count) repeats the pattern n times
// Inserting !any between each repetition
pattern = repPattern.ReplaceAllStringFunc(pattern, func(match string) string {
funcLogger := resolveLogger.WithPrefix("repPatternReplace").WithField("match", utils.LimitString(match, 50))
funcLogger.Debug("Processing !rep placeholder")
parts := repPattern.FindStringSubmatch(match)
if len(parts) != 3 {
funcLogger.Warning("Unexpected number of submatches for repPattern: %d. Returning original match.", len(parts))
return match
}
repeatedPattern := parts[1]
countStr := parts[2]
repetitions, err := strconv.Atoi(countStr)
if err != nil {
funcLogger.Error("Failed to parse repetition count %q: %v. Returning original match.", countStr, err)
return match
}
var finalReplacement string
if repetitions > 0 {
finalReplacement = strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
} else {
finalReplacement = ""
}
funcLogger.Trace("Replaced !rep with %d repetitions of %q: %q", repetitions, utils.LimitString(repeatedPattern, 30), utils.LimitString(finalReplacement, 100))
return finalReplacement
})
resolveLogger.Debug("Handled !rep placeholders")
resolveLogger.Debug("Finished resolving regex placeholders")
resolveLogger.Trace("Final resolved pattern: %q", utils.LimitString(pattern, 100))
return pattern
}
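To make the placeholder grammar concrete, a standalone sketch that expands !num and !any with the same literal replacements as above (!rep and the named-group case are left out); the sample pattern and input are invented for illustration.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// User-facing pattern with placeholders, as it might appear in a cookfile.
	pattern := `price="!num"!anyquantity="!num"`

	// Same literal replacements as resolveRegexPlaceholders (minus !rep and
	// the named-group special case).
	expanded := strings.ReplaceAll(pattern, "!num", `(-?\d*\.?\d+)`)
	expanded = strings.ReplaceAll(expanded, "!any", `.*?`)
	expanded = "(?s)" + expanded

	fmt.Println(expanded)
	// (?s)price="(-?\d*\.?\d+)".*?quantity="(-?\d*\.?\d+)"

	re := regexp.MustCompile(expanded)
	m := re.FindStringSubmatch(`<item price="24.99" quantity="5"/>`)
	fmt.Println(m[1], m[2]) // 24.99 5
}
```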
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
func toLua(L *lua.LState, data interface{}) error {
toLuaLogger := regexLogger.WithPrefix("toLua")
toLuaLogger.Debug("Setting capture groups as Lua variables")
captureGroups, ok := data.([]*CaptureGroup)
if !ok {
toLuaLogger.Error("Invalid data type for toLua. Expected []*CaptureGroup, got %T", data)
return fmt.Errorf("expected []*CaptureGroup for captures, got %T", data)
}
toLuaLogger.Trace("Input capture groups: %v", captureGroups)
groupindex := 0
for _, capture := range captureGroups {
groupLogger := toLuaLogger.WithField("captureGroup", capture.Name).WithField("value", utils.LimitString(capture.Value, 50))
groupLogger.Debug("Processing capture group for Lua")
if capture.Name == "" {
// We don't want to change the name of the capture group
// Even if it's empty
tempName := fmt.Sprintf("%d", groupindex+1)
groupindex++
groupLogger.Debug("Unnamed capture group, assigning temporary name: %q", tempName)
L.SetGlobal("s"+tempName, lua.LString(capture.Value))
groupLogger.Trace("Set Lua global s%s = %q", tempName, capture.Value)
val, err := strconv.ParseFloat(capture.Value, 64)
if err == nil {
L.SetGlobal("v"+tempName, lua.LNumber(val))
groupLogger.Trace("Set Lua global v%s = %f", tempName, val)
} else {
groupLogger.Trace("Value %q is not numeric, skipping v%s assignment", capture.Value, tempName)
}
} else {
val, err := strconv.ParseFloat(capture.Value, 64)
if err == nil {
L.SetGlobal(capture.Name, lua.LNumber(val))
groupLogger.Trace("Set Lua global %s = %f (numeric)", capture.Name, val)
} else {
L.SetGlobal(capture.Name, lua.LString(capture.Value))
groupLogger.Trace("Set Lua global %s = %q (string)", capture.Name, capture.Value)
}
}
}
toLuaLogger.Debug("Finished setting capture groups as Lua variables")
return nil
}
// FromLua implements the Processor interface for RegexProcessor
func fromLua(L *lua.LState, captureGroups []*CaptureGroup) ([]*CaptureGroup, error) {
fromLuaLogger := regexLogger.WithPrefix("fromLua")
fromLuaLogger.Debug("Retrieving modifications from Lua for capture groups")
fromLuaLogger.Trace("Initial capture groups: %v", captureGroups)
captureIndex := 0
for _, capture := range captureGroups {
groupLogger := fromLuaLogger.WithField("originalCaptureName", capture.Name).WithField("originalValue", utils.LimitString(capture.Value, 50))
groupLogger.Debug("Processing capture group to retrieve updated value")
if capture.Name == "" {
// This case means it was an unnamed capture group originally.
// We need to reconstruct the original temporary name to fetch its updated value.
// The name will be set to an integer if it was empty, then incremented.
// So, we use the captureIndex to get the correct 'vX' and 'sX' variables.
tempName := fmt.Sprintf("%d", captureIndex+1)
groupLogger.Debug("Retrieving updated value for unnamed group (temp name: %q)", tempName)
vVarName := fmt.Sprintf("v%s", tempName)
sVarName := fmt.Sprintf("s%s", tempName)
captureIndex++
vLuaVal := L.GetGlobal(vVarName)
sLuaVal := L.GetGlobal(sVarName)
groupLogger.Trace("Lua values for unnamed group: v=%v, s=%v", vLuaVal, sLuaVal)
if sLuaVal.Type() == lua.LTString {
capture.Updated = sLuaVal.String()
groupLogger.Trace("Updated value from s%s (string): %q", tempName, capture.Updated)
}
// Numbers have priority
if vLuaVal.Type() == lua.LTNumber {
capture.Updated = vLuaVal.String()
groupLogger.Trace("Updated value from v%s (numeric): %q", tempName, capture.Updated)
}
} else {
// Easy shit, directly use the named capture group
updatedValue := L.GetGlobal(capture.Name)
if updatedValue.Type() != lua.LTNil {
capture.Updated = updatedValue.String()
groupLogger.Trace("Updated value for named group %q: %q", capture.Name, capture.Updated)
} else {
groupLogger.Debug("Named capture group %q not found in Lua globals or is nil. Keeping original value.", capture.Name)
capture.Updated = capture.Value // Keep original if not found or nil
}
}
groupLogger.Debug("Finished processing capture group. Original: %q, Updated: %q", utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
}
fromLuaLogger.Debug("Finished retrieving modifications from Lua")
fromLuaLogger.Trace("Final updated capture groups: %v", captureGroups)
return captureGroups, nil
}
// estimatePatternComplexity gives a rough estimate of regex pattern complexity
// This can help identify potentially problematic patterns
func estimatePatternComplexity(pattern string) int {
estimateComplexityLogger := regexLogger.WithPrefix("estimatePatternComplexity").WithField("pattern", utils.LimitString(pattern, 100))
estimateComplexityLogger.Debug("Estimating regex pattern complexity")
complexity := len(pattern)
// Add complexity for potentially expensive operations
complexity += strings.Count(pattern, ".*") * 10 // Greedy wildcard
complexity += strings.Count(pattern, ".*?") * 5 // Non-greedy wildcard
complexity += strings.Count(pattern, "[^") * 3 // Negated character class
complexity += strings.Count(pattern, "\\b") * 2 // Word boundary
complexity += strings.Count(pattern, "(") * 2 // Capture groups
complexity += strings.Count(pattern, "(?:") * 1 // Non-capture groups
complexity += strings.Count(pattern, "\\1") * 3 // Backreferences
complexity += strings.Count(pattern, "{") * 2 // Counted repetition
estimateComplexityLogger.Debug("Estimated pattern complexity: %d", complexity)
return complexity
} }

File diff suppressed because it is too large

View File

@@ -1,27 +0,0 @@
package processor
import (
"io"
"os"
logger "git.site.quack-lab.dev/dave/cylogger"
)
func init() {
// Only modify logger in test mode
// This checks if we're running under 'go test'
if os.Getenv("GO_TESTING") == "1" || os.Getenv("TESTING") == "1" {
// Initialize logger with ERROR level for tests
// to minimize noise in test output
logger.Init(logger.LevelError)
// Optionally redirect logger output to discard
// This prevents logger output from interfering with test output
disableTestLogs := os.Getenv("ENABLE_TEST_LOGS") != "1"
if disableTestLogs {
// Create a new logger that writes to nowhere
silentLogger := logger.New(io.Discard, "", 0)
logger.Default = silentLogger
}
}
}

217
processor/xml.go Normal file
View File

@@ -0,0 +1,217 @@
package processor
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/antchfx/xmlquery"
lua "github.com/yuin/gopher-lua"
)
// XMLProcessor implements the Processor interface for XML documents
type XMLProcessor struct{}
// Process implements the Processor interface for XMLProcessor
func (p *XMLProcessor) Process(filename string, pattern string, luaExpr string) (int, int, error) {
// Read file content
fullPath := filepath.Join(".", filename)
content, err := os.ReadFile(fullPath)
if err != nil {
return 0, 0, fmt.Errorf("error reading file: %v", err)
}
fileContent := string(content)
// Process the content
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
if err != nil {
return 0, 0, err
}
// If we made modifications, save the file
if modCount > 0 {
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
if err != nil {
return 0, 0, fmt.Errorf("error writing file: %v", err)
}
}
return modCount, matchCount, nil
}
// ProcessContent implements the Processor interface for XMLProcessor
func (p *XMLProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
// Parse XML document
doc, err := xmlquery.Parse(strings.NewReader(content))
if err != nil {
return content, 0, 0, fmt.Errorf("error parsing XML: %v", err)
}
// Find nodes matching the XPath pattern
nodes, err := xmlquery.QueryAll(doc, pattern)
if err != nil {
return content, 0, 0, fmt.Errorf("error executing XPath: %v", err)
}
matchCount := len(nodes)
if matchCount == 0 {
return content, 0, 0, nil
}
// Initialize Lua
L := lua.NewState()
defer L.Close()
// Load math library
L.Push(L.GetGlobal("require"))
L.Push(lua.LString("math"))
if err := L.PCall(1, 1, nil); err != nil {
return content, 0, 0, fmt.Errorf("error loading Lua math library: %v", err)
}
// Load helper functions
if err := InitLuaHelpers(L); err != nil {
return content, 0, 0, err
}
// Apply modifications to each node
modCount := 0
for _, node := range nodes {
// Reset Lua state for each node
L.SetGlobal("v1", lua.LNil)
L.SetGlobal("s1", lua.LNil)
// Get the node value
var originalValue string
if node.Type == xmlquery.AttributeNode {
originalValue = node.InnerText()
} else if node.Type == xmlquery.TextNode {
originalValue = node.Data
} else {
originalValue = node.InnerText()
}
// Convert to Lua variables
err = p.ToLua(L, originalValue)
if err != nil {
return content, modCount, matchCount, fmt.Errorf("error converting to Lua: %v", err)
}
// Execute Lua script
if err := L.DoString(luaExpr); err != nil {
return content, modCount, matchCount, fmt.Errorf("error executing Lua: %v", err)
}
// Get modified value
result, err := p.FromLua(L)
if err != nil {
return content, modCount, matchCount, fmt.Errorf("error getting result from Lua: %v", err)
}
newValue, ok := result.(string)
if !ok {
return content, modCount, matchCount, fmt.Errorf("expected string result from Lua, got %T", result)
}
// Skip if no change
if newValue == originalValue {
continue
}
// Apply modification
if node.Type == xmlquery.AttributeNode {
// For attribute nodes, update the attribute value
node.Parent.Attr = append([]xmlquery.Attr{}, node.Parent.Attr...)
for i, attr := range node.Parent.Attr {
if attr.Name.Local == node.Data {
node.Parent.Attr[i].Value = newValue
break
}
}
} else if node.Type == xmlquery.TextNode {
// For text nodes, update the text content
node.Data = newValue
} else {
// For element nodes, replace inner text
// Simple approach: set the InnerText directly if there are no child elements
if node.FirstChild == nil || (node.FirstChild != nil && node.FirstChild.Type == xmlquery.TextNode && node.FirstChild.NextSibling == nil) {
if node.FirstChild != nil {
node.FirstChild.Data = newValue
} else {
// Create a new text node and add it as the first child
textNode := &xmlquery.Node{
Type: xmlquery.TextNode,
Data: newValue,
}
node.FirstChild = textNode
}
} else {
// Complex case: node has mixed content or child elements
// Replace just the text content while preserving child elements
// This is a simplified approach - more complex XML may need more robust handling
for child := node.FirstChild; child != nil; child = child.NextSibling {
if child.Type == xmlquery.TextNode {
child.Data = newValue
break // Update only the first text node
}
}
}
}
modCount++
}
// Serialize the modified XML document to string
if doc.FirstChild != nil && doc.FirstChild.Type == xmlquery.DeclarationNode {
// If we have an XML declaration, start with it
declaration := doc.FirstChild.OutputXML(true)
// Remove the firstChild (declaration) before serializing the rest of the document
doc.FirstChild = doc.FirstChild.NextSibling
return declaration + doc.OutputXML(true), modCount, matchCount, nil
}
return doc.OutputXML(true), modCount, matchCount, nil
}
// ToLua converts XML node values to Lua variables
func (p *XMLProcessor) ToLua(L *lua.LState, data interface{}) error {
value, ok := data.(string)
if !ok {
return fmt.Errorf("expected string value, got %T", data)
}
// Set as string variable
L.SetGlobal("s1", lua.LString(value))
// Try to convert to number if possible
L.SetGlobal("v1", lua.LNumber(0)) // Default to 0
if err := L.DoString(fmt.Sprintf("v1 = tonumber(%q) or 0", value)); err != nil {
return fmt.Errorf("error converting value to number: %v", err)
}
return nil
}
// FromLua gets modified values from Lua
func (p *XMLProcessor) FromLua(L *lua.LState) (interface{}, error) {
// Check if string variable was modified
s1 := L.GetGlobal("s1")
if s1 != lua.LNil {
if s1Str, ok := s1.(lua.LString); ok {
return string(s1Str), nil
}
}
// Check if numeric variable was modified
v1 := L.GetGlobal("v1")
if v1 != lua.LNil {
if v1Num, ok := v1.(lua.LNumber); ok {
return fmt.Sprintf("%v", v1Num), nil
}
}
// Default return empty string
return "", nil
}
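The same XPath-then-mutate flow, stripped of the Lua layer, as a self-contained sketch using antchfx/xmlquery directly; doubling the value stands in for whatever the expression would compute, and only the simple single-text-child case from the element branch above is handled.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/antchfx/xmlquery"
)

func main() {
	doc, err := xmlquery.Parse(strings.NewReader(`<config><item><value>100</value></item></config>`))
	if err != nil {
		panic(err)
	}

	nodes, err := xmlquery.QueryAll(doc, "//value")
	if err != nil {
		panic(err)
	}

	for _, node := range nodes {
		old := node.InnerText()
		n, err := strconv.ParseFloat(old, 64)
		if err != nil {
			continue // not numeric, leave it alone
		}
		// Simple element with a single text child: overwrite the text node,
		// the same case the element branch above handles first.
		if node.FirstChild != nil && node.FirstChild.Type == xmlquery.TextNode {
			node.FirstChild.Data = strconv.FormatFloat(n*2, 'f', -1, 64)
		}
	}

	fmt.Println(doc.OutputXML(true))
	// <config><item><value>200</value></item></config>
}
```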

1532
processor/xml_test.go Normal file

File diff suppressed because it is too large

View File

@@ -1,137 +0,0 @@
package regression
import (
"cook/processor"
"cook/utils"
"os"
"path/filepath"
"testing"
)
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
command := utils.ModifyCommand{
Regex: regex,
Lua: lua,
LogLevel: "TRACE",
}
commands, err := processor.ProcessRegex(content, command, "test")
if err != nil {
return "", 0, 0, err
}
result, modifications := utils.ExecuteModifications(commands, content)
return result, modifications, len(commands), nil
}
func TestTalentsMechanicOutOfRange(t *testing.T) {
given := `<Talent identifier="quickfixer">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.quickfixer">
<Replace tag="[amount]" value="20" color="gui.green"/>
<Replace tag="[duration]" value="10" color="gui.green"/>
</Description>
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="1"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
<Affliction identifier="quickfixer" amount="10.0"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupEffect>
</Talent>`
actual := `<Talent identifier="quickfixer">
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
<Description tag="talentdescription.quickfixer">
<Replace tag="[amount]" value="30" color="gui.green"/>
<Replace tag="[duration]" value="20" color="gui.green"/>
</Description>
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
<AbilityGroupEffect abilityeffecttype="None">
<Abilities>
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="2"/>
</Abilities>
</AbilityGroupEffect>
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
<Conditions>
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
</Conditions>
<Abilities>
<CharacterAbilityApplyStatusEffects>
<StatusEffects>
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
<Affliction identifier="quickfixer" amount="20"/>
</StatusEffect>
</StatusEffects>
</CharacterAbilityApplyStatusEffects>
</Abilities>
</AbilityGroupEffect>
</Talent>`
result, mods, matches, err := ApiAdaptor(given, `<Talent identifier="quickfixer">!anyvalue="(?<movementspeed>!num)"!anyvalue="(?<duration>!num)"!anyvalue="(?<repairspeed>!num)"!anyamount="(?<durationv>!num)"`, "movementspeed=round(movementspeed*1.5, 2) duration=round(duration*2, 2) repairspeed=round(repairspeed*2, 2) durationv=duration")
if err != nil {
t.Fatalf("Error processing content: %v", err)
}
if matches != 4 {
t.Errorf("Expected 4 matches, got %d", matches)
}
if mods != 4 {
t.Errorf("Expected 4 modifications, got %d", mods)
}
if result != actual {
t.Errorf("expected %s, got %s", actual, result)
}
}
func TestIndexExplosions_ShouldNotPanic(t *testing.T) {
cwd, err := os.Getwd()
if err != nil {
t.Fatalf("Error getting current working directory: %v", err)
}
given, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItems.xml"))
if err != nil {
t.Fatalf("Error reading file: %v", err)
}
expected, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItemsExpected.xml"))
if err != nil {
t.Fatalf("Error reading file: %v", err)
}
result, _, _, err := ApiAdaptor(string(given), `(?-s)LightComponent!anyrange="(!num)"`, "*4")
if err != nil {
t.Fatalf("Error processing content: %v", err)
}
// We don't really care how many god damn matches there are as long as the result is correct
// if matches != 45 {
// t.Errorf("Expected 45 match, got %d", matches)
// }
//
// if mods != 45 {
// t.Errorf("Expected 45 modification, got %d", mods)
// }
if string(result) != string(expected) {
t.Errorf("expected %s, got %s", expected, result)
}
}

View File

@@ -1,49 +0,0 @@
#!/bin/bash
echo "Figuring out the tag..."
TAG=$(git describe --tags --exact-match 2>/dev/null || echo "")
if [ -z "$TAG" ]; then
# Get the latest tag
LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1))
# Increment the patch version
IFS='.' read -r -a VERSION_PARTS <<< "$LATEST_TAG"
VERSION_PARTS[2]=$((VERSION_PARTS[2]+1))
TAG="${VERSION_PARTS[0]}.${VERSION_PARTS[1]}.${VERSION_PARTS[2]}"
# Create a new tag
git tag $TAG
git push origin $TAG
fi
echo "Tag: $TAG"
echo "Building the thing..."
go build -o chef.exe .
go install .
echo "Creating a release..."
TOKEN="$GITEA_API_KEY"
GITEA="https://git.site.quack-lab.dev"
REPO="dave/BigChef"
# Create a release
RELEASE_RESPONSE=$(curl -s -X POST \
-H "Authorization: token $TOKEN" \
-H "Accept: application/json" \
-H "Content-Type: application/json" \
-d '{
"tag_name": "'"$TAG"'",
"name": "'"$TAG"'",
"draft": false,
"prerelease": false
}' \
$GITEA/api/v1/repos/$REPO/releases)
# Extract the release ID
echo $RELEASE_RESPONSE
RELEASE_ID=$(echo $RELEASE_RESPONSE | awk -F'"id":' '{print $2+0; exit}')
echo "Release ID: $RELEASE_ID"
echo "Uploading the things..."
curl -X POST \
-H "Authorization: token $TOKEN" \
-F "attachment=@chef.exe" \
"$GITEA/api/v1/repos/$REPO/releases/${RELEASE_ID}/assets?name=chef.exe"
rm chef.exe

1
test.xml Normal file
View File

@@ -0,0 +1 @@
<config><item><value>100</value></item></config>

12
test_complex.xml Normal file
View File

@@ -0,0 +1,12 @@
<config>
<item>
<value>75</value>
<multiplier>2</multiplier>
<divider>4</divider>
</item>
<item>
<value>150</value>
<multiplier>3</multiplier>
<divider>2</divider>
</item>
</config>

37
test_data.xml Normal file
View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<testdata>
<!-- Numeric values -->
<item>
<id>1</id>
<value>200</value>
<price>24.99</price>
<quantity>5</quantity>
</item>
<!-- Text values -->
<item>
<id>2</id>
<name>Test Product</name>
<description>This is a test product description</description>
<category>Test</category>
</item>
<!-- Mixed content -->
<item>
<id>3</id>
<name>Mixed Product</name>
<price>19.99</price>
<code>PRD-123</code>
<tags>sale,discount,new</tags>
</item>
<!-- Empty and special values -->
<item>
<id>4</id>
<value></value>
<specialChars>Hello &amp; World &lt; &gt; &quot; &apos;</specialChars>
<multiline>Line 1
Line 2
Line 3</multiline>
</item>
</testdata>

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1 @@
<config><item><value>100</value></item></config>

View File

@@ -1,155 +0,0 @@
package utils
import (
"path/filepath"
"time"
logger "git.site.quack-lab.dev/dave/cylogger"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// dbLogger is a scoped logger for the utils/db package.
var dbLogger = logger.Default.WithPrefix("utils/db")
type DB interface {
DB() *gorm.DB
Raw(sql string, args ...any) *gorm.DB
SaveFile(filePath string, fileData []byte) error
GetFile(filePath string) ([]byte, error)
GetAllFiles() ([]FileSnapshot, error)
RemoveAllFiles() error
}
type FileSnapshot struct {
Date time.Time `gorm:"primaryKey"`
FilePath string `gorm:"primaryKey"`
FileData []byte `gorm:"type:blob"`
}
type DBWrapper struct {
db *gorm.DB
}
var globalDB *DBWrapper
func GetDB() (DB, error) {
getDBLogger := dbLogger.WithPrefix("GetDB")
getDBLogger.Debug("Attempting to get database connection")
var err error
dbFile := filepath.Join("data.sqlite")
getDBLogger.Debug("Opening database file: %q", dbFile)
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
// SkipDefaultTransaction: true,
PrepareStmt: true,
// Logger: gormlogger.Default.LogMode(gormlogger.Silent),
})
if err != nil {
getDBLogger.Error("Failed to open database: %v", err)
return nil, err
}
getDBLogger.Debug("Database opened successfully, running auto migration")
if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
getDBLogger.Error("Auto migration failed: %v", err)
return nil, err
}
getDBLogger.Debug("Auto migration completed")
globalDB = &DBWrapper{db: db}
getDBLogger.Debug("Database wrapper initialized")
return globalDB, nil
}
// Just a wrapper
func (db *DBWrapper) Raw(sql string, args ...any) *gorm.DB {
rawLogger := dbLogger.WithPrefix("Raw").WithField("sql", sql)
rawLogger.Debug("Executing raw SQL query with args: %v", args)
return db.db.Raw(sql, args...)
}
func (db *DBWrapper) DB() *gorm.DB {
dbLogger.WithPrefix("DB").Debug("Returning GORM DB instance")
return db.db
}
func (db *DBWrapper) FileExists(filePath string) (bool, error) {
fileExistsLogger := dbLogger.WithPrefix("FileExists").WithField("filePath", filePath)
fileExistsLogger.Debug("Checking if file exists in database")
var count int64
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).Count(&count).Error
if err != nil {
fileExistsLogger.Error("Error checking if file exists: %v", err)
return false, err
}
fileExistsLogger.Debug("File exists: %t", count > 0)
return count > 0, err
}
func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
saveFileLogger := dbLogger.WithPrefix("SaveFile").WithField("filePath", filePath)
saveFileLogger.Debug("Attempting to save file to database")
saveFileLogger.Trace("File data length: %d", len(fileData))
exists, err := db.FileExists(filePath)
if err != nil {
saveFileLogger.Error("Error checking if file exists: %v", err)
return err
}
if exists {
saveFileLogger.Debug("File already exists, skipping save")
return nil
}
saveFileLogger.Debug("Creating new file snapshot in database")
err = db.db.Create(&FileSnapshot{
Date: time.Now(),
FilePath: filePath,
FileData: fileData,
}).Error
if err != nil {
saveFileLogger.Error("Failed to create file snapshot: %v", err)
} else {
saveFileLogger.Debug("File saved successfully to database")
}
return err
}
func (db *DBWrapper) GetFile(filePath string) ([]byte, error) {
getFileLogger := dbLogger.WithPrefix("GetFile").WithField("filePath", filePath)
getFileLogger.Debug("Getting file from database")
var fileSnapshot FileSnapshot
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).First(&fileSnapshot).Error
if err != nil {
getFileLogger.Error("Failed to get file from database: %v", err)
return nil, err
}
getFileLogger.Debug("File found in database")
getFileLogger.Trace("Retrieved file data length: %d", len(fileSnapshot.FileData))
return fileSnapshot.FileData, nil
}
func (db *DBWrapper) GetAllFiles() ([]FileSnapshot, error) {
getAllFilesLogger := dbLogger.WithPrefix("GetAllFiles")
getAllFilesLogger.Debug("Getting all files from database")
var fileSnapshots []FileSnapshot
err := db.db.Model(&FileSnapshot{}).Find(&fileSnapshots).Error
if err != nil {
getAllFilesLogger.Error("Failed to get all files from database: %v", err)
return nil, err
}
getAllFilesLogger.Debug("Found %d files in database", len(fileSnapshots))
getAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
return fileSnapshots, nil
}
func (db *DBWrapper) RemoveAllFiles() error {
removeAllFilesLogger := dbLogger.WithPrefix("RemoveAllFiles")
removeAllFilesLogger.Debug("Removing all files from database")
err := db.db.Exec("DELETE FROM file_snapshots").Error
if err != nil {
removeAllFilesLogger.Error("Failed to remove all files from database: %v", err)
} else {
removeAllFilesLogger.Debug("All files removed from database")
}
return err
}
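
To make the removed snapshot API concrete, here is a minimal, hypothetical usage sketch. It is written as if it lived inside the same utils package; the helper name, the file path, and the error handling are illustrative assumptions, not code from this change.

```go
package utils

import "os"

// snapshotAndRestore is an illustrative helper: record a file's original
// contents once, then restore those bytes from the database later.
func snapshotAndRestore(path string) error {
	db, err := GetDB()
	if err != nil {
		return err
	}
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	// SaveFile is effectively write-once: it skips the insert when a snapshot
	// for this path already exists, so the first recorded version is kept.
	if err := db.SaveFile(path, data); err != nil {
		return err
	}
	original, err := db.GetFile(path)
	if err != nil {
		return err
	}
	return os.WriteFile(path, original, 0644)
}
```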

View File

@@ -1,142 +0,0 @@
package utils
import (
"os"
"path/filepath"
"strconv"
"strings"
logger "git.site.quack-lab.dev/dave/cylogger"
)
// fileLogger is a scoped logger for the utils/file package.
var fileLogger = logger.Default.WithPrefix("utils/file")
func CleanPath(path string) string {
cleanPathLogger := fileLogger.WithPrefix("CleanPath")
cleanPathLogger.Debug("Cleaning path: %q", path)
cleanPathLogger.Trace("Original path: %q", path)
path = filepath.Clean(path)
path = strings.ReplaceAll(path, "\\", "/")
cleanPathLogger.Trace("Cleaned path result: %q", path)
return path
}
func ToAbs(path string) string {
toAbsLogger := fileLogger.WithPrefix("ToAbs")
toAbsLogger.Debug("Converting path to absolute: %q", path)
toAbsLogger.Trace("Input path: %q", path)
if filepath.IsAbs(path) {
toAbsLogger.Debug("Path is already absolute, cleaning it.")
cleanedPath := CleanPath(path)
toAbsLogger.Trace("Already absolute path after cleaning: %q", cleanedPath)
return cleanedPath
}
cwd, err := os.Getwd()
if err != nil {
toAbsLogger.Error("Error getting current working directory: %v", err)
return CleanPath(path)
}
toAbsLogger.Trace("Current working directory: %q", cwd)
cleanedPath := CleanPath(filepath.Join(cwd, path))
toAbsLogger.Trace("Converted absolute path result: %q", cleanedPath)
return cleanedPath
}
// LimitString truncates a string to maxLen and adds "..." if truncated
func LimitString(s string, maxLen int) string {
limitStringLogger := fileLogger.WithPrefix("LimitString").WithField("originalLength", len(s)).WithField("maxLength", maxLen)
limitStringLogger.Debug("Limiting string length")
s = strings.ReplaceAll(s, "\n", "\\n")
if len(s) <= maxLen {
limitStringLogger.Trace("String length (%d) is within max length (%d), no truncation", len(s), maxLen)
return s
}
limited := s[:maxLen-3] + "..."
limitStringLogger.Trace("String truncated from %d to %d characters: %q", len(s), len(limited), limited)
return limited
}
// StrToFloat converts a string to a float64, returning 0 on error.
func StrToFloat(s string) float64 {
strToFloatLogger := fileLogger.WithPrefix("StrToFloat").WithField("inputString", s)
strToFloatLogger.Debug("Attempting to convert string to float")
f, err := strconv.ParseFloat(s, 64)
if err != nil {
strToFloatLogger.Warning("Failed to convert string %q to float, returning 0: %v", s, err)
return 0
}
strToFloatLogger.Trace("Successfully converted %q to float: %f", s, f)
return f
}
func ResetWhereNecessary(associations map[string]FileCommandAssociation, db DB) error {
resetWhereNecessaryLogger := fileLogger.WithPrefix("ResetWhereNecessary")
resetWhereNecessaryLogger.Debug("Starting reset where necessary operation")
resetWhereNecessaryLogger.Trace("File-command associations input: %v", associations)
dirtyFiles := make(map[string]struct{})
for _, association := range associations {
resetWhereNecessaryLogger.Debug("Processing association for file: %q", association.File)
for _, command := range association.Commands {
resetWhereNecessaryLogger.Debug("Checking command %q for reset requirement", command.Name)
resetWhereNecessaryLogger.Trace("Command details: %v", command)
if command.Reset {
resetWhereNecessaryLogger.Debug("Command %q requires reset for file %q, marking as dirty", command.Name, association.File)
dirtyFiles[association.File] = struct{}{}
}
}
for _, command := range association.IsolateCommands {
resetWhereNecessaryLogger.Debug("Checking isolate command %q for reset requirement", command.Name)
resetWhereNecessaryLogger.Trace("Isolate command details: %v", command)
if command.Reset {
resetWhereNecessaryLogger.Debug("Isolate command %q requires reset for file %q, marking as dirty", command.Name, association.File)
dirtyFiles[association.File] = struct{}{}
}
}
}
resetWhereNecessaryLogger.Debug("Identified %d files that need to be reset", len(dirtyFiles))
resetWhereNecessaryLogger.Trace("Dirty files identified: %v", dirtyFiles)
for file := range dirtyFiles {
resetWhereNecessaryLogger.Debug("Resetting file %q", file)
fileData, err := db.GetFile(file)
if err != nil {
resetWhereNecessaryLogger.Warning("Failed to get original content for file %q from database: %v", file, err)
continue
}
resetWhereNecessaryLogger.Trace("Retrieved original file data length for %q: %d", file, len(fileData))
resetWhereNecessaryLogger.Debug("Writing original content back to file %q", file)
err = os.WriteFile(file, fileData, 0644)
if err != nil {
resetWhereNecessaryLogger.Warning("Failed to write original content back to file %q: %v", file, err)
continue
}
resetWhereNecessaryLogger.Debug("Successfully reset file %q", file)
}
resetWhereNecessaryLogger.Debug("Finished reset where necessary operation")
return nil
}
func ResetAllFiles(db DB) error {
resetAllFilesLogger := fileLogger.WithPrefix("ResetAllFiles")
resetAllFilesLogger.Debug("Starting reset all files operation")
fileSnapshots, err := db.GetAllFiles()
if err != nil {
resetAllFilesLogger.Error("Failed to get all file snapshots from database: %v", err)
return err
}
resetAllFilesLogger.Debug("Found %d files in database to reset", len(fileSnapshots))
resetAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
for _, fileSnapshot := range fileSnapshots {
resetAllFilesLogger.Debug("Resetting file %q", fileSnapshot.FilePath)
err = os.WriteFile(fileSnapshot.FilePath, fileSnapshot.FileData, 0644)
if err != nil {
resetAllFilesLogger.Warning("Failed to write file %q to disk: %v", fileSnapshot.FilePath, err)
continue
}
resetAllFilesLogger.Debug("File %q written to disk successfully", fileSnapshot.FilePath)
}
resetAllFilesLogger.Debug("Finished reset all files operation")
return nil
}
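
A short, hypothetical sketch of the string helpers above; the inputs are invented, and the commented outputs follow directly from the code as shown.

```go
package utils

import "fmt"

// exampleHelpers exercises the small utility functions with made-up inputs.
func exampleHelpers() {
	// Backslashes are normalized to forward slashes.
	fmt.Println(CleanPath(`mods\textures\item.png`)) // mods/textures/item.png
	// Newlines are escaped and the result is capped at 12 characters, ending in "...".
	fmt.Println(LimitString("first line\nsecond line", 12))
	fmt.Println(StrToFloat("44.95") * 2)    // 89.9
	fmt.Println(StrToFloat("not a number")) // 0 on parse failure
}
```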

View File

@@ -1,20 +0,0 @@
package utils
import (
"flag"
logger "git.site.quack-lab.dev/dave/cylogger"
)
// flagsLogger is a scoped logger for the utils/flags package.
var flagsLogger = logger.Default.WithPrefix("utils/flags")
var (
ParallelFiles = flag.Int("P", 100, "Number of files to process in parallel")
Filter = flag.String("f", "", "Filter commands before running them")
)
func init() {
flagsLogger.Debug("Initializing flags")
flagsLogger.Trace("ParallelFiles initial value: %d, Filter initial value: %q", *ParallelFiles, *Filter)
}
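
For reference, a hypothetical caller of these package-level flags; how the main program wires -f into command filtering is not visible in this diff, so that part is an assumption.

```go
package utils

import "flag"

// exampleFlags shows how a caller would read the package-level flags.
func exampleFlags() {
	flag.Parse()
	workers := *ParallelFiles // defaults to 100 unless -P is passed
	filter := *Filter         // e.g. -f "price,weight"; presumably handed to FilterCommands
	_, _ = workers, filter
}
```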

View File

@@ -1,369 +0,0 @@
package utils
import (
"fmt"
"os"
"path/filepath"
"strings"
logger "git.site.quack-lab.dev/dave/cylogger"
"github.com/bmatcuk/doublestar/v4"
"gopkg.in/yaml.v3"
)
// modifyCommandLogger is a scoped logger for the utils/modifycommand package.
var modifyCommandLogger = logger.Default.WithPrefix("utils/modifycommand")
type ModifyCommand struct {
Name string `yaml:"name,omitempty"`
Regex string `yaml:"regex,omitempty"`
Regexes []string `yaml:"regexes,omitempty"`
Lua string `yaml:"lua,omitempty"`
Files []string `yaml:"files,omitempty"`
Reset bool `yaml:"reset,omitempty"`
LogLevel string `yaml:"loglevel,omitempty"`
Isolate bool `yaml:"isolate,omitempty"`
NoDedup bool `yaml:"nodedup,omitempty"`
Disabled bool `yaml:"disable,omitempty"`
Modifiers map[string]interface{} `yaml:"modifiers,omitempty"`
}
type CookFile []ModifyCommand
func (c *ModifyCommand) Validate() error {
validateLogger := modifyCommandLogger.WithPrefix("Validate").WithField("commandName", c.Name)
validateLogger.Debug("Validating command")
if c.Regex == "" && len(c.Regexes) == 0 {
validateLogger.Error("Validation failed: Regex pattern is required")
return fmt.Errorf("pattern is required")
}
if c.Lua == "" {
validateLogger.Error("Validation failed: Lua expression is required")
return fmt.Errorf("lua expression is required")
}
if len(c.Files) == 0 {
validateLogger.Error("Validation failed: At least one file is required")
return fmt.Errorf("at least one file is required")
}
if c.LogLevel == "" {
validateLogger.Debug("LogLevel not specified, defaulting to INFO")
c.LogLevel = "INFO"
}
validateLogger.Debug("Command validated successfully")
return nil
}
// matchesMemoTable caches glob match results; in practice the speedup from memoization turned out to be modest.
var matchesMemoTable map[string]bool = make(map[string]bool)
func Matches(path string, glob string) (bool, error) {
matchesLogger := modifyCommandLogger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
matchesLogger.Debug("Checking if path matches glob")
key := fmt.Sprintf("%s:%s", path, glob)
if matches, ok := matchesMemoTable[key]; ok {
matchesLogger.Debug("Found match in memo table: %t", matches)
return matches, nil
}
matches, err := doublestar.Match(glob, path)
if err != nil {
matchesLogger.Error("Failed to match glob: %v", err)
return false, fmt.Errorf("failed to match glob %s with file %s: %w", glob, path, err)
}
matchesMemoTable[key] = matches
matchesLogger.Debug("Match result: %t, storing in memo table", matches)
return matches, nil
}
func SplitPattern(pattern string) (string, string) {
splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
splitPatternLogger.Debug("Splitting pattern")
splitPatternLogger.Trace("Original pattern: %q", pattern)
static, pattern := doublestar.SplitPattern(pattern)
cwd, err := os.Getwd()
if err != nil {
splitPatternLogger.Error("Error getting current working directory: %v", err)
return "", ""
}
splitPatternLogger.Trace("Current working directory: %q", cwd)
if static == "" {
splitPatternLogger.Debug("Static part is empty, defaulting to current working directory")
static = cwd
}
if !filepath.IsAbs(static) {
splitPatternLogger.Debug("Static part is not absolute, joining with current working directory")
static = filepath.Join(cwd, static)
static = filepath.Clean(static)
splitPatternLogger.Trace("Static path after joining and cleaning: %q", static)
}
static = strings.ReplaceAll(static, "\\", "/")
splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
return static, pattern
}
type FileCommandAssociation struct {
File string
IsolateCommands []ModifyCommand
Commands []ModifyCommand
}
func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[string]FileCommandAssociation, error) {
associateFilesLogger := modifyCommandLogger.WithPrefix("AssociateFilesWithCommands")
associateFilesLogger.Debug("Associating files with commands")
associateFilesLogger.Trace("Input files: %v", files)
associateFilesLogger.Trace("Input commands: %v", commands)
associationCount := 0
fileCommands := make(map[string]FileCommandAssociation)
for _, file := range files {
file = strings.ReplaceAll(file, "\\", "/")
associateFilesLogger.Debug("Processing file: %q", file)
fileCommands[file] = FileCommandAssociation{
File: file,
IsolateCommands: []ModifyCommand{},
Commands: []ModifyCommand{},
}
for _, command := range commands {
associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
for _, glob := range command.Files {
glob = strings.ReplaceAll(glob, "\\", "/")
static, pattern := SplitPattern(glob)
associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)
// Build absolute path for the current file to compare with static
cwd, err := os.Getwd()
if err != nil {
associateFilesLogger.Warning("Failed to get CWD when matching %q for file %q: %v", glob, file, err)
continue
}
var absFile string
if filepath.IsAbs(file) {
absFile = filepath.Clean(file)
} else {
absFile = filepath.Clean(filepath.Join(cwd, file))
}
absFile = strings.ReplaceAll(absFile, "\\", "/")
associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)
// Only match if the file is under the static root
if !(strings.HasPrefix(absFile, static+"/") || absFile == static) {
associateFilesLogger.Trace("Skipping glob %q for file %q because file is outside static root %q", glob, file, static)
continue
}
patternFile := strings.TrimPrefix(absFile, static+`/`)
associateFilesLogger.Trace("Pattern-relative path used for match: %q", patternFile)
matches, err := Matches(patternFile, pattern)
if err != nil {
associateFilesLogger.Warning("Failed to match glob %q with file %q: %v", glob, file, err)
continue
}
if matches {
associateFilesLogger.Debug("File %q matches glob %q. Associating with command %q", file, glob, command.Name)
association := fileCommands[file]
if command.Isolate {
associateFilesLogger.Debug("Command %q is an isolate command, adding to isolate list", command.Name)
association.IsolateCommands = append(association.IsolateCommands, command)
} else {
associateFilesLogger.Debug("Command %q is a regular command, adding to regular list", command.Name)
association.Commands = append(association.Commands, command)
}
fileCommands[file] = association
associationCount++
} else {
associateFilesLogger.Trace("File %q did not match glob %q (pattern=%q, rel=%q)", file, glob, pattern, patternFile)
}
}
}
currentFileCommands := fileCommands[file]
associateFilesLogger.Debug("Finished processing file %q. Found %d regular commands and %d isolate commands", file, len(currentFileCommands.Commands), len(currentFileCommands.IsolateCommands))
associateFilesLogger.Trace("Commands for file %q: %v", file, currentFileCommands.Commands)
associateFilesLogger.Trace("Isolate commands for file %q: %v", file, currentFileCommands.IsolateCommands)
}
associateFilesLogger.Info("Completed association. Found %d total associations for %d files and %d commands", associationCount, len(files), len(commands))
return fileCommands, nil
}
func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
aggregateGlobsLogger := modifyCommandLogger.WithPrefix("AggregateGlobs")
aggregateGlobsLogger.Debug("Aggregating glob patterns from commands")
aggregateGlobsLogger.Trace("Input commands for aggregation: %v", commands)
globs := make(map[string]struct{})
for _, command := range commands {
aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
for _, glob := range command.Files {
resolvedGlob := strings.Replace(glob, "~", os.Getenv("HOME"), 1)
resolvedGlob = strings.ReplaceAll(resolvedGlob, "\\", "/")
aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q)", glob, resolvedGlob)
globs[resolvedGlob] = struct{}{}
}
}
aggregateGlobsLogger.Debug("Finished aggregating globs. Found %d unique glob patterns", len(globs))
aggregateGlobsLogger.Trace("Aggregated unique globs: %v", globs)
return globs
}
func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
expandGlobsLogger := modifyCommandLogger.WithPrefix("ExpandGLobs")
expandGlobsLogger.Debug("Expanding glob patterns to actual files")
expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
var files []string
filesMap := make(map[string]bool)
cwd, err := os.Getwd()
if err != nil {
expandGlobsLogger.Error("Failed to get current working directory: %v", err)
return nil, fmt.Errorf("failed to get current working directory: %w", err)
}
expandGlobsLogger.Debug("Current working directory: %q", cwd)
for pattern := range patterns {
expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
static, pattern := SplitPattern(pattern)
matches, err := doublestar.Glob(os.DirFS(static), pattern)
if err != nil {
expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
continue
}
expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
for _, m := range matches {
m = filepath.Join(static, m)
info, err := os.Stat(m)
if err != nil {
expandGlobsLogger.Warning("Error getting file info for %q: %v", m, err)
continue
}
if !info.IsDir() && !filesMap[m] {
expandGlobsLogger.Trace("Adding unique file to list: %q", m)
filesMap[m], files = true, append(files, m)
}
}
}
if len(files) > 0 {
expandGlobsLogger.Debug("Finished expanding globs. Found %d unique files to process", len(files))
expandGlobsLogger.Trace("Unique files to process: %v", files)
} else {
expandGlobsLogger.Warning("No files found after expanding glob patterns.")
}
return files, nil
}
func LoadCommands(args []string) ([]ModifyCommand, error) {
loadCommandsLogger := modifyCommandLogger.WithPrefix("LoadCommands")
loadCommandsLogger.Debug("Loading commands from arguments (cook files or direct patterns)")
loadCommandsLogger.Trace("Input arguments: %v", args)
commands := []ModifyCommand{}
for _, arg := range args {
loadCommandsLogger.Debug("Processing argument for commands: %q", arg)
newCommands, err := LoadCommandsFromCookFiles(arg)
if err != nil {
loadCommandsLogger.Error("Failed to load commands from argument %q: %v", arg, err)
return nil, fmt.Errorf("failed to load commands from cook files: %w", err)
}
loadCommandsLogger.Debug("Successfully loaded %d commands from %q", len(newCommands), arg)
for _, cmd := range newCommands {
if cmd.Disabled {
loadCommandsLogger.Debug("Skipping disabled command: %q", cmd.Name)
continue
}
commands = append(commands, cmd)
loadCommandsLogger.Trace("Added command %q. Current total commands: %d", cmd.Name, len(commands))
}
}
loadCommandsLogger.Info("Finished loading commands. Total %d commands loaded", len(commands))
return commands, nil
}
func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, error) {
loadCookFilesLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFiles").WithField("pattern", pattern)
loadCookFilesLogger.Debug("Loading commands from cook files based on pattern")
loadCookFilesLogger.Trace("Input pattern: %q", pattern)
static, pattern := SplitPattern(pattern)
commands := []ModifyCommand{}
cookFiles, err := doublestar.Glob(os.DirFS(static), pattern)
if err != nil {
loadCookFilesLogger.Error("Failed to glob cook files for pattern %q: %v", pattern, err)
return nil, fmt.Errorf("failed to glob cook files: %w", err)
}
loadCookFilesLogger.Debug("Found %d cook files for pattern %q", len(cookFiles), pattern)
loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)
for _, cookFile := range cookFiles {
cookFile = filepath.Join(static, cookFile)
cookFile = filepath.Clean(cookFile)
cookFile = strings.ReplaceAll(cookFile, "\\", "/")
loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)
cookFileData, err := os.ReadFile(cookFile)
if err != nil {
loadCookFilesLogger.Error("Failed to read cook file %q: %v", cookFile, err)
return nil, fmt.Errorf("failed to read cook file: %w", err)
}
loadCookFilesLogger.Trace("Read %d bytes from cook file %q", len(cookFileData), cookFile)
newCommands, err := LoadCommandsFromCookFile(cookFileData)
if err != nil {
loadCookFilesLogger.Error("Failed to load commands from cook file data for %q: %v", cookFile, err)
return nil, fmt.Errorf("failed to load commands from cook file: %w", err)
}
commands = append(commands, newCommands...)
loadCookFilesLogger.Debug("Added %d commands from cook file %q. Total commands now: %d", len(newCommands), cookFile, len(commands))
}
loadCookFilesLogger.Debug("Finished loading commands from cook files. Total %d commands", len(commands))
return commands, nil
}
func LoadCommandsFromCookFile(cookFileData []byte) ([]ModifyCommand, error) {
loadCommandLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFile")
loadCommandLogger.Debug("Unmarshaling commands from cook file data")
loadCommandLogger.Trace("Cook file data length: %d", len(cookFileData))
commands := []ModifyCommand{}
err := yaml.Unmarshal(cookFileData, &commands)
if err != nil {
loadCommandLogger.Error("Failed to unmarshal cook file data: %v", err)
return nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
}
loadCommandLogger.Debug("Successfully unmarshaled %d commands", len(commands))
loadCommandLogger.Trace("Unmarshaled commands: %v", commands)
return commands, nil
}
// CountGlobsBeforeDedup counts the total number of glob patterns across all commands before deduplication
func CountGlobsBeforeDedup(commands []ModifyCommand) int {
countGlobsLogger := modifyCommandLogger.WithPrefix("CountGlobsBeforeDedup")
countGlobsLogger.Debug("Counting glob patterns before deduplication")
count := 0
for _, cmd := range commands {
countGlobsLogger.Trace("Processing command %q, adding %d globs", cmd.Name, len(cmd.Files))
count += len(cmd.Files)
}
countGlobsLogger.Debug("Total glob patterns before deduplication: %d", count)
return count
}
func FilterCommands(commands []ModifyCommand, filter string) []ModifyCommand {
filterCommandsLogger := modifyCommandLogger.WithPrefix("FilterCommands").WithField("filter", filter)
filterCommandsLogger.Debug("Filtering commands")
filterCommandsLogger.Trace("Input commands: %v", commands)
filteredCommands := []ModifyCommand{}
filters := strings.Split(filter, ",")
filterCommandsLogger.Trace("Split filters: %v", filters)
for _, cmd := range commands {
filterCommandsLogger.Debug("Checking command %q against filters", cmd.Name)
for _, f := range filters {
if strings.Contains(cmd.Name, f) {
filterCommandsLogger.Debug("Command %q matches filter %q, adding to filtered list", cmd.Name, f)
filteredCommands = append(filteredCommands, cmd)
break // Command matches, no need to check other filters
}
}
}
filterCommandsLogger.Debug("Finished filtering commands. Found %d filtered commands", len(filteredCommands))
filterCommandsLogger.Trace("Filtered commands: %v", filteredCommands)
return filteredCommands
}
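
To tie the loader, glob expansion, and association steps together, here is a hypothetical end-to-end sketch. The embedded YAML is an invented cook file; only its keys come from the ModifyCommand yaml tags above.

```go
package utils

import "fmt"

// exampleCookFile loads commands from an in-memory cook file, drops disabled
// ones, expands their globs, and maps each matched file to its commands.
func exampleCookFile() error {
	cookYAML := []byte(`
- name: double-prices
  regex: '<price>(?P<v>[\d.]+)</price>'
  lua: v=v*2
  files:
    - "data/**/*.xml"
  loglevel: trace
- name: legacy-fix
  disable: true
  regex: 'foo'
  lua: v=v
  files: ["*.txt"]
`)
	commands, err := LoadCommandsFromCookFile(cookYAML)
	if err != nil {
		return err
	}
	var active []ModifyCommand
	for _, cmd := range commands {
		if cmd.Disabled {
			continue // LoadCommands drops disabled commands the same way
		}
		if err := cmd.Validate(); err != nil {
			return err
		}
		active = append(active, cmd)
	}
	// Expand every glob once, then associate each file with the commands that touch it.
	files, err := ExpandGLobs(AggregateGlobs(active))
	if err != nil {
		return err
	}
	associations, err := AssociateFilesWithCommands(files, active)
	if err != nil {
		return err
	}
	fmt.Printf("%d files matched\n", len(associations))
	return nil
}
```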

File diff suppressed because it is too large

View File

@@ -1,79 +0,0 @@
package utils
import (
"fmt"
"sort"
logger "git.site.quack-lab.dev/dave/cylogger"
)
// replaceCommandLogger is a scoped logger for the utils/replacecommand package.
var replaceCommandLogger = logger.Default.WithPrefix("utils/replacecommand")
type ReplaceCommand struct {
From int
To int
With string
}
func ExecuteModifications(modifications []ReplaceCommand, fileData string) (string, int) {
executeModificationsLogger := replaceCommandLogger.WithPrefix("ExecuteModifications")
executeModificationsLogger.Debug("Executing a batch of text modifications")
executeModificationsLogger.Trace("Number of modifications: %d, Original file data length: %d", len(modifications), len(fileData))
var err error
sort.Slice(modifications, func(i, j int) bool {
return modifications[i].From > modifications[j].From
})
executeModificationsLogger.Debug("Modifications sorted in reverse order for safe replacement")
executeModificationsLogger.Trace("Sorted modifications: %v", modifications)
executed := 0
for idx, modification := range modifications {
executeModificationsLogger.Debug("Applying modification %d/%d", idx+1, len(modifications))
executeModificationsLogger.Trace("Current modification details: From=%d, To=%d, With=%q", modification.From, modification.To, modification.With)
fileData, err = modification.Execute(fileData)
if err != nil {
executeModificationsLogger.Error("Failed to execute replacement for modification %+v: %v", modification, err)
continue
}
executed++
executeModificationsLogger.Trace("File data length after modification: %d", len(fileData))
}
executeModificationsLogger.Info("Successfully applied %d text replacements", executed)
return fileData, executed
}
func (m *ReplaceCommand) Execute(fileDataStr string) (string, error) {
executeLogger := replaceCommandLogger.WithPrefix("Execute").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With))
executeLogger.Debug("Attempting to execute single replacement")
err := m.Validate(len(fileDataStr))
if err != nil {
executeLogger.Error("Failed to validate modification: %v", err)
return fileDataStr, fmt.Errorf("failed to validate modification: %v", err)
}
executeLogger.Trace("Applying replacement: fileDataStr[:%d] + %q + fileDataStr[%d:]", m.From, m.With, m.To)
result := fileDataStr[:m.From] + m.With + fileDataStr[m.To:]
executeLogger.Trace("Replacement executed. Result length: %d", len(result))
return result, nil
}
func (m *ReplaceCommand) Validate(maxsize int) error {
validateLogger := replaceCommandLogger.WithPrefix("Validate").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With)).WithField("maxSize", maxsize)
validateLogger.Debug("Validating replacement command against max size")
if m.To < m.From {
validateLogger.Error("Validation failed: 'To' (%d) is less than 'From' (%d)", m.To, m.From)
return fmt.Errorf("command to is less than from: %v", m)
}
if m.From > maxsize || m.To > maxsize {
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is greater than max size (%d)", m.From, m.To, maxsize)
return fmt.Errorf("command from or to is greater than replacement length: %v", m)
}
if m.From < 0 || m.To < 0 {
validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is less than 0", m.From, m.To)
return fmt.Errorf("command from or to is less than 0: %v", m)
}
validateLogger.Debug("Modification command validated successfully")
return nil
}
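
A small usage sketch of the batch replacement above. The offsets and expected output are taken from the "Multiple modifications" test case in the test file below, so nothing here is invented beyond the wrapper function.

```go
package utils

import "fmt"

// exampleReplacements applies two edits expressed against the original
// string's offsets; ExecuteModifications sorts them by From in descending
// order, so a length-changing edit near the start cannot shift the offsets
// of edits that have not run yet.
func exampleReplacements() {
	input := "This is a test string"
	mods := []ReplaceCommand{
		{From: 0, To: 4, With: "That"},
		{From: 8, To: 14, With: "sample"},
	}
	out, applied := ExecuteModifications(mods, input)
	fmt.Println(out, applied) // "That is sample string" 2
}
```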

View File

@@ -1,504 +0,0 @@
package utils
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestReplaceCommandExecute(t *testing.T) {
tests := []struct {
name string
input string
command ReplaceCommand
expected string
shouldError bool
}{
{
name: "Simple replacement",
input: "This is a test string",
command: ReplaceCommand{From: 5, To: 7, With: "was"},
expected: "This was a test string",
shouldError: false,
},
{
name: "Replace at beginning",
input: "Hello world",
command: ReplaceCommand{From: 0, To: 5, With: "Hi"},
expected: "Hi world",
shouldError: false,
},
{
name: "Replace at end",
input: "Hello world",
command: ReplaceCommand{From: 6, To: 11, With: "everyone"},
expected: "Hello everyone",
shouldError: false,
},
{
name: "Replace entire string",
input: "Hello world",
command: ReplaceCommand{From: 0, To: 11, With: "Goodbye!"},
expected: "Goodbye!",
shouldError: false,
},
{
name: "Error: From > To",
input: "Test string",
command: ReplaceCommand{From: 7, To: 5, With: "fail"},
expected: "Test string",
shouldError: true,
},
{
name: "Error: From > string length",
input: "Test",
command: ReplaceCommand{From: 10, To: 12, With: "fail"},
expected: "Test",
shouldError: true,
},
{
name: "Error: To > string length",
input: "Test",
command: ReplaceCommand{From: 2, To: 10, With: "fail"},
expected: "Test",
shouldError: true,
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
result, err := tc.command.Execute(tc.input)
if tc.shouldError {
if err == nil {
t.Errorf("Expected an error for command %+v but got none", tc.command)
}
} else {
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if result != tc.expected {
t.Errorf("Expected %q, got %q", tc.expected, result)
}
}
})
}
}
func TestExecuteModifications(t *testing.T) {
tests := []struct {
name string
input string
modifications []ReplaceCommand
expected string
expectedCount int
}{
{
name: "Single modification",
input: "Hello world",
modifications: []ReplaceCommand{
{From: 0, To: 5, With: "Hi"},
},
expected: "Hi world",
expectedCount: 1,
},
{
name: "Multiple modifications",
input: "This is a test string",
modifications: []ReplaceCommand{
{From: 0, To: 4, With: "That"},
{From: 8, To: 14, With: "sample"},
},
expected: "That is sample string",
expectedCount: 2,
},
{
name: "Overlapping modifications",
input: "ABCDEF",
modifications: []ReplaceCommand{
{From: 0, To: 3, With: "123"}, // ABC -> 123
{From: 2, To: 5, With: "xyz"}, // CDE -> xyz
},
// The actual behavior with the current implementation
expected: "123yzF",
expectedCount: 2,
},
{
name: "Sequential modifications",
input: "Hello world",
modifications: []ReplaceCommand{
{From: 0, To: 5, With: "Hi"},
{From: 5, To: 6, With: ""}, // Remove the space
{From: 6, To: 11, With: "everyone"},
},
expected: "Hieveryone",
expectedCount: 3,
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
// Make a copy of the modifications to avoid modifying the test case
mods := make([]ReplaceCommand, len(tc.modifications))
copy(mods, tc.modifications)
result, count := ExecuteModifications(mods, tc.input)
if count != tc.expectedCount {
t.Errorf("Expected %d modifications, got %d", tc.expectedCount, count)
}
if result != tc.expected {
t.Errorf("Expected %q, got %q", tc.expected, result)
}
})
}
}
func TestReverseOrderExecution(t *testing.T) {
// This test verifies the current behavior of modification application
input := "Original text with multiple sections"
// Modifications in specific positions
modifications := []ReplaceCommand{
{From: 0, To: 8, With: "Modified"}, // Original -> Modified
{From: 9, To: 13, With: "document"}, // text -> document
{From: 14, To: 22, With: "without"}, // with -> without
{From: 23, To: 31, With: "any"}, // multiple -> any
}
// The actual current behavior of our implementation
expected := "Modified document withouttanytions"
result, count := ExecuteModifications(modifications, input)
if count != 4 {
t.Errorf("Expected 4 modifications, got %d", count)
}
if result != expected {
t.Errorf("Expected %q, got %q", expected, result)
}
}
// Replace text in the middle of a string with new content
func TestReplaceCommandExecute_ReplacesTextInMiddle(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 6,
To: 11,
With: "replaced",
}
fileContent := "Hello world, how are you?"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.NoError(t, err)
assert.Equal(t, "Hello replaced, how are you?", result)
}
// Replace with empty string (deletion)
func TestReplaceCommandExecute_DeletesText(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 6,
To: 11,
With: "",
}
fileContent := "Hello world, how are you?"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.NoError(t, err)
assert.Equal(t, "Hello , how are you?", result)
}
// Replace with longer string than original segment
func TestReplaceCommandExecute_WithLongerString(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 6,
To: 11,
With: "longerreplacement",
}
fileContent := "Hello world, how are you?"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.NoError(t, err)
assert.Equal(t, "Hello longerreplacement, how are you?", result)
}
// From and To values are the same (zero-length replacement)
func TestReplaceCommandExecute_ZeroLengthReplacement(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 5,
To: 5,
With: "inserted",
}
fileContent := "Hello world"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.NoError(t, err)
assert.Equal(t, "Helloinserted world", result)
}
// From value is greater than To value
func TestReplaceCommandExecute_FromGreaterThanTo(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 10,
To: 5,
With: "replaced",
}
fileContent := "Hello world, how are you?"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.Error(t, err)
assert.Equal(t, "Hello world, how are you?", result)
}
// From or To values exceed string length
func TestReplaceCommandExecute_FromOrToExceedsLength(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: 5,
To: 50, // Exceeds the length of the fileContent
With: "replaced",
}
fileContent := "Hello world"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.Error(t, err)
assert.Equal(t, "Hello world", result)
}
// From or To values are negative
func TestReplaceCommandExecute_NegativeFromOrTo(t *testing.T) {
// Arrange
cmd := &ReplaceCommand{
From: -1,
To: 10,
With: "replaced",
}
fileContent := "Hello world, how are you?"
// Act
result, err := cmd.Execute(fileContent)
// Assert
assert.Error(t, err)
assert.Equal(t, "Hello world, how are you?", result)
}
// Modifications are applied in reverse order (from highest to lowest 'From' value)
func TestExecuteModificationsAppliesInReverseOrder(t *testing.T) {
// Setup test data
fileData := "This is a test string for replacements"
modifications := []ReplaceCommand{
{From: 0, To: 4, With: "That"},
{From: 10, To: 14, With: "sample"},
{From: 26, To: 38, With: "modifications"},
}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
expectedResult := "That is a sample string for modifications"
if result != expectedResult {
t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
}
if executed != 3 {
t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
}
}
// One or more modifications fail but others succeed
func TestExecuteModificationsWithPartialFailures(t *testing.T) {
// Setup test data
fileData := "This is a test string for replacements"
// Create a custom ReplaceCommand implementation that will fail
failingCommand := ReplaceCommand{
From: 15,
To: 10, // Invalid range (To < From) to cause failure
With: "will fail",
}
// Valid commands
validCommand1 := ReplaceCommand{
From: 0,
To: 4,
With: "That",
}
validCommand2 := ReplaceCommand{
From: 26,
To: 38,
With: "modifications",
}
modifications := []ReplaceCommand{failingCommand, validCommand1, validCommand2}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
expectedResult := "That is a test string for modifications"
if result != expectedResult {
t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
}
// Only 2 out of 3 modifications should succeed
if executed != 2 {
t.Errorf("Expected 2 modifications to be executed successfully, but got %d", executed)
}
}
// All valid modifications are executed and the modified string is returned
func TestExecuteModificationsAllValid(t *testing.T) {
// Setup test data
fileData := "Hello world, this is a test"
modifications := []ReplaceCommand{
{From: 0, To: 5, With: "Hi"},
{From: 18, To: 20, With: "was"},
{From: 21, To: 27, With: "an example"},
}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
expectedResult := "Hi world, this was an example"
if result != expectedResult {
t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
}
if executed != 3 {
t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
}
}
// The count of successfully executed modifications is returned
func TestExecuteModificationsReturnsCorrectCount(t *testing.T) {
// Setup test data
fileData := "Initial text for testing"
modifications := []ReplaceCommand{
{From: 0, To: 7, With: "Final"},
{From: 12, To: 16, With: "example"},
{From: 17, To: 24, With: "process"},
}
// Execute the function
_, executed := ExecuteModifications(modifications, fileData)
// Verify the count of executed modifications
expectedExecuted := 3
if executed != expectedExecuted {
t.Errorf("Expected %d modifications to be executed, but got %d", expectedExecuted, executed)
}
}
// Empty modifications list returns the original string with zero executed count
func TestExecuteModificationsWithEmptyList(t *testing.T) {
// Setup test data
fileData := "This is a test string for replacements"
modifications := []ReplaceCommand{}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
if result != fileData {
t.Errorf("Expected result to be %q, but got %q", fileData, result)
}
if executed != 0 {
t.Errorf("Expected 0 modifications to be executed, but got %d", executed)
}
}
// Modifications with identical 'From' values
func TestExecuteModificationsWithIdenticalFromValues(t *testing.T) {
// Setup test data
fileData := "This is a test string for replacements"
modifications := []ReplaceCommand{
{From: 10, To: 14, With: "sample"},
{From: 10, To: 14, With: "example"},
{From: 26, To: 38, With: "modifications"},
}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
// The mangled result is intentional: each command operates on the contents as
// already modified by the previous one. By the time "example" is applied, its
// indices have already eaten into "sample" (consuming "samp" and leaving "le"),
// so prepending "example" yields "examplele". Which of the two identical ranges
// runs first is not important to us; in practice "sample" happens to go first,
// hence the expected "examplele".
expectedResult := "This is a examplele string for modifications"
if result != expectedResult {
t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
}
if executed != 3 {
t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
}
}
// Modifications that would affect each other if not sorted properly
func TestExecuteModificationsHandlesOverlappingRanges(t *testing.T) {
// Setup test data
fileData := "The quick brown fox jumps over the lazy dog"
modifications := []ReplaceCommand{
{From: 4, To: 9, With: "slow"},
{From: 10, To: 15, With: "red"},
{From: 16, To: 19, With: "cat"},
}
// Execute the function
result, executed := ExecuteModifications(modifications, fileData)
// Verify results
expectedResult := "The slow red cat jumps over the lazy dog"
if result != expectedResult {
t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
}
if executed != 3 {
t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
}
}