Compare commits: v2.7.0...fd1df6e40e (89 commits)
SHA1:
fd1df6e40e
1a8c0b9f90
bff7cc2a27
ff30b00e71
e1eb5eeaa6
2a2e11d8e0
6eb4f31127
4b58e00c26
8ffd8af13c
67861d4455
299e6d8bfe
388822e90a
91993b4548
bb69558aaa
052c670627
67fd215d0e
9ecbbff6fa
774ac0f0ca
b785d24a08
22f991e72e
5518b27663
0b899dea2c
3424fea8ad
ddc1d83d58
4b0a85411d
46e871b626
258dcc88e7
75bf449bed
58586395fb
c5a68af5e6
b4c0284734
c5d1dad8de
4ff2ee80ee
633eebfd2a
5a31703840
162d0c758d
14d64495b6
fe6e97e832
35b3d8b099
2e3e958e15
955afc4295
2c487bc443
b77224176b
a2201053c5
04cedf5ece
ebb07854cc
8a86ae2f40
e8f16dda2b
513773f641
22914fe243
2d523dfe64
2629722f67
1f6c4e4976
bfd08e754e
750010b71a
9064a53820
294c04a11a
ba7ac07001
5d10178bf9
f91c2b4795
057db23d09
bf72734b90
cc30c2bdcb
f453079c72
e634fe28bd
4e4b7bbd19
89eed3f847
f008efd5e1
f6def1e5a5
867b188718
aac29a4074
8a40f463f7
8d4db1da91
d41e2afe17
76457d22cf
912950d463
25326ea11b
df212b7fcc
f4a963760a
d236811cb9
da93770334
d9f54a8354
dc8da8ab63
24262a7dca
d77b13c363
a9c60a3698
66bcf21d79
e847e5c3ce
9a70c9696e
.gitignore (vendored): 3 changes

@@ -1 +1,4 @@
*.exe
.qodo
*.sqlite
testfiles
.vscode/launch.json (vendored): 90 changes

@@ -5,16 +5,98 @@
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Launch Package",
            "name": "Launch Package (Barotrauma)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
            "args": [
                "LightComponent!anyrange=\"(!num)\"",
                "*4",
                "**/*.xml"
                "-loglevel",
                "trace",
                "-cook",
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Payday 2)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Payday2",
            "args": [
                "-loglevel",
                "trace",
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Barotrauma cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Barotrauma",
            "args": [
                "-loglevel",
                "trace",
                "-cook",
                "cookassistant.yml",
            ]
        },
        {
            "name": "Launch Package (Quasimorph cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Quasimorph",
            "args": [
                "cook.yml",
            ]
        },
        {
            "name": "Launch Package (Rimworld cookfile)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Rimworld/294100",
            "args": [
                "cookVehicles.yml",
            ]
        },
        {
            "name": "Launch Package (Workspace)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "args": [
                "tester.yml",
            ]
        },
        {
            "name": "Launch Package (Avorion)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Avorion/Avorion",
            "args": [
                "*.yml",
            ]
        },
        {
            "name": "Launch Package (Minecraft)",
            "type": "go",
            "request": "launch",
            "mode": "auto",
            "program": "${workspaceFolder}",
            "cwd": "C:/Users/Administrator/Seafile/Games-Minecraft",
            "args": [
                "cook_tacz.yml",
            ]
        }
    ]
@@ -1,651 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Talents>
|
||||
<Talent identifier="powerarmor">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="5,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.powerarmor">
|
||||
<Replace tag="[bonusmovement]" value="25" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.exosuit" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionHasItem tags="deepdivinglarge" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyStat stattype="MovementSpeed" value="0.25" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AddedRecipe itemidentifier="exosuit"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="foolhardy">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="4,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.lowhealthstatboost">
|
||||
<Replace tag="[health]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.physicalresistance" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionAboveVitality invert="true" vitalitypercentage="0.5"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="foolhardy" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="berserker">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.lowhealthstatboost">
|
||||
<Replace tag="[health]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.meleedamagebonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionAboveVitality invert="true" vitalitypercentage="0.5"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true" multiplyafflictionsbymaxvitality="true">
|
||||
<Affliction identifier="berserker" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="mudraptorwrestler">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="2,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.mudraptorwrestler">
|
||||
<Replace tag="[amount]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalstattypeself">
|
||||
<Replace tag="[amount]" value="10" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.physicalresistance" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAttack">
|
||||
<Conditions>
|
||||
<AbilityConditionAttackData weapontype="NoWeapon,Melee" />
|
||||
<AbilityConditionCharacter>
|
||||
<Conditional group="eq mudraptor" />
|
||||
</AbilityConditionCharacter>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveResistance resistanceid="damage" multiplier="0.9"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="heavylifting">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="1,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.heavylifting">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionHoldingItem tags="alienartifact,crate"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyStat stattype="MovementSpeed" value="0.2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="iamthatguy">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="0,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.iamthatguy">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.skillbonus">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[skillname]" value="stattypenames.weaponsskillbonus" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.heavywrench" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="WeaponsSkillBonus" value="20"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAddDamageAffliction">
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyAffliction afflictionidentifiers="blunttrauma" addedmultiplier="0.2" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="heavywrench"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="robotics">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,7" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.robotics"/>
|
||||
<Description tag="talentdescription.roboticsreminder">
|
||||
<Replace tag="[amount]" value="2" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.defensebotspawner,entityname.defensebotammobox" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="defensebotspawner"/>
|
||||
<AddedRecipe itemidentifier="defensebotammobox"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="ironstorm">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="7,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.ironstorm">
|
||||
<Replace tag="[chance]" value="10" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.scrapcannon" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilitySetMetadataInt identifier="tiermodifieroverride" value="3"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="scrapcannon"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="residualwaste">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.residualwaste">
|
||||
<Replace tag="[chance]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
|
||||
<Conditions>
|
||||
<AbilityConditionServerRandom randomChance="0.2"/>
|
||||
<!-- don't allow duplicating genetic materials, and prevent infinite FPGA circuits -->
|
||||
<AbilityConditionItem tags="geneticmaterial,unidentifiedgeneticmaterial,circuitboxcomponent,lightcomponent" invert="true"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="massproduction">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,1" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.massproduction">
|
||||
<Replace tag="[chance]" value="40" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnItemFabricatedIngredients">
|
||||
<Conditions>
|
||||
<AbilityConditionServerRandom randomChance="0.4" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityRemoveRandomIngredient>
|
||||
<AbilityConditionItem category="Material"/>
|
||||
</CharacterAbilityRemoveRandomIngredient>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="toolmaintenance">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="5,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.toolmaintenance">
|
||||
<Replace tag="[amount]" value="1" color="gui.green"/>
|
||||
</Description>
|
||||
<!-- Give once when unlocking the talent -->
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="tool~toolmaintenance" stattype="IncreaseFabricationQuality" value="1" targetallies="true" setvalue="true"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<!-- Give every 60 seconds for late comers -->
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="tool~toolmaintenance" stattype="IncreaseFabricationQuality" value="1" targetallies="true" setvalue="true"/>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="miner">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="2,3" sheetelementsize="428,428"/>
|
||||
<Description tag="talentdescription.miner">
|
||||
<Replace tag="[probability]" value="320" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.gainoredetachspeed">
|
||||
<Replace tag="[amount]" value="1600" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="RepairToolDeattachTimeMultiplier" value="1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
|
||||
<Conditions>
|
||||
<AbilityConditionServerRandom randomchance="12.8"/>
|
||||
<AbilityConditionItem tags="ore"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="retrofit">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="3,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.retrofit" />
|
||||
<Description tag="talentdescription.doesnotstack" />
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilitySetMetadataInt identifier="tiermodifiers.increasewallhealth" value="1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="ironman">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.ironhelmet,entityname.makeshiftarmor" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="ironhelmet"/>
|
||||
<AddedRecipe itemidentifier="makeshiftarmor"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="oiledmachinery">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="4,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.oiledmachinery">
|
||||
<Replace tag="[amount]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.doesnotstack" />
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="fabricator" stattype="FabricationSpeed" value="1.5" />
|
||||
<CharacterAbilityGiveItemStatToTags tags="deconstructor" stattype="DeconstructorSpeed" value="1.5" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="fabricator" stattype="FabricationSpeed" value="1.5" />
|
||||
<CharacterAbilityGiveItemStatToTags tags="deconstructor" stattype="DeconstructorSpeed" value="1.5" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="pumpndump">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="1,7" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.pumpndump">
|
||||
<Replace tag="[amount]" value="10" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.maxflow" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<conditions>
|
||||
<AbilityConditionItem tags="pump"/>
|
||||
</conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStat stattype="PumpSpeed" value="1.1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="ballastdenizen">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="7,6" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.ballastdenizen">
|
||||
<Replace tag="[amount]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="HoldBreathMultiplier" value="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="engineengineer">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="2,5" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.engineengineer">
|
||||
<Replace tag="[amount]" value="2.5" color="gui.green"/>
|
||||
<Replace tag="[max]" value="5" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.maxspeed" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.doesnotstack" />
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="1" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.025" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="2" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.05" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="3" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.075" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="4" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.1" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="5" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.125" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="6" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.15" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel levelequals="7" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.175" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
<AbilityGroupInterval interval="60">
|
||||
<Conditions>
|
||||
<AbilityConditionHasLevel minlevel="8" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveItemStatToTags tags="engine" stattype="EngineMaxSpeed" stackable="false" value="1.2" />
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="multifunctional">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="6,1" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.multifunctional">
|
||||
<Replace tag="[powerincrease]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAttack">
|
||||
<Conditions>
|
||||
<AbilityConditionAttackData tags="wrenchitem"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnAttack">
|
||||
<Conditions>
|
||||
<AbilityConditionAttackData tags="crowbaritem"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyAttackData addeddamagemultiplier="0.5"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="salvagecrew">
|
||||
<Icon texture="Content/UI/TalentsIcons3.png" sheetindex="0,7" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.bonusxponmission">
|
||||
<Replace tag="[xpbonus]" value="30" color="gui.green"/>
|
||||
<Replace tag="[missiontype]" value="missiontype.salvage" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.salvagecrew">
|
||||
<Replace tag="[swimbonus]" value="50" color="gui.green"/>
|
||||
<Replace tag="[resistanceamount]" value="10" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnGainMissionExperience">
|
||||
<Conditions>
|
||||
<AbilityConditionMission missiontype="Salvage"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="1.3"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupInterval interval="0.9">
|
||||
<Conditions>
|
||||
<AbilityConditionInSubmarine submarinetype="Wreck" />
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="This" disabledeltatime="true">
|
||||
<Affliction identifier="salvagecrew" amount="1.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupInterval>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="machinemaniac" trackedstat="machinemaniac_counter" trackedmax="100">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="3,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.machinemaniac">
|
||||
<Replace tag="[bonus]" value="80" color="gui.green"/>
|
||||
<Replace tag="[amount]" value="3" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.machinemaniac.30">
|
||||
<Replace tag="[requirement]" value="12" color="gui.green"/>
|
||||
<Replace tag="[amount]" value="10" color="gui.green"/>
|
||||
<Replace tag="[skill]" value="stattypenames.mechanicalskillbonus" color="gui.orange"/>
|
||||
<Replace tag="[xpamount]" value="500" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.machinemaniac.50">
|
||||
<Replace tag="[requirement]" value="20" color="gui.green"/>
|
||||
<Replace tag="[level]" value="1" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.machinemaniac.100">
|
||||
<Replace tag="[requirement]" value="40" color="gui.green"/>
|
||||
<Replace tag="[amount]" value="50" color="gui.green"/>
|
||||
</Description>
|
||||
|
||||
<!-- Give the player stats that tracks if the rewards should be given -->
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_30" value="1" maxvalue="1" setvalue="true" />
|
||||
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_50" value="1" maxvalue="1" setvalue="true" />
|
||||
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_100" value="1" maxvalue="1" setvalue="true" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="machinemaniac_counter" value="1" removeondeath="false" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_30" min="1"/>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="12"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveExperience amount="2000"/>
|
||||
<CharacterAbilityGivePermanentStat stattype="MechanicalSkillBonus" statidentifier="machinemaniac" value="10" setvalue="true" removeondeath="false" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_30" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_50" min="1"/>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="20"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityUpgradeSubmarine upgradeprefab="increasemaxpumpflow" upgradecategory="pumps" level="1" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_50" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_100" min="1"/>
|
||||
<AbilityConditionHasPermanentStat statidentifier="machinemaniac_counter" min="40"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat stattype="MechanicalRepairSpeed" statidentifier="machinemaniac" value="0.5" setvalue="true" removeondeath="false" />
|
||||
<CharacterAbilityResetPermanentStat statidentifier="machinemaniac_100" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="tinkerer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,1" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.increasemaxrepairmechanical">
|
||||
<Replace tag="[percentage]" value="40" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MaxRepairConditionMultiplierMechanical" value="0.4"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="modularrepairs">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,1" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.repairpack" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.freeupgrade">
|
||||
<Replace tag="[level]" value="1" color="gui.green"/>
|
||||
<Replace tag="[upgradename]" value="upgradename.decreaselowskillfixduration" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="repairpack"/>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityUpgradeSubmarine upgradeprefab="decreaselowskillfixduration" upgradecategory="electricaldevices" level="1" />
|
||||
<CharacterAbilityUpgradeSubmarine upgradeprefab="decreaselowskillfixduration" upgradecategory="mechanicaldevices" level="1" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="hullfixer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="0,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.fixfoamgrenade,entityname.handheldstatusmonitor" color="gui.orange"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.additionalstattype">
|
||||
<Replace tag="[amount]" value="25" color="gui.green"/>
|
||||
<Replace tag="[stattype]" value="stattypenames.repairtoolstructurerepairmultiplier" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="RepairToolStructureRepairMultiplier" value="0.25"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="fixfoamgrenade"/>
|
||||
<AddedRecipe itemidentifier="handheldstatusmonitor"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="letitdrain">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="1,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.letitdrain"/>
|
||||
<Description tag="talentdescription.letitdrainreminder">
|
||||
<Replace tag="[itemcount]" value="2" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.portablepump" color="gui.orange"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGivePermanentStat statidentifier="portablepump" stattype="MaxAttachableCount" value="2" />
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AddedRecipe itemidentifier="portablepump"/>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="quickfixer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.quickfixer">
|
||||
<Replace tag="[amount]" value="20" color="gui.green"/>
|
||||
<Replace tag="[duration]" value="10" color="gui.green"/>
|
||||
</Description>
|
||||
<Description tag="talentdescription.repairmechanicaldevicestwiceasfast"/>
|
||||
<AbilityGroupEffect abilityeffecttype="None">
|
||||
<Abilities>
|
||||
<CharacterAbilityGiveStat stattype="MechanicalRepairSpeed" value="1"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnRepairComplete">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="fabricator,door,engine,oxygengenerator,pump,turretammosource,deconstructor,medicalfabricator,ductblock"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityApplyStatusEffects>
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="Character" disabledeltatime="true">
|
||||
<Affliction identifier="quickfixer" amount="10.0"/>
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilityApplyStatusEffects>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="scrapsavant">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="6,3" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.doublescrapoutput" />
|
||||
<Description tag="talentdescription.findadditionalscrap">
|
||||
<Replace tag="[probability]" value="20" color="gui.green"/>
|
||||
</Description>
|
||||
<AbilityGroupEffect abilityeffecttype="OnItemDeconstructedMaterial">
|
||||
<Conditions>
|
||||
<AbilityConditionItem tags="scrap"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilityModifyValue multiplyvalue="2"/>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
<AbilityGroupEffect abilityeffecttype="OnOpenItemContainer">
|
||||
<Conditions>
|
||||
<AbilityConditionItemInSubmarine submarinetype="Wreck"/>
|
||||
<AbilityConditionItem tags="container"/>
|
||||
</Conditions>
|
||||
<Abilities>
|
||||
<CharacterAbilitySpawnItemsToContainer randomchance="0.2" oncepercontainer="true">
|
||||
<StatusEffects>
|
||||
<StatusEffect type="OnAbility" target="UseTarget" >
|
||||
<SpawnItem identifiers="scrap" spawnposition="ThisInventory" spawnifcantbecontained="false" />
|
||||
</StatusEffect>
|
||||
</StatusEffects>
|
||||
</CharacterAbilitySpawnItemsToContainer>
|
||||
</Abilities>
|
||||
</AbilityGroupEffect>
|
||||
</Talent>
|
||||
|
||||
<Talent identifier="safetyfirst">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="4,2" sheetelementsize="128,128"/>
|
||||
<Description tag="talentdescription.unlockrecipe">
|
||||
<Replace tag="[itemname]" value="entityname.safetyharness" color="gui.orange"/>
|
||||
</Description>
|
||||
<AddedRecipe itemidentifier="safetyharness"/>
|
||||
</Talent>
|
||||
|
||||
</Talents>
|
cmd/log_format_test/main.go (new file): 28 lines

@@ -0,0 +1,28 @@
package main

import (
    "time"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

func main() {
    // Initialize logger with DEBUG level
    logger.Init(logger.LevelDebug)

    // Test different log levels
    logger.Info("This is an info message")
    logger.Debug("This is a debug message")
    logger.Warning("This is a warning message")
    logger.Error("This is an error message")
    logger.Trace("This is a trace message (not visible at DEBUG level)")

    // Test with a goroutine
    logger.SafeGo(func() {
        time.Sleep(10 * time.Millisecond)
        logger.Info("Message from goroutine")
    })

    // Wait for goroutine to complete
    time.Sleep(20 * time.Millisecond)
}
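This test program can be run from the repository root with the standard toolchain, e.g. `go run ./cmd/log_format_test` (no extra flags are assumed); per its own comments, the Trace line should be suppressed at the DEBUG level while the other levels and the goroutine message are printed.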
glob_test.go: 10 changes

@@ -1,6 +1,7 @@
package main

import (
    "cook/utils"
    "os"
    "path/filepath"
    "testing"
@@ -76,9 +77,14 @@ func TestGlobExpansion(t *testing.T) {

    for _, tc := range tests {
        t.Run(tc.name, func(t *testing.T) {
            files, err := expandFilePatterns(tc.patterns)
            // Convert string patterns to map[string]struct{} for ExpandGLobs
            patternMap := make(map[string]struct{})
            for _, pattern := range tc.patterns {
                patternMap[pattern] = struct{}{}
            }
            files, err := utils.ExpandGLobs(patternMap)
            if err != nil {
                t.Fatalf("expandFilePatterns failed: %v", err)
                t.Fatalf("ExpandGLobs failed: %v", err)
            }

            if len(files) != tc.expected {
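For reference, the hunk above replaces the old expandFilePatterns([]string) helper with utils.ExpandGLobs, which takes a set of patterns. A minimal sketch of the new call pattern, assuming ExpandGLobs returns a slice of matched file paths plus an error (the return types are not visible in this hunk, so []string is an assumption):

```go
package main

import (
    "fmt"
    "log"

    "cook/utils"
)

func main() {
    // Patterns like these appear in the launch configurations above.
    patterns := []string{"**/*.xml", "*.yml"}

    // ExpandGLobs takes a map[string]struct{} used as a set of glob patterns,
    // mirroring the conversion done in TestGlobExpansion.
    patternMap := make(map[string]struct{})
    for _, pattern := range patterns {
        patternMap[pattern] = struct{}{}
    }

    files, err := utils.ExpandGLobs(patternMap)
    if err != nil {
        log.Fatalf("ExpandGLobs failed: %v", err)
    }
    fmt.Printf("%d files matched\n", len(files))
}
```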
go.mod: 55 changes

@@ -1,39 +1,36 @@
module modify
module cook

go 1.24.1
go 1.23.2

require (
    github.com/PaesslerAG/jsonpath v0.1.1
    github.com/antchfx/xmlquery v1.4.4
    git.site.quack-lab.dev/dave/cylogger v1.3.0
    github.com/bmatcuk/doublestar/v4 v4.8.1
    github.com/stretchr/testify v1.10.0
    github.com/yuin/gopher-lua v1.1.1
    gopkg.in/yaml.v3 v3.0.1
    gorm.io/gorm v1.30.0
)

require (
    dario.cat/mergo v1.0.0 // indirect
    github.com/Microsoft/go-winio v0.6.2 // indirect
    github.com/ProtonMail/go-crypto v1.1.5 // indirect
    github.com/cloudflare/circl v1.6.0 // indirect
    github.com/cyphar/filepath-securejoin v0.4.1 // indirect
    github.com/emirpasic/gods v1.18.1 // indirect
    github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
    github.com/go-git/go-billy/v5 v5.6.2 // indirect
    github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
    github.com/kevinburke/ssh_config v1.2.0 // indirect
    github.com/pjbgf/sha1cd v0.3.2 // indirect
    github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
    github.com/skeema/knownhosts v1.3.1 // indirect
    github.com/xanzy/ssh-agent v0.3.3 // indirect
    golang.org/x/crypto v0.35.0 // indirect
    golang.org/x/sys v0.30.0 // indirect
    gopkg.in/warnings.v0 v0.1.2 // indirect
)

require (
    github.com/PaesslerAG/gval v1.0.0 // indirect
    github.com/antchfx/xpath v1.3.3 // indirect
    github.com/go-git/go-git/v5 v5.14.0
    github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
    golang.org/x/net v0.35.0 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/google/go-cmp v0.6.0 // indirect
    github.com/hexops/valast v1.5.0 // indirect
    github.com/jinzhu/inflection v1.0.0 // indirect
    github.com/jinzhu/now v1.1.5 // indirect
    github.com/kr/pretty v0.3.1 // indirect
    github.com/mattn/go-sqlite3 v1.14.22 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/rogpeppe/go-internal v1.14.1 // indirect
    github.com/tidwall/gjson v1.18.0 // indirect
    github.com/tidwall/match v1.1.1 // indirect
    github.com/tidwall/pretty v1.2.0 // indirect
    github.com/tidwall/sjson v1.2.5 // indirect
    golang.org/x/mod v0.21.0 // indirect
    golang.org/x/sync v0.11.0 // indirect
    golang.org/x/text v0.22.0 // indirect
    golang.org/x/tools v0.26.0 // indirect
    gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
    mvdan.cc/gofumpt v0.4.0 // indirect
)

require gorm.io/driver/sqlite v1.6.0
go.sum: 193 changes

@@ -1,177 +1,68 @@
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
|
||||
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/PaesslerAG/gval v1.0.0 h1:GEKnRwkWDdf9dOmKcNrar9EA1bz1z9DqPIO1+iLzhd8=
|
||||
github.com/PaesslerAG/gval v1.0.0/go.mod h1:y/nm5yEyTeX6av0OfKJNp9rBNj2XrGhAf5+v24IBN1I=
|
||||
github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8=
|
||||
github.com/PaesslerAG/jsonpath v0.1.1 h1:c1/AToHQMVsduPAa4Vh6xp2U0evy4t8SWp8imEsylIk=
|
||||
github.com/PaesslerAG/jsonpath v0.1.1/go.mod h1:lVboNxFGal/VwW6d9JzIy56bUsYAP6tH/x80vjnCseY=
|
||||
github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4=
|
||||
github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
|
||||
github.com/antchfx/xmlquery v1.4.4 h1:mxMEkdYP3pjKSftxss4nUHfjBhnMk4imGoR96FRY2dg=
|
||||
github.com/antchfx/xmlquery v1.4.4/go.mod h1:AEPEEPYE9GnA2mj5Ur2L5Q5/2PycJ0N9Fusrx9b12fc=
|
||||
github.com/antchfx/xpath v1.3.3 h1:tmuPQa1Uye0Ym1Zn65vxPgfltWb/Lxu2jeqIGteJSRs=
|
||||
github.com/antchfx/xpath v1.3.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
|
||||
git.site.quack-lab.dev/dave/cylogger v1.3.0 h1:eTWPUD+ThVi8kGIsRcE0XDeoH3yFb5miFEODyKUdWJw=
|
||||
git.site.quack-lab.dev/dave/cylogger v1.3.0/go.mod h1:wctgZplMvroA4X6p8f4B/LaCKtiBcT1Pp+L14kcS8jk=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk=
|
||||
github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
|
||||
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
|
||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
|
||||
github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU=
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
|
||||
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
|
||||
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
|
||||
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
|
||||
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
|
||||
github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60=
|
||||
github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k=
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
|
||||
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
|
||||
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
|
||||
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
|
||||
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/hexops/autogold v0.8.1 h1:wvyd/bAJ+Dy+DcE09BoLk6r4Fa5R5W+O+GUzmR985WM=
|
||||
github.com/hexops/autogold v0.8.1/go.mod h1:97HLDXyG23akzAoRYJh/2OBs3kd80eHyKPvZw0S5ZBY=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/hexops/valast v1.5.0 h1:FBTuvVi0wjTngtXJRZXMbkN/Dn6DgsUsBwch2DUJU8Y=
|
||||
github.com/hexops/valast v1.5.0/go.mod h1:Jcy1pNH7LNraVaAZDLyv21hHg2WBv9Nf9FL6fGxU7o4=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k=
|
||||
github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY=
|
||||
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
|
||||
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
||||
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
||||
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs=
|
||||
golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8=
|
||||
golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU=
golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ=
golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM=
mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ=
357 logger/logger.go (deleted)
@@ -1,357 +0,0 @@
package logger

import (
	"fmt"
	"io"
	"log"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"sync"
	"time"
)

// LogLevel defines the severity of log messages
type LogLevel int

const (
	// LevelError is for critical errors that should always be displayed
	LevelError LogLevel = iota
	// LevelWarning is for important warnings
	LevelWarning
	// LevelInfo is for informational messages
	LevelInfo
	// LevelDebug is for detailed debugging information
	LevelDebug
	// LevelTrace is for very detailed tracing information
	LevelTrace
)

var levelNames = map[LogLevel]string{
	LevelError:   "ERROR",
	LevelWarning: "WARNING",
	LevelInfo:    "INFO",
	LevelDebug:   "DEBUG",
	LevelTrace:   "TRACE",
}

var levelColors = map[LogLevel]string{
	LevelError:   "\033[1;31m", // Bold Red
	LevelWarning: "\033[1;33m", // Bold Yellow
	LevelInfo:    "\033[1;32m", // Bold Green
	LevelDebug:   "\033[1;36m", // Bold Cyan
	LevelTrace:   "\033[1;35m", // Bold Magenta
}

// ResetColor is the ANSI code to reset text color
const ResetColor = "\033[0m"

// Logger is our custom logger with level support
type Logger struct {
	mu            sync.Mutex
	out           io.Writer
	currentLevel  LogLevel
	prefix        string
	flag          int
	useColors     bool
	callerOffset  int
	defaultFields map[string]interface{}
}

var (
	// DefaultLogger is the global logger instance
	DefaultLogger *Logger
	// defaultLogLevel is the default log level if not specified
	defaultLogLevel = LevelInfo
	// Global mutex for DefaultLogger initialization
	initMutex sync.Mutex
)

// ParseLevel converts a string log level to LogLevel
func ParseLevel(levelStr string) LogLevel {
	switch strings.ToUpper(levelStr) {
	case "ERROR":
		return LevelError
	case "WARNING", "WARN":
		return LevelWarning
	case "INFO":
		return LevelInfo
	case "DEBUG":
		return LevelDebug
	case "TRACE":
		return LevelTrace
	default:
		return defaultLogLevel
	}
}

// String returns the string representation of the log level
func (l LogLevel) String() string {
	if name, ok := levelNames[l]; ok {
		return name
	}
	return fmt.Sprintf("Level(%d)", l)
}

// New creates a new Logger instance
func New(out io.Writer, prefix string, flag int) *Logger {
	return &Logger{
		out:           out,
		currentLevel:  defaultLogLevel,
		prefix:        prefix,
		flag:          flag,
		useColors:     true,
		callerOffset:  0,
		defaultFields: make(map[string]interface{}),
	}
}

// Init initializes the DefaultLogger
func Init(level LogLevel) {
	initMutex.Lock()
	defer initMutex.Unlock()

	if DefaultLogger == nil {
		DefaultLogger = New(os.Stdout, "", log.Lmicroseconds|log.Lshortfile)
	}
	DefaultLogger.SetLevel(level)
}

// SetLevel sets the current log level
func (l *Logger) SetLevel(level LogLevel) {
	l.mu.Lock()
	defer l.mu.Unlock()
	l.currentLevel = level
}

// GetLevel returns the current log level
func (l *Logger) GetLevel() LogLevel {
	l.mu.Lock()
	defer l.mu.Unlock()
	return l.currentLevel
}

// SetCallerOffset sets the caller offset for correct file and line reporting
func (l *Logger) SetCallerOffset(offset int) {
	l.mu.Lock()
	defer l.mu.Unlock()
	l.callerOffset = offset
}

// WithField adds a field to the logger's context
func (l *Logger) WithField(key string, value interface{}) *Logger {
	newLogger := &Logger{
		out:           l.out,
		currentLevel:  l.currentLevel,
		prefix:        l.prefix,
		flag:          l.flag,
		useColors:     l.useColors,
		callerOffset:  l.callerOffset,
		defaultFields: make(map[string]interface{}),
	}

	// Copy existing fields
	for k, v := range l.defaultFields {
		newLogger.defaultFields[k] = v
	}

	// Add new field
	newLogger.defaultFields[key] = value
	return newLogger
}

// WithFields adds multiple fields to the logger's context
func (l *Logger) WithFields(fields map[string]interface{}) *Logger {
	newLogger := &Logger{
		out:           l.out,
		currentLevel:  l.currentLevel,
		prefix:        l.prefix,
		flag:          l.flag,
		useColors:     l.useColors,
		callerOffset:  l.callerOffset,
		defaultFields: make(map[string]interface{}),
	}

	// Copy existing fields
	for k, v := range l.defaultFields {
		newLogger.defaultFields[k] = v
	}

	// Add new fields
	for k, v := range fields {
		newLogger.defaultFields[k] = v
	}
	return newLogger
}

// formatMessage formats a log message with level, time, file, and line information
func (l *Logger) formatMessage(level LogLevel, format string, args ...interface{}) string {
	var msg string
	if len(args) > 0 {
		msg = fmt.Sprintf(format, args...)
	} else {
		msg = format
	}

	// Format default fields if any
	var fields string
	if len(l.defaultFields) > 0 {
		var pairs []string
		for k, v := range l.defaultFields {
			pairs = append(pairs, fmt.Sprintf("%s=%v", k, v))
		}
		fields = " " + strings.Join(pairs, " ")
	}

	var levelColor, resetColor string
	if l.useColors {
		levelColor = levelColors[level]
		resetColor = ResetColor
	}

	var caller string
	if l.flag&log.Lshortfile != 0 || l.flag&log.Llongfile != 0 {
		_, file, line, ok := runtime.Caller(3 + l.callerOffset)
		if !ok {
			file = "???"
			line = 0
		}

		if l.flag&log.Lshortfile != 0 {
			file = filepath.Base(file)
		}
		caller = fmt.Sprintf("%s:%d ", file, line)
	}

	var timeStr string
	if l.flag&(log.Ldate|log.Ltime|log.Lmicroseconds) != 0 {
		t := time.Now()
		if l.flag&log.Ldate != 0 {
			timeStr += fmt.Sprintf("%04d/%02d/%02d ", t.Year(), t.Month(), t.Day())
		}
		if l.flag&(log.Ltime|log.Lmicroseconds) != 0 {
			timeStr += fmt.Sprintf("%02d:%02d:%02d", t.Hour(), t.Minute(), t.Second())
			if l.flag&log.Lmicroseconds != 0 {
				timeStr += fmt.Sprintf(".%06d", t.Nanosecond()/1000)
			}
			timeStr += " "
		}
	}

	return fmt.Sprintf("%s%s%s%s[%s%s%s]%s %s\n",
		l.prefix, timeStr, caller, levelColor, levelNames[level], resetColor, fields, resetColor, msg)
}

// log logs a message at the specified level
func (l *Logger) log(level LogLevel, format string, args ...interface{}) {
	if level > l.currentLevel {
		return
	}

	l.mu.Lock()
	defer l.mu.Unlock()

	msg := l.formatMessage(level, format, args...)
	fmt.Fprint(l.out, msg)
}

// Error logs an error message
func (l *Logger) Error(format string, args ...interface{}) {
	l.log(LevelError, format, args...)
}

// Warning logs a warning message
func (l *Logger) Warning(format string, args ...interface{}) {
	l.log(LevelWarning, format, args...)
}

// Info logs an informational message
func (l *Logger) Info(format string, args ...interface{}) {
	l.log(LevelInfo, format, args...)
}

// Debug logs a debug message
func (l *Logger) Debug(format string, args ...interface{}) {
	l.log(LevelDebug, format, args...)
}

// Trace logs a trace message
func (l *Logger) Trace(format string, args ...interface{}) {
	l.log(LevelTrace, format, args...)
}

// Global log functions that use DefaultLogger

// Error logs an error message using the default logger
func Error(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Error(format, args...)
}

// Warning logs a warning message using the default logger
func Warning(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Warning(format, args...)
}

// Info logs an informational message using the default logger
func Info(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Info(format, args...)
}

// Debug logs a debug message using the default logger
func Debug(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Debug(format, args...)
}

// Trace logs a trace message using the default logger
func Trace(format string, args ...interface{}) {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	DefaultLogger.Trace(format, args...)
}

// SetLevel sets the log level for the default logger
func SetLevel(level LogLevel) {
	if DefaultLogger == nil {
		Init(level)
		return
	}
	DefaultLogger.SetLevel(level)
}

// GetLevel gets the log level for the default logger
func GetLevel() LogLevel {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.GetLevel()
}

// WithField returns a new logger with the field added to the default logger's context
func WithField(key string, value interface{}) *Logger {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.WithField(key, value)
}

// WithFields returns a new logger with the fields added to the default logger's context
func WithFields(fields map[string]interface{}) *Logger {
	if DefaultLogger == nil {
		Init(defaultLogLevel)
	}
	return DefaultLogger.WithFields(fields)
}
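For context, the deleted package above exposed a small leveled-logging API: ParseLevel, Init, the package-level Error/Warning/Info/Debug/Trace helpers, and WithField/WithFields for scoped loggers. A minimal usage sketch, assuming the old import path "modify/logger" that main.go used before the switch to the external cylogger module; the level string and field values are illustrative only:

package main

import "modify/logger"

func main() {
	// Pick a level from a string; unknown values fall back to INFO.
	logger.SetLevel(logger.ParseLevel("DEBUG"))

	// Package-level helpers lazily initialize DefaultLogger on first use.
	logger.Info("starting up at level %s", logger.GetLevel().String())

	// Derive a scoped logger that carries a contextual field on every line.
	fileLog := logger.WithField("file", "data.xml")
	fileLog.Debug("processing %d entries", 42)
}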
829 main.go
@@ -1,338 +1,701 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
|
||||
"modify/logger"
|
||||
"modify/processor"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// mainLogger is a scoped logger for the main package.
|
||||
var mainLogger = logger.Default.WithPrefix("main")
|
||||
|
||||
type GlobalStats struct {
|
||||
TotalMatches int
|
||||
TotalModifications int
|
||||
ProcessedFiles int
|
||||
FailedFiles int
|
||||
TotalMatches int64
|
||||
TotalModifications int64
|
||||
ProcessedFiles int64
|
||||
FailedFiles int64
|
||||
ModificationsPerCommand sync.Map
|
||||
}
|
||||
|
||||
var stats GlobalStats
|
||||
var stdLogger *log.Logger // Legacy logger for compatibility
|
||||
|
||||
var (
|
||||
jsonFlag = flag.Bool("json", false, "Process JSON files")
|
||||
xmlFlag = flag.Bool("xml", false, "Process XML files")
|
||||
gitFlag = flag.Bool("git", false, "Use git to manage files")
|
||||
resetFlag = flag.Bool("reset", false, "Reset files to their original state")
|
||||
logLevel = flag.String("loglevel", "INFO", "Set log level: ERROR, WARNING, INFO, DEBUG, TRACE")
|
||||
repo *git.Repository
|
||||
worktree *git.Worktree
|
||||
stats GlobalStats = GlobalStats{
|
||||
ModificationsPerCommand: sync.Map{},
|
||||
}
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Keep standard logger setup for compatibility with legacy code
|
||||
log.SetFlags(log.Lmicroseconds | log.Lshortfile)
|
||||
stdLogger = log.New(os.Stdout, "", log.Lmicroseconds|log.Lshortfile)
|
||||
|
||||
stats = GlobalStats{}
|
||||
}
|
||||
|
||||
func main() {
|
||||
// TODO: Implement some sort of git integration
|
||||
// Maybe use go-git
|
||||
// Specify a -git flag
|
||||
// If we are operating with git then:
|
||||
// Initialize a repo if one doesn't exist (try to open, right?)
|
||||
// For each file matched by glob first figure out if it's already tracked
|
||||
// If not tracked then track it and commit (either it alone or maybe multiple together somehow)
|
||||
// Then reset the file (to undo previous modifications)
|
||||
// THEN change the file
|
||||
// In addition add a -undo flag that will ONLY reset the files without changing them
|
||||
// Only for the ones matched by glob
|
||||
// ^ important because binary files would fuck us up
|
||||
flag.Usage = func() {
|
||||
CreateExampleConfig()
|
||||
fmt.Fprintf(os.Stderr, "Usage: %s [options] <pattern> <lua_expression> <...files_or_globs>\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\nOptions:\n")
|
||||
fmt.Fprintf(os.Stderr, " -json\n")
|
||||
fmt.Fprintf(os.Stderr, " Process JSON files\n")
|
||||
fmt.Fprintf(os.Stderr, " -xml\n")
|
||||
fmt.Fprintf(os.Stderr, " Process XML files\n")
|
||||
fmt.Fprintf(os.Stderr, " -git\n")
|
||||
fmt.Fprintf(os.Stderr, " Use git to manage files\n")
|
||||
fmt.Fprintf(os.Stderr, " -reset\n")
|
||||
fmt.Fprintf(os.Stderr, " Reset files to their original state\n")
|
||||
fmt.Fprintf(os.Stderr, " -loglevel string\n")
|
||||
fmt.Fprintf(os.Stderr, " Set logging level: ERROR, WARNING, INFO, DEBUG, TRACE (default \"INFO\")\n")
|
||||
fmt.Fprintf(os.Stderr, " -mode string\n")
|
||||
fmt.Fprintf(os.Stderr, " Processing mode: regex, xml, json (default \"regex\")\n")
|
||||
fmt.Fprintf(os.Stderr, " -json\n")
|
||||
fmt.Fprintf(os.Stderr, " Enable JSON mode for processing JSON files\n")
|
||||
fmt.Fprintf(os.Stderr, "\nExamples:\n")
|
||||
fmt.Fprintf(os.Stderr, " Regex mode (default):\n")
|
||||
fmt.Fprintf(os.Stderr, " %s \"<value>(\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, " XML mode:\n")
|
||||
fmt.Fprintf(os.Stderr, " %s -xml \"//value\" \"*1.5\" data.xml\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, " %s \"<value>(\\\\d+)</value>\" \"*1.5\" data.xml\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, " JSON mode:\n")
|
||||
fmt.Fprintf(os.Stderr, " %s -json \"$.items[*].value\" \"*1.5\" data.json\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, " %s -json data.json\n", os.Args[0])
|
||||
fmt.Fprintf(os.Stderr, "\nNote: v1, v2, etc. are used to refer to capture groups as numbers.\n")
|
||||
fmt.Fprintf(os.Stderr, " s1, s2, etc. are used to refer to capture groups as strings.\n")
|
||||
fmt.Fprintf(os.Stderr, " Helper functions: num(str) converts string to number, str(num) converts number to string\n")
|
||||
fmt.Fprintf(os.Stderr, " is_number(str) checks if a string is numeric\n")
|
||||
fmt.Fprintf(os.Stderr, " For XML and JSON, the captured values are exposed as 'v', which can be of any type we capture (string, number, table).\n")
|
||||
fmt.Fprintf(os.Stderr, " If expression starts with an operator like *, /, +, -, =, etc., v1 is automatically prepended\n")
|
||||
fmt.Fprintf(os.Stderr, " You can use any valid Lua code, including if statements, loops, etc.\n")
|
||||
fmt.Fprintf(os.Stderr, " Glob patterns are supported for file selection (*.xml, data/**.xml, etc.)\n")
|
||||
}
|
||||
|
||||
// TODO: Fix bed shitting when doing *.yml in barotrauma directory
|
||||
flag.Parse()
|
||||
|
||||
// Initialize logger with the specified log level
|
||||
level := logger.ParseLevel(*logLevel)
|
||||
logger.Init(level)
|
||||
logger.Info("Initializing with log level: %s", level.String())
|
||||
|
||||
args := flag.Args()
|
||||
if *resetFlag {
|
||||
*gitFlag = true
|
||||
}
|
||||
|
||||
if len(args) < 3 {
|
||||
logger.Error("At least %d arguments are required", 3)
|
||||
logger.InitFlag()
|
||||
mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String())
|
||||
mainLogger.Trace("Full argv: %v", os.Args)
|
||||
|
||||
if flag.NArg() == 0 {
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
|
||||
// Get the appropriate pattern and expression based on mode
|
||||
var pattern, luaExpr string
|
||||
var filePatterns []string
|
||||
|
||||
pattern = args[0]
|
||||
luaExpr = args[1]
|
||||
filePatterns = args[2:]
|
||||
|
||||
// Prepare the Lua expression
|
||||
originalLuaExpr := luaExpr
|
||||
luaExpr = processor.BuildLuaScript(luaExpr)
|
||||
if originalLuaExpr != luaExpr {
|
||||
logger.Debug("Transformed Lua expression from %q to %q", originalLuaExpr, luaExpr)
|
||||
}
|
||||
|
||||
if *gitFlag {
|
||||
logger.Info("Git integration enabled, setting up git repository")
|
||||
err := setupGit()
|
||||
mainLogger.Debug("Getting database connection")
|
||||
db, err := utils.GetDB()
|
||||
if err != nil {
|
||||
logger.Error("Failed to setup git: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error setting up git: %v\n", err)
|
||||
mainLogger.Error("Failed to get database: %v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
mainLogger.Debug("Database connection established")
|
||||
|
||||
// Expand file patterns with glob support
|
||||
logger.Debug("Expanding file patterns: %v", filePatterns)
|
||||
files, err := expandFilePatterns(filePatterns)
|
||||
workdone, err := HandleSpecialArgs(args, err, db)
|
||||
if err != nil {
|
||||
logger.Error("Failed to expand file patterns: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
|
||||
mainLogger.Error("Failed to handle special args: %v", err)
|
||||
return
|
||||
}
|
||||
if workdone {
|
||||
mainLogger.Info("Special arguments handled, exiting.")
|
||||
return
|
||||
}
|
||||
|
||||
if len(files) == 0 {
|
||||
logger.Warning("No files found matching the specified patterns")
|
||||
fmt.Fprintf(os.Stderr, "No files found matching the specified patterns\n")
|
||||
// The plan is:
|
||||
// Load all commands
|
||||
mainLogger.Debug("Loading commands from arguments")
|
||||
mainLogger.Trace("Arguments: %v", args)
|
||||
commands, err := utils.LoadCommands(args)
|
||||
if err != nil || len(commands) == 0 {
|
||||
mainLogger.Error("Failed to load commands: %v", err)
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
// Collect global modifiers from special entries and filter them out
|
||||
vars := map[string]interface{}{}
|
||||
filtered := make([]utils.ModifyCommand, 0, len(commands))
|
||||
for _, c := range commands {
|
||||
if len(c.Modifiers) > 0 && c.Name == "" && c.Regex == "" && len(c.Regexes) == 0 && c.Lua == "" && len(c.Files) == 0 {
|
||||
for k, v := range c.Modifiers {
|
||||
vars[k] = v
|
||||
}
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, c)
|
||||
}
|
||||
if len(vars) > 0 {
|
||||
mainLogger.Info("Loaded %d global modifiers", len(vars))
|
||||
processor.SetVariables(vars)
|
||||
}
|
||||
commands = filtered
|
||||
mainLogger.Info("Loaded %d commands", len(commands))
|
||||
|
||||
if *gitFlag {
|
||||
logger.Info("Cleaning up git files before processing")
|
||||
err := cleanupGitFiles(files)
|
||||
if *utils.Filter != "" {
|
||||
mainLogger.Info("Filtering commands by name: %s", *utils.Filter)
|
||||
commands = utils.FilterCommands(commands, *utils.Filter)
|
||||
mainLogger.Info("Filtered %d commands", len(commands))
|
||||
}
|
||||
|
||||
// Then aggregate all the globs and deduplicate them
|
||||
mainLogger.Debug("Aggregating globs and deduplicating")
|
||||
globs := utils.AggregateGlobs(commands)
|
||||
mainLogger.Debug("Aggregated %d globs before deduplication", utils.CountGlobsBeforeDedup(commands))
|
||||
|
||||
for _, command := range commands {
|
||||
mainLogger.Trace("Command: %s", command.Name)
|
||||
if len(command.Regexes) > 0 {
|
||||
mainLogger.Trace("Regexes: %v", command.Regexes)
|
||||
} else {
|
||||
mainLogger.Trace("Regex: %s", command.Regex)
|
||||
}
|
||||
mainLogger.Trace("Files: %v", command.Files)
|
||||
mainLogger.Trace("Lua: %s", command.Lua)
|
||||
mainLogger.Trace("Reset: %t", command.Reset)
|
||||
mainLogger.Trace("Isolate: %t", command.Isolate)
|
||||
mainLogger.Trace("LogLevel: %s", command.LogLevel)
|
||||
}
|
||||
|
||||
// Resolve all the files for all the globs
|
||||
mainLogger.Info("Found %d unique file patterns", len(globs))
|
||||
mainLogger.Debug("Expanding glob patterns to files")
|
||||
files, err := utils.ExpandGLobs(globs)
|
||||
if err != nil {
|
||||
logger.Error("Failed to cleanup git files: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error cleaning up git files: %v\n", err)
|
||||
mainLogger.Error("Failed to expand file patterns: %v", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
if *resetFlag {
|
||||
logger.Info("Files reset to their original state, nothing more to do")
|
||||
log.Printf("Files reset to their original state, nothing more to do")
|
||||
mainLogger.Info("Found %d files to process", len(files))
|
||||
mainLogger.Trace("Files to process: %v", files)
|
||||
|
||||
// Somehow connect files to commands via globs..
|
||||
// For each file check every glob of every command
|
||||
// Maybe memoize this part
|
||||
// That way we know what commands affect what files
|
||||
mainLogger.Debug("Associating files with commands")
|
||||
associations, err := utils.AssociateFilesWithCommands(files, commands)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to associate files with commands: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Create the processor based on mode
|
||||
var proc processor.Processor
|
||||
switch {
|
||||
case *xmlFlag:
|
||||
proc = &processor.XMLProcessor{}
|
||||
logger.Info("Starting XML modifier with XPath %q, expression %q on %d files",
|
||||
pattern, luaExpr, len(files))
|
||||
case *jsonFlag:
|
||||
proc = &processor.JSONProcessor{}
|
||||
logger.Info("Starting JSON modifier with JSONPath %q, expression %q on %d files",
|
||||
pattern, luaExpr, len(files))
|
||||
default:
|
||||
proc = &processor.RegexProcessor{}
|
||||
logger.Info("Starting regex modifier with pattern %q, expression %q on %d files",
|
||||
pattern, luaExpr, len(files))
|
||||
mainLogger.Debug("Files associated with commands")
|
||||
mainLogger.Trace("File-command associations: %v", associations)
|
||||
// Per-file association summary for better visibility when debugging
|
||||
for file, assoc := range associations {
|
||||
cmdNames := make([]string, 0, len(assoc.Commands))
|
||||
for _, c := range assoc.Commands {
|
||||
cmdNames = append(cmdNames, c.Name)
|
||||
}
|
||||
isoNames := make([]string, 0, len(assoc.IsolateCommands))
|
||||
for _, c := range assoc.IsolateCommands {
|
||||
isoNames = append(isoNames, c.Name)
|
||||
}
|
||||
mainLogger.Debug("File %q has %d regular and %d isolate commands", file, len(assoc.Commands), len(assoc.IsolateCommands))
|
||||
mainLogger.Trace("\tRegular: %v", cmdNames)
|
||||
mainLogger.Trace("\tIsolate: %v", isoNames)
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
// Process each file
|
||||
for _, file := range files {
|
||||
mainLogger.Debug("Resetting files where necessary")
|
||||
err = utils.ResetWhereNecessary(associations, db)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to reset files where necessary: %v", err)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("Files reset where necessary")
|
||||
|
||||
// Then for each file run all commands associated with the file
|
||||
workers := make(chan struct{}, *utils.ParallelFiles)
|
||||
wg := sync.WaitGroup{}
|
||||
mainLogger.Debug("Starting file processing with %d parallel workers", *utils.ParallelFiles)
|
||||
|
||||
// Add performance tracking
|
||||
startTime := time.Now()
|
||||
|
||||
// Create a map to store loggers for each command
|
||||
commandLoggers := make(map[string]*logger.Logger)
|
||||
for _, command := range commands {
|
||||
// Create a named logger for each command
|
||||
cmdName := command.Name
|
||||
if cmdName == "" {
|
||||
// If no name is provided, use a short version of the regex pattern
|
||||
if len(command.Regex) > 20 {
|
||||
cmdName = command.Regex[:17] + "..."
|
||||
} else {
|
||||
cmdName = command.Regex
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the log level for this specific command
|
||||
cmdLogLevel := logger.ParseLevel(command.LogLevel)
|
||||
|
||||
// Create a logger with the command name as a field
|
||||
commandLoggers[command.Name] = logger.Default.WithField("command", cmdName)
|
||||
commandLoggers[command.Name].SetLevel(cmdLogLevel)
|
||||
|
||||
mainLogger.Debug("Created logger for command %q with log level %s", cmdName, cmdLogLevel.String())
|
||||
}
|
||||
|
||||
for file, association := range associations {
|
||||
workers <- struct{}{}
|
||||
wg.Add(1)
|
||||
go func(file string) {
|
||||
logger.SafeGoWithArgs(func(args ...interface{}) {
|
||||
defer func() { <-workers }()
|
||||
defer wg.Done()
|
||||
logger.Debug("Processing file: %s", file)
|
||||
// Track per-file processing time
|
||||
fileStartTime := time.Now()
|
||||
|
||||
// It's a bit fucked, maybe I could do better to call it from proc... But it'll do for now
|
||||
modCount, matchCount, err := processor.Process(proc, file, pattern, luaExpr)
|
||||
mainLogger.Debug("Reading file %q", file)
|
||||
fileData, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
logger.Error("Failed to process file %s: %v", file, err)
|
||||
fmt.Fprintf(os.Stderr, "Failed to process file %s: %v\n", file, err)
|
||||
stats.FailedFiles++
|
||||
} else {
|
||||
if modCount > 0 {
|
||||
logger.Info("Successfully processed file %s: %d modifications from %d matches",
|
||||
file, modCount, matchCount)
|
||||
} else if matchCount > 0 {
|
||||
logger.Info("Found %d matches in file %s but made no modifications",
|
||||
matchCount, file)
|
||||
} else {
|
||||
logger.Debug("No matches found in file: %s", file)
|
||||
mainLogger.Error("Failed to read file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
stats.ProcessedFiles++
|
||||
stats.TotalMatches += matchCount
|
||||
stats.TotalModifications += modCount
|
||||
fileDataStr := string(fileData)
|
||||
mainLogger.Trace("File %q content: %s", file, utils.LimitString(fileDataStr, 500))
|
||||
|
||||
isChanged := false
|
||||
mainLogger.Debug("Running isolate commands for file %q", file)
|
||||
fileDataStr, err = RunIsolateCommands(association, file, fileDataStr)
|
||||
if err != nil && err != NothingToDo {
|
||||
mainLogger.Error("Failed to run isolate commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
}(file)
|
||||
if err != NothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
mainLogger.Debug("Running other commands for file %q", file)
|
||||
fileDataStr, err = RunOtherCommands(file, fileDataStr, association, commandLoggers)
|
||||
if err != nil && err != NothingToDo {
|
||||
mainLogger.Error("Failed to run other commands for file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
if err != NothingToDo {
|
||||
isChanged = true
|
||||
}
|
||||
|
||||
if isChanged {
|
||||
mainLogger.Debug("Saving file %q to database", file)
|
||||
err = db.SaveFile(file, fileData)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to save file %q to database: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("File %q saved to database", file)
|
||||
}
|
||||
|
||||
mainLogger.Debug("Writing file %q", file)
|
||||
err = os.WriteFile(file, []byte(fileDataStr), 0644)
|
||||
if err != nil {
|
||||
mainLogger.Error("Failed to write file %q: %v", file, err)
|
||||
atomic.AddInt64(&stats.FailedFiles, 1)
|
||||
return
|
||||
}
|
||||
mainLogger.Debug("File %q written", file)
|
||||
|
||||
// Only increment ProcessedFiles once per file, after all processing is complete
|
||||
atomic.AddInt64(&stats.ProcessedFiles, 1)
|
||||
|
||||
mainLogger.Debug("File %q processed in %v", file, time.Since(fileStartTime))
|
||||
}, file, commands)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
processingTime := time.Since(startTime)
|
||||
mainLogger.Info("Processing completed in %v", processingTime)
|
||||
processedFiles := atomic.LoadInt64(&stats.ProcessedFiles)
|
||||
if processedFiles > 0 {
|
||||
mainLogger.Info("Average time per file: %v", processingTime/time.Duration(processedFiles))
|
||||
}
|
||||
|
||||
// TODO: Also give each command its own logger, maybe prefix it with something... Maybe give commands a name?
|
||||
// Do that with logger.WithField("loglevel", level.String())
|
||||
// Since each command also has its own log level
|
||||
// TODO: Maybe even figure out how to run individual commands...?
|
||||
// TODO: What to do with git? Figure it out ....
|
||||
|
||||
// if *gitFlag {
|
||||
// mainLogger.Info("Git integration enabled, setting up git repository")
|
||||
// err := setupGit()
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to setup git: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error setting up git: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
|
||||
// mainLogger.Debug("Expanding file patterns")
|
||||
// files, err := expandFilePatterns(filePatterns)
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to expand file patterns: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error expanding file patterns: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
|
||||
// if *gitFlag {
|
||||
// mainLogger.Info("Cleaning up git files before processing")
|
||||
// err := cleanupGitFiles(files)
|
||||
// if err != nil {
|
||||
// mainLogger.Error("Failed to cleanup git files: %v", err)
|
||||
// fmt.Fprintf(os.Stderr, "Error cleaning up git files: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// }
|
||||
// if *resetFlag {
|
||||
// mainLogger.Info("Files reset to their original state, nothing more to do")
|
||||
// log.Printf("Files reset to their original state, nothing more to do")
|
||||
// return
|
||||
// }
|
||||
|
||||
// Print summary
|
||||
if stats.TotalModifications == 0 {
|
||||
logger.Warning("No modifications were made in any files")
|
||||
fmt.Fprintf(os.Stderr, "No modifications were made in any files\n")
|
||||
totalModifications := atomic.LoadInt64(&stats.TotalModifications)
|
||||
if totalModifications == 0 {
|
||||
mainLogger.Warning("No modifications were made in any files")
|
||||
} else {
|
||||
logger.Info("Operation complete! Modified %d values in %d/%d files",
|
||||
stats.TotalModifications, stats.ProcessedFiles, stats.ProcessedFiles+stats.FailedFiles)
|
||||
fmt.Printf("Operation complete! Modified %d values in %d/%d files\n",
|
||||
stats.TotalModifications, stats.ProcessedFiles, stats.ProcessedFiles+stats.FailedFiles)
|
||||
failedFiles := atomic.LoadInt64(&stats.FailedFiles)
|
||||
mainLogger.Info("Operation complete! Modified %d values in %d/%d files",
|
||||
totalModifications, processedFiles, processedFiles+failedFiles)
|
||||
sortedCommands := []string{}
|
||||
stats.ModificationsPerCommand.Range(func(key, value interface{}) bool {
|
||||
sortedCommands = append(sortedCommands, key.(string))
|
||||
return true
|
||||
})
|
||||
sort.Strings(sortedCommands)
|
||||
|
||||
for _, command := range sortedCommands {
|
||||
count, _ := stats.ModificationsPerCommand.Load(command)
|
||||
if count.(int) > 0 {
|
||||
mainLogger.Info("\tCommand %q made %d modifications", command, count)
|
||||
} else {
|
||||
mainLogger.Warning("\tCommand %q made no modifications", command)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setupGit() error {
|
||||
cwd, err := os.Getwd()
|
||||
func HandleSpecialArgs(args []string, err error, db utils.DB) (bool, error) {
|
||||
handleSpecialArgsLogger := logger.Default.WithPrefix("HandleSpecialArgs")
|
||||
handleSpecialArgsLogger.Debug("Handling special arguments: %v", args)
|
||||
switch args[0] {
|
||||
case "reset":
|
||||
handleSpecialArgsLogger.Info("Resetting all files")
|
||||
err = utils.ResetAllFiles(db)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get current working directory: %w", err)
|
||||
handleSpecialArgsLogger.Error("Failed to reset all files: %v", err)
|
||||
return true, err
|
||||
}
|
||||
logger.Debug("Current working directory obtained: %s", cwd)
|
||||
|
||||
logger.Debug("Attempting to open git repository at %s", cwd)
|
||||
repo, err = git.PlainOpen(cwd)
|
||||
handleSpecialArgsLogger.Info("All files reset")
|
||||
return true, nil
|
||||
case "dump":
|
||||
handleSpecialArgsLogger.Info("Dumping all files from database")
|
||||
err = db.RemoveAllFiles()
|
||||
if err != nil {
|
||||
logger.Debug("No existing git repository found at %s, attempting to initialize a new git repository.", cwd)
|
||||
repo, err = git.PlainInit(cwd, false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to initialize a new git repository at %s: %w", cwd, err)
|
||||
handleSpecialArgsLogger.Error("Failed to remove all files from database: %v", err)
|
||||
return true, err
|
||||
}
|
||||
logger.Info("Successfully initialized a new git repository at %s", cwd)
|
||||
} else {
|
||||
logger.Info("Successfully opened existing git repository at %s", cwd)
|
||||
handleSpecialArgsLogger.Info("All files removed from database")
|
||||
return true, nil
|
||||
}
|
||||
|
||||
logger.Debug("Attempting to obtain worktree for repository at %s", cwd)
|
||||
worktree, err = repo.Worktree()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to obtain worktree for repository at %s: %w", cwd, err)
|
||||
}
|
||||
logger.Debug("Successfully obtained worktree for repository at %s", cwd)
|
||||
return nil
|
||||
handleSpecialArgsLogger.Debug("No special arguments handled, returning false")
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func expandFilePatterns(patterns []string) ([]string, error) {
|
||||
var files []string
|
||||
filesMap := make(map[string]bool)
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get current working directory: %w", err)
|
||||
func CreateExampleConfig() {
|
||||
createExampleConfigLogger := logger.Default.WithPrefix("CreateExampleConfig")
|
||||
createExampleConfigLogger.Debug("Creating example configuration file")
|
||||
commands := []utils.ModifyCommand{
|
||||
// Global modifiers only entry (no name/regex/lua/files)
|
||||
{
|
||||
Modifiers: map[string]interface{}{
|
||||
"foobar": 4,
|
||||
"multiply": 1.5,
|
||||
"prefix": "NEW_",
|
||||
"enabled": true,
|
||||
},
|
||||
},
|
||||
// Multi-regex example using $variable in Lua
|
||||
{
|
||||
Name: "RFToolsMultiply",
|
||||
Regexes: []string{"generatePerTick = !num", "ticksPer\\w+ = !num", "generatorRFPerTick = !num"},
|
||||
Lua: "* $foobar",
|
||||
Files: []string{"polymc/instances/**/rftools*.toml", `polymc\\instances\\**\\rftools*.toml`},
|
||||
Reset: true,
|
||||
// LogLevel defaults to INFO
|
||||
},
|
||||
// Named capture groups with arithmetic and string ops
|
||||
{
|
||||
Name: "UpdateAmountsAndItems",
|
||||
Regex: `(?P<amount>!num)\s+units\s+of\s+(?P<item>[A-Za-z_\-]+)`,
|
||||
Lua: `amount = amount * $multiply; item = upper(item); return true`,
|
||||
Files: []string{"data/**/*.txt"},
|
||||
// INFO log level
|
||||
},
|
||||
// Full replacement via Lua 'replacement' variable
|
||||
{
|
||||
Name: "BumpMinorVersion",
|
||||
Regex: `version\s*=\s*"(?P<major>!num)\.(?P<minor>!num)\.(?P<patch>!num)"`,
|
||||
Lua: `replacement = format("version=\"%s.%s.%s\"", major, num(minor)+1, 0); return true`,
|
||||
Files: []string{"config/*.ini", "config/*.cfg"},
|
||||
},
|
||||
// Multiline regex example (DOTALL is auto-enabled). Captures numeric in nested XML.
|
||||
{
|
||||
Name: "XMLNestedValueMultiply",
|
||||
Regex: `<item>\s*\s*<name>!any<\/name>\s*\s*<value>(!num)<\/value>\s*\s*<\/item>`,
|
||||
Lua: `* $multiply`,
|
||||
Files: []string{"data/**/*.xml"},
|
||||
// Demonstrates multiline regex in YAML
|
||||
},
|
||||
// Multiline regexES array, with different patterns handled by same Lua
|
||||
{
|
||||
Name: "MultiLinePatterns",
|
||||
Regexes: []string{
|
||||
`<entry>\s*\n\s*<id>(?P<id>!num)</id>\s*\n\s*<score>(?P<score>!num)</score>\s*\n\s*</entry>`,
|
||||
`\[block\]\nkey=(?P<key>[A-Za-z_]+)\nvalue=(?P<val>!num)`,
|
||||
},
|
||||
Lua: `if is_number(score) then score = score * 2 end; if is_number(val) then val = val * 3 end; return true`,
|
||||
Files: []string{"examples/**/*.*"},
|
||||
LogLevel: "DEBUG",
|
||||
},
|
||||
// Use equals operator shorthand and boolean variable
|
||||
{
|
||||
Name: "EnableFlags",
|
||||
Regex: `enabled\s*=\s*(true|false)`,
|
||||
Lua: `= $enabled`,
|
||||
Files: []string{"**/*.toml"},
|
||||
},
|
||||
// Demonstrate NoDedup to allow overlapping replacements
|
||||
{
|
||||
Name: "OverlappingGroups",
|
||||
Regex: `(?P<a>!num)(?P<b>!num)`,
|
||||
Lua: `a = num(a) + 1; b = num(b) + 1; return true`,
|
||||
Files: []string{"overlap/**/*.txt"},
|
||||
NoDedup: true,
|
||||
},
|
||||
// Isolate command example operating on entire matched block
|
||||
{
|
||||
Name: "IsolateUppercaseBlock",
|
||||
Regex: `BEGIN\n(?P<block>!any)\nEND`,
|
||||
Lua: `block = upper(block); return true`,
|
||||
Files: []string{"logs/**/*.log"},
|
||||
Isolate: true,
|
||||
LogLevel: "TRACE",
|
||||
},
|
||||
// Using !rep placeholder and arrays of files
|
||||
{
|
||||
Name: "RepeatPlaceholderExample",
|
||||
Regex: `name: (.*) !rep(, .* , 2)`,
|
||||
Lua: `-- no-op, just demonstrate placeholder; return false`,
|
||||
Files: []string{"lists/**/*.yml", "lists/**/*.yaml"},
|
||||
},
|
||||
// Using string variable in Lua expression
|
||||
{
|
||||
Name: "PrefixKeys",
|
||||
Regex: `(?P<key>[A-Za-z0-9_]+)\s*=`,
|
||||
Lua: `key = $prefix .. key; return true`,
|
||||
Files: []string{"**/*.properties"},
|
||||
},
|
||||
// JSON mode examples
|
||||
{
|
||||
Name: "JSONArrayMultiply",
|
||||
JSON: true,
|
||||
Lua: `for i, item in ipairs(data.items) do data.items[i].value = item.value * 2 end; return true`,
|
||||
Files: []string{"data/**/*.json"},
|
||||
},
|
||||
{
|
||||
Name: "JSONObjectUpdate",
|
||||
JSON: true,
|
||||
Lua: `data.version = "2.0.0"; data.enabled = true; return true`,
|
||||
Files: []string{"config/**/*.json"},
|
||||
},
|
||||
{
|
||||
Name: "JSONNestedModify",
|
||||
JSON: true,
|
||||
Lua: `if data.settings and data.settings.performance then data.settings.performance.multiplier = data.settings.performance.multiplier * 1.5 end; return true`,
|
||||
Files: []string{"settings/**/*.json"},
|
||||
},
|
||||
}
|
||||
|
||||
logger.Debug("Expanding patterns from directory: %s", cwd)
|
||||
for _, pattern := range patterns {
|
||||
logger.Trace("Processing pattern: %s", pattern)
|
||||
matches, _ := doublestar.Glob(os.DirFS(cwd), pattern)
|
||||
logger.Debug("Found %d matches for pattern %s", len(matches), pattern)
|
||||
for _, m := range matches {
|
||||
info, err := os.Stat(m)
|
||||
data, err := yaml.Marshal(commands)
|
||||
if err != nil {
|
||||
logger.Warning("Error getting file info for %s: %v", m, err)
|
||||
createExampleConfigLogger.Error("Failed to marshal example config: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
createExampleConfigLogger.Debug("Writing example_cook.yml")
|
||||
err = os.WriteFile("example_cook.yml", data, 0644)
|
||||
if err != nil {
|
||||
createExampleConfigLogger.Error("Failed to write example_cook.yml: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
createExampleConfigLogger.Info("Wrote example_cook.yml")
|
||||
}
|
||||
|
||||
var NothingToDo = errors.New("nothing to do")
|
||||
|
||||
func RunOtherCommands(file string, fileDataStr string, association utils.FileCommandAssociation, commandLoggers map[string]*logger.Logger) (string, error) {
|
||||
runOtherCommandsLogger := mainLogger.WithPrefix("RunOtherCommands").WithField("file", file)
|
||||
runOtherCommandsLogger.Debug("Running other commands for file")
|
||||
runOtherCommandsLogger.Trace("File data before modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
// Separate JSON and regex commands for different processing approaches
|
||||
jsonCommands := []utils.ModifyCommand{}
|
||||
regexCommands := []utils.ModifyCommand{}
|
||||
|
||||
for _, command := range association.Commands {
|
||||
if command.JSON || *utils.JSON {
|
||||
jsonCommands = append(jsonCommands, command)
|
||||
} else {
|
||||
regexCommands = append(regexCommands, command)
|
||||
}
|
||||
}
|
||||
|
||||
// Process JSON commands sequentially (each operates on the entire file)
|
||||
for _, command := range jsonCommands {
|
||||
cmdLogger := logger.Default
|
||||
if cmdLog, ok := commandLoggers[command.Name]; ok {
|
||||
cmdLogger = cmdLog
|
||||
}
|
||||
|
||||
cmdLogger.Debug("Processing file with JSON mode for command %q", command.Name)
|
||||
newModifications, err := processor.ProcessJSON(fileDataStr, command, file)
|
||||
if err != nil {
|
||||
runOtherCommandsLogger.Error("Failed to process file with JSON command %q: %v", command.Name, err)
|
||||
continue
|
||||
}
|
||||
if !info.IsDir() && !filesMap[m] {
|
||||
logger.Trace("Adding file to process list: %s", m)
|
||||
filesMap[m], files = true, append(files, m)
|
||||
|
||||
// Apply JSON modifications immediately
|
||||
if len(newModifications) > 0 {
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(newModifications, fileDataStr)
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
cmdLogger.Debug("Applied %d JSON modifications for command %q", count, command.Name)
|
||||
}
|
||||
|
||||
count, ok := stats.ModificationsPerCommand.Load(command.Name)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
stats.ModificationsPerCommand.Store(command.Name, count.(int)+len(newModifications))
|
||||
}
|
||||
|
||||
// Aggregate regex modifications and execute them
|
||||
modifications := []utils.ReplaceCommand{}
|
||||
numCommandsConsidered := 0
|
||||
for _, command := range regexCommands {
|
||||
cmdLogger := logger.Default
|
||||
if cmdLog, ok := commandLoggers[command.Name]; ok {
|
||||
cmdLogger = cmdLog
|
||||
}
|
||||
|
||||
patterns := command.Regexes
|
||||
if len(patterns) == 0 {
|
||||
patterns = []string{command.Regex}
|
||||
}
|
||||
for idx, pattern := range patterns {
|
||||
tmpCmd := command
|
||||
tmpCmd.Regex = pattern
|
||||
cmdLogger.Debug("Begin processing file with command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
|
||||
numCommandsConsidered++
|
||||
newModifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
|
||||
if err != nil {
|
||||
runOtherCommandsLogger.Error("Failed to process file with command %q: %v", command.Name, err)
|
||||
continue
|
||||
}
|
||||
modifications = append(modifications, newModifications...)
|
||||
count, ok := stats.ModificationsPerCommand.Load(command.Name)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
stats.ModificationsPerCommand.Store(command.Name, count.(int)+len(newModifications))
|
||||
|
||||
cmdLogger.Debug("Command %q generated %d modifications (pattern %d/%d)", command.Name, len(newModifications), idx+1, len(patterns))
|
||||
cmdLogger.Trace("Modifications generated by command %q: %v", command.Name, newModifications)
|
||||
if len(newModifications) == 0 {
|
||||
cmdLogger.Debug("No modifications yielded by command %q (pattern %d/%d)", command.Name, idx+1, len(patterns))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(files) > 0 {
|
||||
logger.Debug("Found %d files to process: %v", len(files), files)
|
||||
runOtherCommandsLogger.Debug("Aggregated %d modifications from %d command-pattern runs", len(modifications), numCommandsConsidered)
|
||||
runOtherCommandsLogger.Trace("All aggregated modifications: %v", modifications)
|
||||
|
||||
if len(modifications) == 0 {
|
||||
runOtherCommandsLogger.Warning("No modifications found for file")
|
||||
return fileDataStr, NothingToDo
|
||||
}
|
||||
return files, nil
|
||||
runOtherCommandsLogger.Debug("Executing %d modifications for file", len(modifications))
|
||||
|
||||
// Sort commands in reverse order for safe replacements
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runOtherCommandsLogger.Trace("File data after modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runOtherCommandsLogger.Info("Executed %d modifications for file", count)
|
||||
return fileDataStr, nil
|
||||
}
|
||||
|
||||
func cleanupGitFiles(files []string) error {
|
||||
for _, file := range files {
|
||||
logger.Debug("Checking git status for file: %s", file)
|
||||
status, err := worktree.Status()
|
||||
func RunIsolateCommands(association utils.FileCommandAssociation, file string, fileDataStr string) (string, error) {
|
||||
runIsolateCommandsLogger := mainLogger.WithPrefix("RunIsolateCommands").WithField("file", file)
|
||||
runIsolateCommandsLogger.Debug("Running isolate commands for file")
|
||||
runIsolateCommandsLogger.Trace("File data before isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
anythingDone := false
|
||||
for _, isolateCommand := range association.IsolateCommands {
|
||||
// Check if this isolate command should use JSON mode
|
||||
if isolateCommand.JSON || *utils.JSON {
|
||||
runIsolateCommandsLogger.Debug("Begin processing file with JSON isolate command %q", isolateCommand.Name)
|
||||
modifications, err := processor.ProcessJSON(fileDataStr, isolateCommand, file)
|
||||
if err != nil {
|
||||
logger.Error("Error getting worktree status: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error getting worktree status: %v\n", err)
|
||||
return fmt.Errorf("error getting worktree status: %w", err)
|
||||
}
|
||||
if status.IsUntracked(file) {
|
||||
logger.Info("Detected untracked file: %s. Adding to git index.", file)
|
||||
_, err = worktree.Add(file)
|
||||
if err != nil {
|
||||
logger.Error("Error adding file to git: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error adding file to git: %v\n", err)
|
||||
return fmt.Errorf("error adding file to git: %w", err)
|
||||
runIsolateCommandsLogger.Error("Failed to process file with JSON isolate command %q: %v", isolateCommand.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
filename := filepath.Base(file)
|
||||
logger.Info("File %s added successfully. Committing with message: 'Track %s'", filename, filename)
|
||||
_, err = worktree.Commit("Track "+filename, &git.CommitOptions{
|
||||
Author: &object.Signature{
|
||||
Name: "Big Chef",
|
||||
Email: "bigchef@bigchef.com",
|
||||
When: time.Now(),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
logger.Error("Error committing file: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error committing file: %v\n", err)
|
||||
return fmt.Errorf("error committing file: %w", err)
|
||||
if len(modifications) == 0 {
|
||||
runIsolateCommandsLogger.Debug("JSON isolate command %q produced no modifications", isolateCommand.Name)
|
||||
continue
|
||||
}
|
||||
logger.Info("Successfully committed file: %s", filename)
|
||||
anythingDone = true
|
||||
|
||||
runIsolateCommandsLogger.Debug("Executing %d JSON isolate modifications for file", len(modifications))
|
||||
runIsolateCommandsLogger.Trace("JSON isolate modifications: %v", modifications)
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runIsolateCommandsLogger.Trace("File data after JSON isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runIsolateCommandsLogger.Info("Executed %d JSON isolate modifications for file", count)
|
||||
} else {
|
||||
logger.Info("File %s is already tracked. Restoring it to the working tree.", file)
|
||||
err := worktree.Restore(&git.RestoreOptions{
|
||||
Files: []string{file},
|
||||
Staged: true,
|
||||
Worktree: true,
|
||||
})
|
||||
// Regular regex processing for isolate commands
|
||||
runIsolateCommandsLogger.Debug("Begin processing file with isolate command %q", isolateCommand.Regex)
|
||||
patterns := isolateCommand.Regexes
|
||||
if len(patterns) == 0 {
|
||||
patterns = []string{isolateCommand.Regex}
|
||||
}
|
||||
for idx, pattern := range patterns {
|
||||
tmpCmd := isolateCommand
|
||||
tmpCmd.Regex = pattern
|
||||
modifications, err := processor.ProcessRegex(fileDataStr, tmpCmd, file)
|
||||
if err != nil {
|
||||
logger.Error("Error restoring file: %v", err)
|
||||
fmt.Fprintf(os.Stderr, "Error restoring file: %v\n", err)
|
||||
return fmt.Errorf("error restoring file: %w", err)
|
||||
runIsolateCommandsLogger.Error("Failed to process file with isolate command %q (pattern %d/%d): %v", isolateCommand.Name, idx+1, len(patterns), err)
|
||||
continue
|
||||
}
|
||||
logger.Info("File %s restored successfully", file)
|
||||
|
||||
if len(modifications) == 0 {
|
||||
runIsolateCommandsLogger.Debug("Isolate command %q produced no modifications (pattern %d/%d)", isolateCommand.Name, idx+1, len(patterns))
|
||||
continue
|
||||
}
|
||||
anythingDone = true
|
||||
|
||||
runIsolateCommandsLogger.Debug("Executing %d isolate modifications for file", len(modifications))
|
||||
runIsolateCommandsLogger.Trace("Isolate modifications: %v", modifications)
|
||||
var count int
|
||||
fileDataStr, count = utils.ExecuteModifications(modifications, fileDataStr)
|
||||
runIsolateCommandsLogger.Trace("File data after isolate modifications: %s", utils.LimitString(fileDataStr, 200))
|
||||
|
||||
atomic.AddInt64(&stats.TotalModifications, int64(count))
|
||||
|
||||
runIsolateCommandsLogger.Info("Executed %d isolate modifications for file", count)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if !anythingDone {
|
||||
runIsolateCommandsLogger.Debug("No isolate modifications were made for file")
|
||||
return fileDataStr, NothingToDo
|
||||
}
|
||||
return fileDataStr, nil
|
||||
}
|
||||
|
@@ -1,194 +1,407 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"modify/logger"
|
||||
"modify/processor/jsonpath"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"github.com/tidwall/sjson"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// JSONProcessor implements the Processor interface for JSON documents
|
||||
type JSONProcessor struct{}
|
||||
// jsonLogger is a scoped logger for the processor/json package.
|
||||
var jsonLogger = logger.Default.WithPrefix("processor/json")
|
||||
|
||||
// ProcessContent implements the Processor interface for JSONProcessor
|
||||
func (p *JSONProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
|
||||
logger.Debug("Processing JSON content with JSONPath: %s", pattern)
|
||||
// ProcessJSON applies Lua processing to JSON content
|
||||
func ProcessJSON(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||
processJsonLogger := jsonLogger.WithPrefix("ProcessJSON").WithField("commandName", command.Name).WithField("file", filename)
|
||||
processJsonLogger.Debug("Starting JSON processing for file")
|
||||
processJsonLogger.Trace("Initial file content length: %d", len(content))
|
||||
|
||||
// Parse JSON document
|
||||
logger.Trace("Parsing JSON document")
|
||||
var commands []utils.ReplaceCommand
|
||||
startTime := time.Now()
|
||||
|
||||
// Parse JSON content
|
||||
var jsonData interface{}
|
||||
err := json.Unmarshal([]byte(content), &jsonData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to parse JSON: %v", err)
|
||||
return content, 0, 0, fmt.Errorf("error parsing JSON: %v", err)
|
||||
processJsonLogger.Error("Failed to parse JSON content: %v", err)
|
||||
return commands, fmt.Errorf("failed to parse JSON: %v", err)
|
||||
}
|
||||
processJsonLogger.Debug("Successfully parsed JSON content")
|
||||
|
||||
// Find nodes matching the JSONPath pattern
|
||||
logger.Debug("Executing JSONPath query: %s", pattern)
|
||||
nodes, err := jsonpath.Get(jsonData, pattern)
|
||||
if err != nil {
|
||||
logger.Error("Failed to execute JSONPath: %v", err)
|
||||
return content, 0, 0, fmt.Errorf("error getting nodes: %v", err)
|
||||
}
|
||||
|
||||
matchCount := len(nodes)
|
||||
logger.Debug("Found %d nodes matching JSONPath", matchCount)
|
||||
if matchCount == 0 {
|
||||
logger.Warning("No nodes matched the JSONPath pattern: %s", pattern)
|
||||
return content, 0, 0, nil
|
||||
}
|
||||
|
||||
modCount := 0
|
||||
for i, node := range nodes {
|
||||
logger.Trace("Processing node #%d at path: %s with value: %v", i+1, node.Path, node.Value)
|
||||
|
||||
// Initialize Lua
|
||||
// Create Lua state
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
logger.Error("Failed to create Lua state: %v", err)
|
||||
return content, len(nodes), 0, fmt.Errorf("error creating Lua state: %v", err)
|
||||
processJsonLogger.Error("Error creating Lua state: %v", err)
|
||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
defer L.Close()
|
||||
logger.Trace("Lua state initialized successfully")
|
||||
|
||||
err = p.ToLua(L, node.Value)
|
||||
// Set filename global
|
||||
L.SetGlobal("file", lua.LString(filename))
|
||||
|
||||
// Convert JSON data to Lua table
|
||||
luaTable, err := ToLuaTable(L, jsonData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to convert value to Lua: %v", err)
|
||||
return content, len(nodes), 0, fmt.Errorf("error converting to Lua: %v", err)
|
||||
processJsonLogger.Error("Failed to convert JSON to Lua table: %v", err)
|
||||
return commands, fmt.Errorf("failed to convert JSON to Lua table: %v", err)
|
||||
}
|
||||
logger.Trace("Converted node value to Lua: %v", node.Value)
|
||||
|
||||
originalScript := luaExpr
|
||||
fullScript := BuildLuaScript(luaExpr)
|
||||
logger.Debug("Original script: %q, Full script: %q", originalScript, fullScript)
|
||||
// Set the JSON data as a global variable
|
||||
L.SetGlobal("data", luaTable)
|
||||
processJsonLogger.Debug("Set JSON data as Lua global 'data'")
|
||||
|
||||
// Execute Lua script
|
||||
logger.Trace("Executing Lua script: %q", fullScript)
|
||||
if err := L.DoString(fullScript); err != nil {
|
||||
logger.Error("Failed to execute Lua script: %v", err)
|
||||
return content, len(nodes), 0, fmt.Errorf("error executing Lua %q: %v", fullScript, err)
|
||||
// Build and execute Lua script for JSON mode
|
||||
luaExpr := BuildJSONLuaScript(command.Lua)
|
||||
processJsonLogger.Debug("Built Lua script from expression: %q", command.Lua)
|
||||
processJsonLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
processJsonLogger.Error("Lua script execution failed: %v\nScript: %s", err, utils.LimitString(luaExpr, 200))
|
||||
return commands, fmt.Errorf("lua script execution failed: %v", err)
|
||||
}
|
||||
logger.Trace("Lua script executed successfully")
|
||||
processJsonLogger.Debug("Lua script executed successfully")
|
||||
|
||||
// Get modified value
|
||||
result, err := p.FromLua(L)
|
||||
// Check if modification flag is set
|
||||
modifiedVal := L.GetGlobal("modified")
|
||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||
processJsonLogger.Debug("Skipping - no modifications indicated by Lua script")
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// Get the modified data from Lua
|
||||
modifiedData := L.GetGlobal("data")
|
||||
if modifiedData.Type() != lua.LTTable {
|
||||
processJsonLogger.Error("Expected 'data' to be a table after Lua processing, got %s", modifiedData.Type().String())
|
||||
return commands, fmt.Errorf("expected 'data' to be a table after Lua processing")
|
||||
}
|
||||
|
||||
// Convert back to Go interface
|
||||
goData, err := FromLua(L, modifiedData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to get result from Lua: %v", err)
|
||||
return content, len(nodes), 0, fmt.Errorf("error getting result from Lua: %v", err)
|
||||
}
|
||||
logger.Trace("Retrieved modified value from Lua: %v", result)
|
||||
|
||||
modified := false
|
||||
modified = L.GetGlobal("modified").String() == "true"
|
||||
if !modified {
|
||||
logger.Debug("No changes made to node at path: %s", node.Path)
|
||||
continue
|
||||
processJsonLogger.Error("Failed to convert Lua table back to Go: %v", err)
|
||||
return commands, fmt.Errorf("failed to convert Lua table back to Go: %v", err)
|
||||
}
|
||||
|
||||
// Apply the modification to the JSON data
|
||||
logger.Debug("Updating JSON at path: %s with new value: %v", node.Path, result)
|
||||
err = p.updateJSONValue(jsonData, node.Path, result)
|
||||
// Use surgical JSON editing instead of full replacement
|
||||
commands, err = applySurgicalJSONChanges(content, jsonData, goData)
|
||||
if err != nil {
|
||||
logger.Error("Failed to update JSON at path %s: %v", node.Path, err)
|
||||
return content, len(nodes), 0, fmt.Errorf("error updating JSON: %v", err)
|
||||
}
|
||||
logger.Debug("Updated JSON at path: %s successfully", node.Path)
|
||||
modCount++
|
||||
processJsonLogger.Error("Failed to apply surgical JSON changes: %v", err)
|
||||
return commands, fmt.Errorf("failed to apply surgical JSON changes: %v", err)
|
||||
}
|
||||
|
||||
logger.Info("JSON processing complete: %d modifications from %d matches", modCount, matchCount)
|
||||
|
||||
// Convert the modified JSON back to a string with same formatting
|
||||
logger.Trace("Marshalling JSON data back to string")
|
||||
var jsonBytes []byte
|
||||
jsonBytes, err = json.MarshalIndent(jsonData, "", " ")
|
||||
if err != nil {
|
||||
logger.Error("Failed to marshal JSON: %v", err)
|
||||
return content, modCount, matchCount, fmt.Errorf("error marshalling JSON: %v", err)
|
||||
}
|
||||
return string(jsonBytes), modCount, matchCount, nil
|
||||
processJsonLogger.Debug("Total JSON processing time: %v", time.Since(startTime))
|
||||
processJsonLogger.Debug("Generated %d total modifications", len(commands))
|
||||
return commands, nil
|
||||
}
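A minimal caller-side sketch (assuming, as elsewhere in this changeset, that utils.ModifyCommand exposes Name/Lua fields and utils.ReplaceCommand carries From/To byte offsets plus the replacement text With; the exact wrapper built by BuildJSONLuaScript is not shown here, so the script sets the modified flag explicitly):

content, err := os.ReadFile("items.json")
if err != nil {
	log.Fatal(err)
}
cmd := utils.ModifyCommand{
	Name: "raise-prices",
	Lua:  `data.price = data.price * 1.1; modified = true`,
}
// ProcessJSON returns replacement commands; it does not touch the file itself.
cmds, err := ProcessJSON(string(content), cmd, "items.json")
if err != nil {
	log.Fatalf("processing failed: %v", err)
}
for _, rc := range cmds {
	log.Printf("replace bytes [%d:%d) with %d new bytes", rc.From, rc.To, len(rc.With))
}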
|
||||
|
||||
// updateJSONValue updates a value in the JSON structure based on its JSONPath
|
||||
func (p *JSONProcessor) updateJSONValue(jsonData interface{}, path string, newValue interface{}) error {
|
||||
logger.Trace("Updating JSON value at path: %s", path)
|
||||
// applySurgicalJSONChanges compares original and modified data and applies changes surgically
|
||||
func applySurgicalJSONChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
|
||||
var commands []utils.ReplaceCommand
|
||||
|
||||
// Special handling for root node
|
||||
if path == "$" {
|
||||
logger.Debug("Handling special case for root node update")
|
||||
// For the root node, we'll copy the value to the jsonData reference
|
||||
// This is a special case since we can't directly replace the interface{} variable
|
||||
// Convert both to JSON for comparison
|
||||
originalJSON, err := json.Marshal(originalData)
|
||||
if err != nil {
|
||||
return commands, fmt.Errorf("failed to marshal original data: %v", err)
|
||||
}
|
||||
|
||||
// We need to handle different types of root elements
|
||||
switch rootValue := newValue.(type) {
|
||||
modifiedJSON, err := json.Marshal(modifiedData)
|
||||
if err != nil {
|
||||
return commands, fmt.Errorf("failed to marshal modified data: %v", err)
|
||||
}
|
||||
|
||||
// If no changes, return empty commands
|
||||
if string(originalJSON) == string(modifiedJSON) {
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// Try true surgical approach that preserves formatting
|
||||
surgicalCommands, err := applyTrueSurgicalChanges(content, originalData, modifiedData)
|
||||
if err == nil && len(surgicalCommands) > 0 {
|
||||
return surgicalCommands, nil
|
||||
}
|
||||
|
||||
// Fall back to full replacement with proper formatting
|
||||
modifiedJSONIndented, err := json.MarshalIndent(modifiedData, "", " ")
|
||||
if err != nil {
|
||||
return commands, fmt.Errorf("failed to marshal modified data with indentation: %v", err)
|
||||
}
|
||||
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: 0,
|
||||
To: len(content),
|
||||
With: string(modifiedJSONIndented),
|
||||
})
|
||||
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// applyTrueSurgicalChanges attempts to make surgical changes while preserving exact formatting
func applyTrueSurgicalChanges(content string, originalData, modifiedData interface{}) ([]utils.ReplaceCommand, error) {
	var commands []utils.ReplaceCommand

	// Find changes by comparing the data structures
	changes := findDeepChanges("", originalData, modifiedData)

	if len(changes) == 0 {
		return commands, nil
	}

	// Apply changes surgically using sjson.Set() to preserve formatting
	modifiedContent := content
	for path, newValue := range changes {
		var err error
		modifiedContent, err = sjson.Set(modifiedContent, path, newValue)
		if err != nil {
			return nil, fmt.Errorf("failed to apply surgical change at path %s: %v", path, err)
		}
	}

	// If we successfully made changes, create a replacement command
	if modifiedContent != content {
		commands = append(commands, utils.ReplaceCommand{
			From: 0,
			To:   len(content),
			With: modifiedContent,
		})
	}

	return commands, nil
}
|
||||
|
||||
// findDeepChanges recursively finds all paths that need to be changed
|
||||
func findDeepChanges(basePath string, original, modified interface{}) map[string]interface{} {
|
||||
changes := make(map[string]interface{})
|
||||
|
||||
switch orig := original.(type) {
|
||||
case map[string]interface{}:
|
||||
// For objects, we need to copy over all keys
|
||||
rootMap, ok := jsonData.(map[string]interface{})
|
||||
if !ok {
|
||||
// If the original wasn't a map, completely replace it with the new map
|
||||
// This is handled by the jsonpath.Set function
|
||||
logger.Debug("Root was not a map, replacing entire root")
|
||||
return jsonpath.Set(jsonData, path, newValue)
|
||||
if mod, ok := modified.(map[string]interface{}); ok {
|
||||
// Check each key in the modified data
|
||||
for key, modValue := range mod {
|
||||
var currentPath string
|
||||
if basePath == "" {
|
||||
currentPath = key
|
||||
} else {
|
||||
currentPath = basePath + "." + key
|
||||
}
|
||||
|
||||
// Clear the original map
|
||||
logger.Trace("Clearing original root map")
|
||||
for k := range rootMap {
|
||||
delete(rootMap, k)
|
||||
if origValue, exists := orig[key]; exists {
|
||||
// Key exists in both, check if value changed
|
||||
if !deepEqual(origValue, modValue) {
|
||||
// If it's a nested object/array, recurse
|
||||
switch modValue.(type) {
|
||||
case map[string]interface{}, []interface{}:
|
||||
nestedChanges := findDeepChanges(currentPath, origValue, modValue)
|
||||
for nestedPath, nestedValue := range nestedChanges {
|
||||
changes[nestedPath] = nestedValue
|
||||
}
|
||||
default:
|
||||
// Primitive value changed
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// New key added
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
if mod, ok := modified.([]interface{}); ok {
|
||||
// For arrays, check each index
|
||||
for i, modValue := range mod {
|
||||
var currentPath string
|
||||
if basePath == "" {
|
||||
currentPath = fmt.Sprintf("%d", i)
|
||||
} else {
|
||||
currentPath = fmt.Sprintf("%s.%d", basePath, i)
|
||||
}
|
||||
|
||||
// Copy all keys from the new map
|
||||
logger.Trace("Copying keys to root map")
|
||||
for k, v := range rootValue {
|
||||
rootMap[k] = v
|
||||
if i < len(orig) {
|
||||
// Index exists in both, check if value changed
|
||||
if !deepEqual(orig[i], modValue) {
|
||||
// If it's a nested object/array, recurse
|
||||
switch modValue.(type) {
|
||||
case map[string]interface{}, []interface{}:
|
||||
nestedChanges := findDeepChanges(currentPath, orig[i], modValue)
|
||||
for nestedPath, nestedValue := range nestedChanges {
|
||||
changes[nestedPath] = nestedValue
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
// Primitive value changed
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// New array element added
|
||||
changes[currentPath] = modValue
|
||||
}
|
||||
}
|
||||
}
|
||||
default:
|
||||
// For primitive types, compare directly
|
||||
if !deepEqual(original, modified) {
|
||||
if basePath == "" {
|
||||
changes[""] = modified
|
||||
} else {
|
||||
changes[basePath] = modified
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
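A sketch of the change-map format: findDeepChanges emits gjson/sjson-style dot paths (array indices as bare numbers), which applyTrueSurgicalChanges feeds straight into sjson.Set:

original := map[string]interface{}{
	"store": map[string]interface{}{
		"book": []interface{}{
			map[string]interface{}{"title": "The Two Towers", "price": 23.45},
		},
	},
}
modified := map[string]interface{}{
	"store": map[string]interface{}{
		"book": []interface{}{
			map[string]interface{}{"title": "The Two Towers", "price": 19.99},
		},
	},
}
changes := findDeepChanges("", original, modified)
// changes == map[string]interface{}{"store.book.0.price": 19.99}
doc := `{"store": {"book": [{"title": "The Two Towers", "price": 23.45}]}}`
for path, v := range changes {
	doc, _ = sjson.Set(doc, path, v) // untouched parts keep their original formatting
}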
|
||||
// deepEqual performs deep comparison of two values
func deepEqual(a, b interface{}) bool {
	if a == nil && b == nil {
		return true
	}
	if a == nil || b == nil {
		return false
	}

	switch av := a.(type) {
	case map[string]interface{}:
		if bv, ok := b.(map[string]interface{}); ok {
			if len(av) != len(bv) {
				return false
			}
			for k, v := range av {
				if !deepEqual(v, bv[k]) {
					return false
				}
			}
			return true
		}
		return false
	case []interface{}:
		if bv, ok := b.([]interface{}); ok {
			if len(av) != len(bv) {
				return false
			}
			for i, v := range av {
				if !deepEqual(v, bv[i]) {
					return false
				}
			}
			return true
		}
		return false
	default:
		return a == b
	}
}
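Since both arguments always come from encoding/json (maps, slices, float64, string, bool, nil), reflect.DeepEqual should give the same answer on this type domain; a quick sanity sketch:

a := map[string]interface{}{"n": 1.0, "tags": []interface{}{"x"}}
b := map[string]interface{}{"n": 1.0, "tags": []interface{}{"x"}}
fmt.Println(deepEqual(a, b), reflect.DeepEqual(a, b)) // true true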
|
||||
|
||||
// ToLuaTable converts a Go interface{} to a Lua table recursively
|
||||
func ToLuaTable(L *lua.LState, data interface{}) (*lua.LTable, error) {
|
||||
toLuaTableLogger := jsonLogger.WithPrefix("ToLuaTable")
|
||||
toLuaTableLogger.Debug("Converting Go interface to Lua table")
|
||||
toLuaTableLogger.Trace("Input data type: %T", data)
|
||||
|
||||
switch v := data.(type) {
|
||||
case map[string]interface{}:
|
||||
toLuaTableLogger.Debug("Converting map to Lua table")
|
||||
table := L.CreateTable(0, len(v))
|
||||
for key, value := range v {
|
||||
luaValue, err := ToLuaValue(L, value)
|
||||
if err != nil {
|
||||
toLuaTableLogger.Error("Failed to convert map value for key %q: %v", key, err)
|
||||
return nil, err
|
||||
}
|
||||
table.RawSetString(key, luaValue)
|
||||
}
|
||||
return table, nil
|
||||
|
||||
case []interface{}:
|
||||
// For arrays, we need to handle similarly
|
||||
rootArray, ok := jsonData.([]interface{})
|
||||
if !ok {
|
||||
// If the original wasn't an array, use jsonpath.Set
|
||||
logger.Debug("Root was not an array, replacing entire root")
|
||||
return jsonpath.Set(jsonData, path, newValue)
|
||||
toLuaTableLogger.Debug("Converting slice to Lua table")
|
||||
table := L.CreateTable(len(v), 0)
|
||||
for i, value := range v {
|
||||
luaValue, err := ToLuaValue(L, value)
|
||||
if err != nil {
|
||||
toLuaTableLogger.Error("Failed to convert slice value at index %d: %v", i, err)
|
||||
return nil, err
|
||||
}
|
||||
table.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
|
||||
}
|
||||
return table, nil
|
||||
|
||||
// Clear and recreate the array
|
||||
logger.Trace("Replacing root array")
|
||||
*&rootArray = rootValue
|
||||
return nil
|
||||
case string:
|
||||
toLuaTableLogger.Debug("Converting string to Lua string")
|
||||
return nil, fmt.Errorf("expected table or array, got string")
|
||||
|
||||
case float64:
|
||||
toLuaTableLogger.Debug("Converting float64 to Lua number")
|
||||
return nil, fmt.Errorf("expected table or array, got number")
|
||||
|
||||
case bool:
|
||||
toLuaTableLogger.Debug("Converting bool to Lua boolean")
|
||||
return nil, fmt.Errorf("expected table or array, got boolean")
|
||||
|
||||
case nil:
|
||||
toLuaTableLogger.Debug("Converting nil to Lua nil")
|
||||
return nil, fmt.Errorf("expected table or array, got nil")
|
||||
|
||||
default:
|
||||
// For other types, use jsonpath.Set
|
||||
logger.Debug("Replacing root with primitive value")
|
||||
return jsonpath.Set(jsonData, path, newValue)
|
||||
}
|
||||
toLuaTableLogger.Error("Unsupported type for Lua table conversion: %T", v)
|
||||
return nil, fmt.Errorf("unsupported type for Lua table conversion: %T", v)
|
||||
}
|
||||
}
|
||||
|
||||
// For non-root paths, use the regular Set method
|
||||
logger.Trace("Using regular Set method for non-root path")
|
||||
err := jsonpath.Set(jsonData, path, newValue)
|
||||
// ToLuaValue converts a Go interface{} to a Lua value
|
||||
func ToLuaValue(L *lua.LState, data interface{}) (lua.LValue, error) {
|
||||
toLuaValueLogger := jsonLogger.WithPrefix("ToLuaValue")
|
||||
toLuaValueLogger.Debug("Converting Go interface to Lua value")
|
||||
toLuaValueLogger.Trace("Input data type: %T", data)
|
||||
|
||||
switch v := data.(type) {
|
||||
case map[string]interface{}:
|
||||
toLuaValueLogger.Debug("Converting map to Lua table")
|
||||
table := L.CreateTable(0, len(v))
|
||||
for key, value := range v {
|
||||
luaValue, err := ToLuaValue(L, value)
|
||||
if err != nil {
|
||||
logger.Error("Failed to set JSON value at path %s: %v", path, err)
|
||||
return fmt.Errorf("failed to update JSON value at path '%s': %w", path, err)
|
||||
toLuaValueLogger.Error("Failed to convert map value for key %q: %v", key, err)
|
||||
return lua.LNil, err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
table.RawSetString(key, luaValue)
|
||||
}
|
||||
return table, nil
|
||||
|
||||
// ToLua converts JSON values to Lua variables
|
||||
func (p *JSONProcessor) ToLua(L *lua.LState, data interface{}) error {
|
||||
table, err := ToLua(L, data)
|
||||
case []interface{}:
|
||||
toLuaValueLogger.Debug("Converting slice to Lua table")
|
||||
table := L.CreateTable(len(v), 0)
|
||||
for i, value := range v {
|
||||
luaValue, err := ToLuaValue(L, value)
|
||||
if err != nil {
|
||||
return err
|
||||
toLuaValueLogger.Error("Failed to convert slice value at index %d: %v", i, err)
|
||||
return lua.LNil, err
|
||||
}
|
||||
L.SetGlobal("v", table)
|
||||
return nil
|
||||
}
|
||||
table.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
|
||||
}
|
||||
return table, nil
|
||||
|
||||
// FromLua retrieves values from Lua
|
||||
func (p *JSONProcessor) FromLua(L *lua.LState) (interface{}, error) {
|
||||
luaValue := L.GetGlobal("v")
|
||||
return FromLua(L, luaValue)
|
||||
case string:
|
||||
toLuaValueLogger.Debug("Converting string to Lua string")
|
||||
return lua.LString(v), nil
|
||||
|
||||
case float64:
|
||||
toLuaValueLogger.Debug("Converting float64 to Lua number")
|
||||
return lua.LNumber(v), nil
|
||||
|
||||
case bool:
|
||||
toLuaValueLogger.Debug("Converting bool to Lua boolean")
|
||||
return lua.LBool(v), nil
|
||||
|
||||
case nil:
|
||||
toLuaValueLogger.Debug("Converting nil to Lua nil")
|
||||
return lua.LNil, nil
|
||||
|
||||
default:
|
||||
toLuaValueLogger.Error("Unsupported type for Lua value conversion: %T", v)
|
||||
return lua.LNil, fmt.Errorf("unsupported type for Lua value conversion: %T", v)
|
||||
}
|
||||
}
|
||||
|
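A round-trip sketch of the Lua bridge above, assuming NewLuaState and FromLua behave as they are used in ProcessJSON (FromLua turning an LValue back into the matching Go value):

L, err := NewLuaState()
if err != nil {
	log.Fatal(err)
}
defer L.Close()

data := map[string]interface{}{"price": 10.0, "tags": []interface{}{"sale"}}
tbl, err := ToLuaTable(L, data)
if err != nil {
	log.Fatal(err)
}
L.SetGlobal("data", tbl)
if err := L.DoString(`data.price = data.price * 2`); err != nil {
	log.Fatal(err)
}
back, err := FromLua(L, L.GetGlobal("data"))
if err != nil {
	log.Fatal(err)
}
fmt.Printf("%v\n", back) // roughly map[price:20 tags:[sale]]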
File diff suppressed because it is too large
@@ -1,490 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// JSONStep represents a single step in a JSONPath query
|
||||
type JSONStep struct {
|
||||
Type StepType
|
||||
Key string // For Child/RecursiveDescent
|
||||
Index int // For Index (use -1 for wildcard "*")
|
||||
}
|
||||
|
||||
// JSONNode represents a value in the JSON data with its path
|
||||
type JSONNode struct {
|
||||
Value interface{} // The value found at the path
|
||||
Path string // The exact JSONPath where the value was found
|
||||
}
|
||||
|
||||
// StepType defines the types of steps in a JSONPath
|
||||
type StepType int
|
||||
|
||||
const (
|
||||
RootStep StepType = iota // $ - The root element
|
||||
ChildStep // .key - Direct child access
|
||||
RecursiveDescentStep // ..key - Recursive search for key
|
||||
WildcardStep // .* - All children of an object
|
||||
IndexStep // [n] - Array index access (or [*] for all elements)
|
||||
)
|
||||
|
||||
// TraversalMode determines how the traversal behaves
|
||||
type TraversalMode int
|
||||
|
||||
const (
|
||||
CollectMode TraversalMode = iota // Just collect matched nodes
|
||||
ModifyFirstMode // Modify first matching node
|
||||
ModifyAllMode // Modify all matching nodes
|
||||
)
|
||||
|
||||
// ParseJSONPath parses a JSONPath string into a sequence of steps
|
||||
func ParseJSONPath(path string) ([]JSONStep, error) {
|
||||
if len(path) == 0 || path[0] != '$' {
|
||||
return nil, fmt.Errorf("path must start with $; received: %q", path)
|
||||
}
|
||||
|
||||
steps := []JSONStep{}
|
||||
i := 0
|
||||
|
||||
for i < len(path) {
|
||||
switch path[i] {
|
||||
case '$':
|
||||
steps = append(steps, JSONStep{Type: RootStep})
|
||||
i++
|
||||
case '.':
|
||||
i++
|
||||
if i < len(path) && path[i] == '.' {
|
||||
// Recursive descent
|
||||
i++
|
||||
key, nextPos := readKey(path, i)
|
||||
steps = append(steps, JSONStep{Type: RecursiveDescentStep, Key: key})
|
||||
i = nextPos
|
||||
} else {
|
||||
// Child step or wildcard
|
||||
key, nextPos := readKey(path, i)
|
||||
if key == "*" {
|
||||
steps = append(steps, JSONStep{Type: WildcardStep})
|
||||
} else {
|
||||
steps = append(steps, JSONStep{Type: ChildStep, Key: key})
|
||||
}
|
||||
i = nextPos
|
||||
}
|
||||
case '[':
|
||||
// Index step
|
||||
i++
|
||||
indexStr, nextPos := readIndex(path, i)
|
||||
if indexStr == "*" {
|
||||
steps = append(steps, JSONStep{Type: IndexStep, Index: -1})
|
||||
} else {
|
||||
index, err := strconv.Atoi(indexStr)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid index: %s; error: %w", indexStr, err)
|
||||
}
|
||||
steps = append(steps, JSONStep{Type: IndexStep, Index: index})
|
||||
}
|
||||
i = nextPos + 1 // Skip closing ]
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected character: %c at position %d; path: %q", path[i], i, path)
|
||||
}
|
||||
}
|
||||
|
||||
return steps, nil
|
||||
}
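For example, a mixed path breaks down into the following steps (mirroring the parser tests further below):

steps, err := ParseJSONPath("$.store.book[*].title")
if err != nil {
	log.Fatal(err)
}
// steps: {RootStep}, {ChildStep "store"}, {ChildStep "book"},
//        {IndexStep -1} (wildcard), {ChildStep "title"}
fmt.Println(len(steps)) // 5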
|
||||
|
||||
// readKey extracts a key name from the path
func readKey(path string, start int) (string, int) {
	i := start
	for ; i < len(path); i++ {
		if path[i] == '.' || path[i] == '[' {
			break
		}
	}
	return path[start:i], i
}

// readIndex extracts an array index or wildcard from the path
func readIndex(path string, start int) (string, int) {
	i := start
	for ; i < len(path); i++ {
		if path[i] == ']' {
			break
		}
	}
	return path[start:i], i
}
|
||||
|
||||
// Get retrieves values with their paths from data at the specified JSONPath
// Each returned JSONNode contains both the value and its exact path in the data structure
func Get(data interface{}, path string) ([]JSONNode, error) {
	steps, err := ParseJSONPath(path)
	if err != nil {
		return nil, fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
	}

	results := []JSONNode{}
	err = traverseWithPaths(data, steps, &results, "$")
	if err != nil {
		return nil, fmt.Errorf("failed to traverse JSONPath %q: %w", path, err)
	}
	return results, nil
}
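A usage sketch, with the paths reported alongside the values:

doc := map[string]interface{}{
	"store": map[string]interface{}{
		"bicycle": map[string]interface{}{"color": "red", "price": 199.95},
	},
}
nodes, err := Get(doc, "$..price")
if err != nil {
	log.Fatal(err)
}
for _, n := range nodes {
	fmt.Printf("%s = %v\n", n.Path, n.Value) // $.store.bicycle.price = 199.95
}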
|
||||
|
||||
// Set updates the value at the specified JSONPath in the original data structure.
// It only modifies the first matching node.
func Set(data interface{}, path string, value interface{}) error {
	steps, err := ParseJSONPath(path)
	if err != nil {
		return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
	}

	success := false
	err = setWithPath(data, steps, &success, value, "$", ModifyFirstMode)
	if err != nil {
		return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
	}
	return nil
}

// SetAll updates all matching values at the specified JSONPath.
func SetAll(data interface{}, path string, value interface{}) error {
	steps, err := ParseJSONPath(path)
	if err != nil {
		return fmt.Errorf("failed to parse JSONPath %q: %w", path, err)
	}

	success := false
	err = setWithPath(data, steps, &success, value, "$", ModifyAllMode)
	if err != nil {
		return fmt.Errorf("failed to set value at JSONPath %q: %w", path, err)
	}
	return nil
}
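The difference between the two, as a sketch (matching the behaviour exercised in the tests further below):

data := map[string]interface{}{
	"users": []interface{}{
		map[string]interface{}{"active": true},
		map[string]interface{}{"active": true},
	},
}
_ = Set(data, "$.users[*].active", false)    // only users[0].active is flipped
_ = SetAll(data, "$.users[*].active", false) // both entries are flipped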
|
||||
|
||||
// setWithPath modifies values while tracking paths
|
||||
func setWithPath(node interface{}, steps []JSONStep, success *bool, value interface{}, currentPath string, mode TraversalMode) error {
|
||||
if node == nil || *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Skip root step
|
||||
actualSteps := steps
|
||||
if len(steps) > 0 && steps[0].Type == RootStep {
|
||||
actualSteps = steps[1:]
|
||||
}
|
||||
|
||||
// If we have no steps left, we're setting the root value
|
||||
if len(actualSteps) == 0 {
|
||||
// For the root node, we need to handle it differently depending on what's passed in
|
||||
// since we can't directly replace the interface{} variable
|
||||
|
||||
// We'll signal success and let the JSONProcessor handle updating the root
|
||||
*success = true
|
||||
return nil
|
||||
}
|
||||
|
||||
// Process the first step
|
||||
step := actualSteps[0]
|
||||
remainingSteps := actualSteps[1:]
|
||||
isLastStep := len(remainingSteps) == 0
|
||||
|
||||
switch step.Type {
|
||||
case ChildStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
childPath := currentPath + "." + step.Key
|
||||
|
||||
if isLastStep {
|
||||
// We've reached the target, set the value
|
||||
m[step.Key] = value
|
||||
*success = true
|
||||
return nil
|
||||
}
|
||||
|
||||
// Create intermediate nodes if necessary
|
||||
child, exists := m[step.Key]
|
||||
if !exists {
|
||||
// Create missing intermediate node
|
||||
if len(remainingSteps) > 0 && remainingSteps[0].Type == IndexStep {
|
||||
child = []interface{}{}
|
||||
} else {
|
||||
child = map[string]interface{}{}
|
||||
}
|
||||
m[step.Key] = child
|
||||
}
|
||||
|
||||
err := setWithPath(child, remainingSteps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
|
||||
case IndexStep:
|
||||
arr, ok := node.([]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not an array; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
// Handle wildcard index
|
||||
if step.Index == -1 {
|
||||
for i, item := range arr {
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
if isLastStep {
|
||||
arr[i] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle specific index
|
||||
if step.Index >= 0 && step.Index < len(arr) {
|
||||
item := arr[step.Index]
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
|
||||
if isLastStep {
|
||||
arr[step.Index] = value
|
||||
*success = true
|
||||
} else {
|
||||
err := setWithPath(item, remainingSteps, success, value, itemPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case RecursiveDescentStep:
|
||||
// For recursive descent, first check direct match at this level
|
||||
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
|
||||
if val, exists := m[step.Key]; exists {
|
||||
directPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
m[step.Key] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(val, remainingSteps, success, value, directPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", directPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then continue recursion to all children
|
||||
switch n := node.(type) {
|
||||
case map[string]interface{}:
|
||||
for k, v := range n {
|
||||
childPath := currentPath + "." + k
|
||||
// Skip keys we've already processed directly
|
||||
if step.Key != "*" && k == step.Key {
|
||||
continue
|
||||
}
|
||||
err := setWithPath(v, steps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
for i, v := range n {
|
||||
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
err := setWithPath(v, steps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case WildcardStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node at path %q is not a map; actual type: %T", currentPath, node)
|
||||
}
|
||||
|
||||
for k, v := range m {
|
||||
childPath := currentPath + "." + k
|
||||
if isLastStep {
|
||||
m[k] = value
|
||||
*success = true
|
||||
if mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err := setWithPath(v, remainingSteps, success, value, childPath, mode)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to set value at JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
if *success && mode == ModifyFirstMode {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// traverseWithPaths tracks both nodes and their paths during traversal
|
||||
func traverseWithPaths(node interface{}, steps []JSONStep, results *[]JSONNode, currentPath string) error {
|
||||
if len(steps) == 0 || node == nil {
|
||||
return fmt.Errorf("cannot traverse with empty steps or nil node; steps length: %d, node: %v", len(steps), node)
|
||||
}
|
||||
|
||||
// Skip root step
|
||||
actualSteps := steps
|
||||
if steps[0].Type == RootStep {
|
||||
if len(steps) == 1 {
|
||||
*results = append(*results, JSONNode{Value: node, Path: currentPath})
|
||||
return nil
|
||||
}
|
||||
actualSteps = steps[1:]
|
||||
}
|
||||
|
||||
// Process the first step
|
||||
step := actualSteps[0]
|
||||
remainingSteps := actualSteps[1:]
|
||||
isLastStep := len(remainingSteps) == 0
|
||||
|
||||
switch step.Type {
|
||||
case ChildStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not a map; actual type: %T", node)
|
||||
}
|
||||
|
||||
child, exists := m[step.Key]
|
||||
if !exists {
|
||||
return fmt.Errorf("key not found: %s in node at path: %s", step.Key, currentPath)
|
||||
}
|
||||
|
||||
childPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: child, Path: childPath})
|
||||
} else {
|
||||
err := traverseWithPaths(child, remainingSteps, results, childPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
case IndexStep:
|
||||
arr, ok := node.([]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not an array; actual type: %T", node)
|
||||
}
|
||||
|
||||
// Handle wildcard index
|
||||
if step.Index == -1 {
|
||||
for i, item := range arr {
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: item, Path: itemPath})
|
||||
} else {
|
||||
err := traverseWithPaths(item, remainingSteps, results, itemPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Handle specific index
|
||||
if step.Index >= 0 && step.Index < len(arr) {
|
||||
item := arr[step.Index]
|
||||
itemPath := fmt.Sprintf("%s[%d]", currentPath, step.Index)
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: item, Path: itemPath})
|
||||
} else {
|
||||
err := traverseWithPaths(item, remainingSteps, results, itemPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", itemPath, err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("index %d out of bounds for array at path: %s", step.Index, currentPath)
|
||||
}
|
||||
|
||||
case RecursiveDescentStep:
|
||||
// For recursive descent, first check direct match at this level
|
||||
if m, ok := node.(map[string]interface{}); ok && step.Key != "*" {
|
||||
if val, exists := m[step.Key]; exists {
|
||||
directPath := currentPath + "." + step.Key
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: val, Path: directPath})
|
||||
} else {
|
||||
err := traverseWithPaths(val, remainingSteps, results, directPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", directPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For wildcard, collect this node
|
||||
if step.Key == "*" && isLastStep {
|
||||
*results = append(*results, JSONNode{Value: node, Path: currentPath})
|
||||
}
|
||||
|
||||
// Then continue recursion to all children
|
||||
switch n := node.(type) {
|
||||
case map[string]interface{}:
|
||||
for k, v := range n {
|
||||
childPath := currentPath + "." + k
|
||||
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
case []interface{}:
|
||||
for i, v := range n {
|
||||
childPath := fmt.Sprintf("%s[%d]", currentPath, i)
|
||||
err := traverseWithPaths(v, steps, results, childPath) // Use the same steps
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case WildcardStep:
|
||||
m, ok := node.(map[string]interface{})
|
||||
if !ok {
|
||||
return fmt.Errorf("node is not a map; actual type: %T", node)
|
||||
}
|
||||
|
||||
for k, v := range m {
|
||||
childPath := currentPath + "." + k
|
||||
if isLastStep {
|
||||
*results = append(*results, JSONNode{Value: v, Path: childPath})
|
||||
} else {
|
||||
err := traverseWithPaths(v, remainingSteps, results, childPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to traverse JSONPath %q: %w", childPath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
@@ -1,577 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetWithPathsBasic(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data map[string]interface{}
|
||||
path string
|
||||
expected []JSONNode
|
||||
error bool
|
||||
}{
|
||||
{
|
||||
name: "simple property",
|
||||
data: map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
path: "$.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested property",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
},
|
||||
path: "$.user.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.user.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array access",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[1].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[*].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.users[0].name"},
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive descent",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"email": "admin@example.com",
|
||||
},
|
||||
},
|
||||
path: "$..email",
|
||||
expected: []JSONNode{
|
||||
{Value: "john@example.com", Path: "$.user.profile.email"},
|
||||
{Value: "admin@example.com", Path: "$.admin.email"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nonexistent path",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
},
|
||||
path: "$.user.email",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(tt.data, tt.path)
|
||||
if err != nil {
|
||||
if !tt.error {
|
||||
t.Errorf("GetWithPaths() returned error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// For nonexistent path, we expect empty slice
|
||||
if tt.name == "nonexistent path" {
|
||||
if len(result) > 0 {
|
||||
t.Errorf("GetWithPaths() returned %v, expected empty result", result)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For wildcard results, we need to check containment rather than exact order
|
||||
if tt.name == "wildcard" || tt.name == "recursive descent" {
|
||||
// For each expected item, check if it exists in the results by both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
if reflect.DeepEqual(r.Value, expected.Value) && r.Path == expected.Path {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("GetWithPaths() missing expected value: %v with path: %s", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Otherwise check exact equality of both values and paths
|
||||
for i, expected := range tt.expected {
|
||||
if !reflect.DeepEqual(result[i].Value, expected.Value) {
|
||||
t.Errorf("GetWithPaths() value at [%d] = %v, expected %v", i, result[i].Value, expected.Value)
|
||||
}
|
||||
if result[i].Path != expected.Path {
|
||||
t.Errorf("GetWithPaths() path at [%d] = %s, expected %s", i, result[i].Path, expected.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSet(t *testing.T) {
|
||||
t.Run("simple property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
}
|
||||
err := Set(data, "$.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if data["name"] != "Jane" {
|
||||
t.Errorf("Set() failed: expected name to be 'Jane', got %v", data["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("nested property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
}
|
||||
err := Set(data, "$.user.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
user, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
if user["name"] != "Jane" {
|
||||
t.Errorf("Set() failed: expected user.name to be 'Jane', got %v", user["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("array element", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
}
|
||||
err := Set(data, "$.users[0].name", "Bob")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
user0, ok := users[0].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
if user0["name"] != "Bob" {
|
||||
t.Errorf("Set() failed: expected users[0].name to be 'Bob', got %v", user0["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("complex value", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
newProfile := map[string]interface{}{
|
||||
"email": "john.doe@example.com",
|
||||
"phone": "123-456-7890",
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.profile", newProfile)
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
profile, ok := userMap["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Profile is not a map")
|
||||
}
|
||||
|
||||
if profile["email"] != "john.doe@example.com" || profile["phone"] != "123-456-7890" {
|
||||
t.Errorf("Set() failed: expected profile to be updated with new values")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create new property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.email", "john@example.com")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
if email, exists := userMap["email"]; !exists || email != "john@example.com" {
|
||||
t.Errorf("Set() failed: expected user.email to be 'john@example.com', got %v", userMap["email"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create nested properties", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.user.contact.email", "john@example.com")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
userMap, ok := data["user"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User is not a map")
|
||||
}
|
||||
|
||||
contact, ok := userMap["contact"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Contact is not a map")
|
||||
}
|
||||
|
||||
if email, exists := contact["email"]; !exists || email != "john@example.com" {
|
||||
t.Errorf("Set() failed: expected user.contact.email to be 'john@example.com', got %v", contact["email"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("create array and element", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
},
|
||||
}
|
||||
|
||||
// This should create an empty addresses array, but won't be able to set index 0
|
||||
// since the array is empty
|
||||
err := Set(data, "$.user.addresses[0].street", "123 Main St")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("multiple targets (should only update first)", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"active": true},
|
||||
map[string]interface{}{"active": true},
|
||||
},
|
||||
}
|
||||
|
||||
err := Set(data, "$.users[*].active", false)
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
|
||||
user0, ok := users[0].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User0 is not a map")
|
||||
}
|
||||
|
||||
user1, ok := users[1].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User1 is not a map")
|
||||
}
|
||||
|
||||
// Only the first one should be changed
|
||||
if active, exists := user0["active"]; !exists || active != false {
|
||||
t.Errorf("Set() failed: expected users[0].active to be false, got %v", user0["active"])
|
||||
}
|
||||
|
||||
// The second one should remain unchanged
|
||||
if active, exists := user1["active"]; !exists || active != true {
|
||||
t.Errorf("Set() incorrectly modified users[1].active: expected true, got %v", user1["active"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("setting on root should not fail (anymore)", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
}
|
||||
|
||||
err := Set(data, "$", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Data should be unchanged
|
||||
if data["name"] != "John" {
|
||||
t.Errorf("Data was modified when setting on root")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSetAll(t *testing.T) {
|
||||
t.Run("simple property", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
}
|
||||
err := SetAll(data, "$.name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if data["name"] != "Jane" {
|
||||
t.Errorf("SetAll() failed: expected name to be 'Jane', got %v", data["name"])
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("all array elements", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"active": true},
|
||||
map[string]interface{}{"active": true},
|
||||
},
|
||||
}
|
||||
|
||||
err := SetAll(data, "$.users[*].active", false)
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
users, ok := data["users"].([]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Users is not a slice")
|
||||
}
|
||||
|
||||
// Both elements should be updated
|
||||
for i, user := range users {
|
||||
userMap, ok := user.(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("User%d is not a map", i)
|
||||
}
|
||||
|
||||
if active, exists := userMap["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() failed: expected users[%d].active to be false, got %v", i, userMap["active"])
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("recursive descent", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"profile": map[string]interface{}{
|
||||
"active": true,
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"profile": map[string]interface{}{
|
||||
"active": true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := SetAll(data, "$..active", false)
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check user profile
|
||||
userProfile, ok := data["user"].(map[string]interface{})["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Failed to access user.profile")
|
||||
}
|
||||
if active, exists := userProfile["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() didn't update user.profile.active, got: %v", active)
|
||||
}
|
||||
|
||||
// Check admin profile
|
||||
adminProfile, ok := data["admin"].(map[string]interface{})["profile"].(map[string]interface{})
|
||||
if !ok {
|
||||
t.Fatalf("Failed to access admin.profile")
|
||||
}
|
||||
if active, exists := adminProfile["active"]; !exists || active != false {
|
||||
t.Errorf("SetAll() didn't update admin.profile.active, got: %v", active)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWithPathsExtended(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
data map[string]interface{}
|
||||
path string
|
||||
expected []JSONNode
|
||||
}{
|
||||
{
|
||||
name: "simple property",
|
||||
data: map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
path: "$.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested property",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
},
|
||||
},
|
||||
path: "$.user.name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.user.name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array access",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[1].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard",
|
||||
data: map[string]interface{}{
|
||||
"users": []interface{}{
|
||||
map[string]interface{}{"name": "John", "age": 30},
|
||||
map[string]interface{}{"name": "Jane", "age": 25},
|
||||
},
|
||||
},
|
||||
path: "$.users[*].name",
|
||||
expected: []JSONNode{
|
||||
{Value: "John", Path: "$.users[0].name"},
|
||||
{Value: "Jane", Path: "$.users[1].name"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive descent",
|
||||
data: map[string]interface{}{
|
||||
"user": map[string]interface{}{
|
||||
"name": "John",
|
||||
"profile": map[string]interface{}{
|
||||
"email": "john@example.com",
|
||||
},
|
||||
},
|
||||
"admin": map[string]interface{}{
|
||||
"email": "admin@example.com",
|
||||
},
|
||||
},
|
||||
path: "$..email",
|
||||
expected: []JSONNode{
|
||||
{Value: "john@example.com", Path: "$.user.profile.email"},
|
||||
{Value: "admin@example.com", Path: "$.admin.email"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(tt.data, tt.path)
|
||||
if err != nil {
|
||||
t.Errorf("GetWithPaths() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For each expected item, find its match in the results and verify both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
// Check if value matches
|
||||
if reflect.DeepEqual(r.Value, expected.Value) {
|
||||
found = true
|
||||
// Check if path matches
|
||||
if r.Path != expected.Path {
|
||||
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@@ -1,318 +0,0 @@
|
||||
package jsonpath
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var testData = map[string]interface{}{
|
||||
"store": map[string]interface{}{
|
||||
"book": []interface{}{
|
||||
map[string]interface{}{
|
||||
"title": "The Fellowship of the Ring",
|
||||
"price": 22.99,
|
||||
},
|
||||
map[string]interface{}{
|
||||
"title": "The Two Towers",
|
||||
"price": 23.45,
|
||||
},
|
||||
},
|
||||
"bicycle": map[string]interface{}{
|
||||
"color": "red",
|
||||
"price": 199.95,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestParser(t *testing.T) {
|
||||
tests := []struct {
|
||||
path string
|
||||
steps []JSONStep
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
path: "$.store.bicycle.color",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "bicycle"},
|
||||
{Type: ChildStep, Key: "color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$..price",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: RecursiveDescentStep, Key: "price"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$.store.book[*].title",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "book"},
|
||||
{Type: IndexStep, Index: -1}, // Wildcard
|
||||
{Type: ChildStep, Key: "title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "$.store.book[0]",
|
||||
steps: []JSONStep{
|
||||
{Type: RootStep},
|
||||
{Type: ChildStep, Key: "store"},
|
||||
{Type: ChildStep, Key: "book"},
|
||||
{Type: IndexStep, Index: 0},
|
||||
},
|
||||
},
|
||||
{
|
||||
path: "invalid.path",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
path: "$.store.book[abc]",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.path, func(t *testing.T) {
|
||||
steps, err := ParseJSONPath(tt.path)
|
||||
if (err != nil) != tt.wantErr {
|
||||
t.Fatalf("ParseJSONPath() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
if !tt.wantErr && !reflect.DeepEqual(steps, tt.steps) {
|
||||
t.Errorf("ParseJSONPath() steps = %+v, want %+v", steps, tt.steps)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEvaluator(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expected []JSONNode
|
||||
error bool
|
||||
}{
|
||||
{
|
||||
name: "simple_property_access",
|
||||
path: "$.store.bicycle.color",
|
||||
expected: []JSONNode{
|
||||
{Value: "red", Path: "$.store.bicycle.color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array_index_access",
|
||||
path: "$.store.book[0].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_array_access",
|
||||
path: "$.store.book[*].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
{Value: "The Two Towers", Path: "$.store.book[1].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive_price_search",
|
||||
path: "$..price",
|
||||
expected: []JSONNode{
|
||||
{Value: 22.99, Path: "$.store.book[0].price"},
|
||||
{Value: 23.45, Path: "$.store.book[1].price"},
|
||||
{Value: 199.95, Path: "$.store.bicycle.price"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_recursive",
|
||||
path: "$..*",
|
||||
expected: []JSONNode{
|
||||
// These will be compared by value only, paths will be validated separately
|
||||
{Value: testData["store"].(map[string]interface{})["book"]},
|
||||
{Value: testData["store"].(map[string]interface{})["bicycle"]},
|
||||
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[0]},
|
||||
{Value: testData["store"].(map[string]interface{})["book"].([]interface{})[1]},
|
||||
{Value: "The Fellowship of the Ring"},
|
||||
{Value: 22.99},
|
||||
{Value: "The Two Towers"},
|
||||
{Value: 23.45},
|
||||
{Value: "red"},
|
||||
{Value: 199.95},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "invalid_index",
|
||||
path: "$.store.book[5]",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "nonexistent_property",
|
||||
path: "$.store.nonexistent",
|
||||
expected: []JSONNode{},
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Use GetWithPaths directly
|
||||
result, err := Get(testData, tt.path)
|
||||
if err != nil {
|
||||
if !tt.error {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Special handling for wildcard recursive test
|
||||
if tt.name == "wildcard_recursive" {
|
||||
// Skip length check for wildcard recursive since it might vary
|
||||
// Just verify that each expected item is in the results
|
||||
|
||||
// Validate values match and paths are filled in
|
||||
for _, e := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
if reflect.DeepEqual(r.Value, e.Value) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected value %v not found in results", e.Value)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("Expected %d items, got %d", len(tt.expected), len(result))
|
||||
}
|
||||
|
||||
// Validate both values and paths
|
||||
for i, e := range tt.expected {
|
||||
if i < len(result) {
|
||||
if !reflect.DeepEqual(result[i].Value, e.Value) {
|
||||
t.Errorf("Value at [%d]: got %v, expected %v", i, result[i].Value, e.Value)
|
||||
}
|
||||
if result[i].Path != e.Path {
|
||||
t.Errorf("Path at [%d]: got %s, expected %s", i, result[i].Path, e.Path)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEdgeCases(t *testing.T) {
|
||||
t.Run("empty_data", func(t *testing.T) {
|
||||
result, err := Get(nil, "$.a.b")
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for empty data")
|
||||
return
|
||||
}
|
||||
if len(result) > 0 {
|
||||
t.Errorf("Expected empty result, got %v", result)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("empty_path", func(t *testing.T) {
|
||||
_, err := ParseJSONPath("")
|
||||
if err == nil {
|
||||
t.Error("Expected error for empty path")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("numeric_keys", func(t *testing.T) {
|
||||
data := map[string]interface{}{
|
||||
"42": "answer",
|
||||
}
|
||||
result, err := Get(data, "$.42")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) == 0 || result[0].Value != "answer" {
|
||||
t.Errorf("Expected 'answer', got %v", result)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWithPaths(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expected []JSONNode
|
||||
}{
|
||||
{
|
||||
name: "simple_property_access",
|
||||
path: "$.store.bicycle.color",
|
||||
expected: []JSONNode{
|
||||
{Value: "red", Path: "$.store.bicycle.color"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "array_index_access",
|
||||
path: "$.store.book[0].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "wildcard_array_access",
|
||||
path: "$.store.book[*].title",
|
||||
expected: []JSONNode{
|
||||
{Value: "The Fellowship of the Ring", Path: "$.store.book[0].title"},
|
||||
{Value: "The Two Towers", Path: "$.store.book[1].title"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "recursive_price_search",
|
||||
path: "$..price",
|
||||
expected: []JSONNode{
|
||||
{Value: 22.99, Path: "$.store.book[0].price"},
|
||||
{Value: 23.45, Path: "$.store.book[1].price"},
|
||||
{Value: 199.95, Path: "$.store.bicycle.price"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(testData, tt.path)
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if lengths match
|
||||
if len(result) != len(tt.expected) {
|
||||
t.Errorf("GetWithPaths() returned %d items, expected %d", len(result), len(tt.expected))
|
||||
return
|
||||
}
|
||||
|
||||
// For each expected item, find its match in the results and verify both value and path
|
||||
for _, expected := range tt.expected {
|
||||
found := false
|
||||
for _, r := range result {
|
||||
// First verify the value matches
|
||||
if reflect.DeepEqual(r.Value, expected.Value) {
|
||||
found = true
|
||||
// Then verify the path matches
|
||||
if r.Path != expected.Path {
|
||||
t.Errorf("Path mismatch for value %v: got %s, expected %s", r.Value, r.Path, expected.Path)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("Expected node with value %v and path %s not found in results", expected.Value, expected.Path)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
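For orientation, a minimal usage sketch of the API these tests exercise; the Get/JSONNode signatures are assumptions inferred from this test file, not taken from elsewhere in the repository.

```go
package processor

import "fmt"

// Usage sketch only: assumes Get(data, path) ([]JSONNode, error) and that
// JSONNode carries both the matched Value and the concrete Path it was found
// at, exactly as the tests above exercise it.
func ExampleGet_recursivePrice() {
	data := map[string]interface{}{
		"store": map[string]interface{}{
			"bicycle": map[string]interface{}{"color": "red", "price": 199.95},
		},
	}
	nodes, err := Get(data, "$..price")
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	for _, n := range nodes {
		fmt.Printf("%v at %s\n", n.Value, n.Path)
	}
	// Output: 199.95 at $.store.bicycle.price
}
```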
@@ -2,201 +2,147 @@ package processor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"io"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/antchfx/xmlquery"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
"cook/utils"
|
||||
|
||||
"modify/logger"
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// Processor defines the interface for all file processors
|
||||
type Processor interface {
|
||||
// Process handles processing a file with the given pattern and Lua expression
|
||||
// Now implemented as a base function in processor.go
|
||||
// Process(filename string, pattern string, luaExpr string) (int, int, error)
|
||||
// processorLogger is a scoped logger for the processor package.
|
||||
var processorLogger = logger.Default.WithPrefix("processor")
|
||||
|
||||
// ProcessContent handles processing a string content directly with the given pattern and Lua expression
|
||||
// Returns the modified content, modification count, match count, and any error
|
||||
ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error)
|
||||
// Maybe we make this an interface again for the shits and giggles
|
||||
// We will see, it could easily be...
|
||||
|
||||
// ToLua converts processor-specific data to Lua variables
|
||||
ToLua(L *lua.LState, data interface{}) error
|
||||
var globalVariables = map[string]interface{}{}
|
||||
|
||||
// FromLua retrieves modified data from Lua
|
||||
FromLua(L *lua.LState) (interface{}, error)
|
||||
}
|
||||
|
||||
// ModificationRecord tracks a single value modification
|
||||
type ModificationRecord struct {
|
||||
File string
|
||||
OldValue string
|
||||
NewValue string
|
||||
Operation string
|
||||
Context string
|
||||
func SetVariables(vars map[string]interface{}) {
|
||||
for k, v := range vars {
|
||||
globalVariables[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
func NewLuaState() (*lua.LState, error) {
|
||||
newLStateLogger := processorLogger.WithPrefix("NewLuaState")
|
||||
newLStateLogger.Debug("Creating new Lua state")
|
||||
L := lua.NewState()
|
||||
// defer L.Close()
|
||||
|
||||
// Load math library
|
||||
logger.Debug("Loading Lua math library")
|
||||
newLStateLogger.Debug("Loading Lua math library")
|
||||
L.Push(L.GetGlobal("require"))
|
||||
L.Push(lua.LString("math"))
|
||||
if err := L.PCall(1, 1, nil); err != nil {
|
||||
logger.Error("Failed to load Lua math library: %v", err)
|
||||
newLStateLogger.Error("Failed to load Lua math library: %v", err)
|
||||
return nil, fmt.Errorf("error loading Lua math library: %v", err)
|
||||
}
|
||||
newLStateLogger.Debug("Lua math library loaded")
|
||||
|
||||
// Initialize helper functions
|
||||
logger.Debug("Initializing Lua helper functions")
|
||||
newLStateLogger.Debug("Initializing Lua helper functions")
|
||||
if err := InitLuaHelpers(L); err != nil {
|
||||
logger.Error("Failed to initialize Lua helper functions: %v", err)
|
||||
newLStateLogger.Error("Failed to initialize Lua helper functions: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
newLStateLogger.Debug("Lua helper functions initialized")
|
||||
|
||||
return L, nil
|
||||
}
|
||||
|
||||
func Process(p Processor, filename string, pattern string, luaExpr string) (int, int, error) {
|
||||
// Read file content
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
logger.Error("Failed to get current working directory: %v", err)
|
||||
return 0, 0, fmt.Errorf("error getting current working directory: %v", err)
|
||||
}
|
||||
|
||||
fullPath := filepath.Join(cwd, filename)
|
||||
logger.Trace("Reading file content from: %s", fullPath)
|
||||
content, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
logger.Error("Failed to read file %s: %v", fullPath, err)
|
||||
return 0, 0, fmt.Errorf("error reading file: %v", err)
|
||||
}
|
||||
|
||||
fileContent := string(content)
|
||||
logger.Trace("File %s read successfully, size: %d bytes", fullPath, len(content))
|
||||
|
||||
// Process the content
|
||||
logger.Debug("Processing content for file: %s", filename)
|
||||
modifiedContent, modCount, matchCount, err := p.ProcessContent(fileContent, pattern, luaExpr)
|
||||
if err != nil {
|
||||
logger.Error("Error processing content for file %s: %v", filename, err)
|
||||
return 0, 0, err
|
||||
}
|
||||
|
||||
// If we made modifications, save the file
|
||||
if modCount > 0 {
|
||||
logger.Info("Writing %d modifications to file: %s", modCount, filename)
|
||||
err = os.WriteFile(fullPath, []byte(modifiedContent), 0644)
|
||||
if err != nil {
|
||||
logger.Error("Failed to write to file %s: %v", fullPath, err)
|
||||
return 0, 0, fmt.Errorf("error writing file: %v", err)
|
||||
}
|
||||
logger.Debug("File %s written successfully", filename)
|
||||
} else {
|
||||
logger.Debug("No modifications to write for file: %s", filename)
|
||||
}
|
||||
|
||||
return modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// ToLua converts a struct or map to a Lua table recursively
|
||||
func ToLua(L *lua.LState, data interface{}) (lua.LValue, error) {
|
||||
switch v := data.(type) {
|
||||
case *xmlquery.Node:
|
||||
luaTable := L.NewTable()
|
||||
luaTable.RawSetString("text", lua.LString(v.Data))
|
||||
// Should be a map, simple key value pairs
|
||||
attr, err := ToLua(L, v.Attr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
luaTable.RawSetString("attr", attr)
|
||||
return luaTable, nil
|
||||
case map[string]interface{}:
|
||||
luaTable := L.NewTable()
|
||||
for key, value := range v {
|
||||
luaValue, err := ToLua(L, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
luaTable.RawSetString(key, luaValue)
|
||||
}
|
||||
return luaTable, nil
|
||||
case []interface{}:
|
||||
luaTable := L.NewTable()
|
||||
for i, value := range v {
|
||||
luaValue, err := ToLua(L, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
luaTable.RawSetInt(i+1, luaValue) // Lua arrays are 1-indexed
|
||||
}
|
||||
return luaTable, nil
|
||||
case string:
|
||||
return lua.LString(v), nil
|
||||
case bool:
|
||||
return lua.LBool(v), nil
|
||||
// Inject global variables
|
||||
if len(globalVariables) > 0 {
|
||||
newLStateLogger.Debug("Injecting %d global variables into Lua state", len(globalVariables))
|
||||
for k, v := range globalVariables {
|
||||
switch val := v.(type) {
|
||||
case int:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case int64:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float32:
|
||||
L.SetGlobal(k, lua.LNumber(float64(val)))
|
||||
case float64:
|
||||
return lua.LNumber(v), nil
|
||||
case nil:
|
||||
return lua.LNil, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported data type: %T", data)
|
||||
L.SetGlobal(k, lua.LNumber(val))
|
||||
case string:
|
||||
L.SetGlobal(k, lua.LString(val))
|
||||
case bool:
|
||||
if val {
|
||||
L.SetGlobal(k, lua.LTrue)
|
||||
} else {
|
||||
L.SetGlobal(k, lua.LFalse)
|
||||
}
|
||||
default:
|
||||
// Fallback to string representation
|
||||
L.SetGlobal(k, lua.LString(fmt.Sprintf("%v", val)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newLStateLogger.Debug("New Lua state created successfully")
|
||||
return L, nil
|
||||
}
|
||||
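A sketch of how a caller might seed globals before creating a state, assuming SetVariables and NewLuaState behave as shown above (injected values become plain Lua globals, with unsupported types falling back to their string form). Not taken from the repository.

```go
package processor

// Sketch only: assumes the SetVariables/NewLuaState behaviour shown above.
func exampleSeededState() error {
	SetVariables(map[string]interface{}{
		"multiplier": 2.5,
		"dryrun":     false,
		"label":      "balance-pass",
	})

	L, err := NewLuaState() // math + helpers loaded, globals injected
	if err != nil {
		return err
	}
	defer L.Close()

	// The injected values are now ordinary Lua globals.
	return L.DoString(`print(label, multiplier, dryrun)`)
}
```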
|
||||
// FromLua converts a Lua table to a struct or map recursively
|
||||
func FromLua(L *lua.LState, luaValue lua.LValue) (interface{}, error) {
|
||||
fromLuaLogger := processorLogger.WithPrefix("FromLua").WithField("luaType", luaValue.Type().String())
|
||||
fromLuaLogger.Debug("Converting Lua value to Go interface")
|
||||
switch v := luaValue.(type) {
|
||||
// Well shit...
|
||||
// Tables in lua are both maps and arrays
|
||||
// As arrays they are ordered and as maps, obviously, not
|
||||
// So when we parse them to a go map we fuck up the order for arrays
|
||||
// We have to find a better way....
|
||||
case *lua.LTable:
|
||||
fromLuaLogger.Debug("Processing Lua table")
|
||||
isArray, err := IsLuaTableArray(L, v)
|
||||
if err != nil {
|
||||
fromLuaLogger.Error("Failed to determine if Lua table is array: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
fromLuaLogger.Debug("Lua table is array: %t", isArray)
|
||||
if isArray {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go array")
|
||||
result := make([]interface{}, 0)
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
|
||||
result = append(result, converted)
|
||||
})
|
||||
fromLuaLogger.Trace("Converted Go array: %v", result)
|
||||
return result, nil
|
||||
} else {
|
||||
fromLuaLogger.Debug("Converting Lua table to Go map")
|
||||
result := make(map[string]interface{})
|
||||
v.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
converted, _ := FromLua(L, value)
|
||||
result[key.String()] = converted
|
||||
})
|
||||
fromLuaLogger.Trace("Converted Go map: %v", result)
|
||||
return result, nil
|
||||
}
|
||||
case lua.LString:
|
||||
fromLuaLogger.Debug("Converting Lua string to Go string")
|
||||
fromLuaLogger.Trace("Lua string: %q", string(v))
|
||||
return string(v), nil
|
||||
case lua.LBool:
|
||||
fromLuaLogger.Debug("Converting Lua boolean to Go boolean")
|
||||
fromLuaLogger.Trace("Lua boolean: %t", bool(v))
|
||||
return bool(v), nil
|
||||
case lua.LNumber:
|
||||
fromLuaLogger.Debug("Converting Lua number to Go float64")
|
||||
fromLuaLogger.Trace("Lua number: %f", float64(v))
|
||||
return float64(v), nil
|
||||
default:
|
||||
fromLuaLogger.Debug("Unsupported Lua type, returning nil")
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
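A round-trip sketch of the two conversions, assuming the package-level ToLua/FromLua shown here remain available in the new layout; note the ordering caveat the comments mention for tables used as maps.

```go
package processor

import "fmt"

// Round-trip sketch (assumed API): Go value -> Lua table -> Go value.
func exampleRoundTrip() error {
	L, err := NewLuaState()
	if err != nil {
		return err
	}
	defer L.Close()

	in := map[string]interface{}{
		"name":  "Item_Fiber",
		"stack": 1000000.0,
		"tags":  []interface{}{"resource", "craftable"}, // becomes a 1-indexed Lua array
	}

	lv, err := ToLua(L, in)
	if err != nil {
		return err
	}
	L.SetGlobal("data", lv)

	out, err := FromLua(L, L.GetGlobal("data"))
	if err != nil {
		return err
	}
	// All numbers come back as float64, and arrays are rebuilt via the
	// isArray helper, as described above.
	fmt.Printf("%#v\n", out)
	return nil
}
```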
|
||||
func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
|
||||
logger.Trace("Checking if Lua table is an array")
|
||||
isLuaTableArrayLogger := processorLogger.WithPrefix("IsLuaTableArray")
|
||||
isLuaTableArrayLogger.Debug("Checking if Lua table is an array")
|
||||
isLuaTableArrayLogger.Trace("Lua table input: %v", v)
|
||||
L.SetGlobal("table_to_check", v)
|
||||
|
||||
// Use our predefined helper function from InitLuaHelpers
|
||||
err := L.DoString(`is_array = isArray(table_to_check)`)
|
||||
if err != nil {
|
||||
logger.Error("Error determining if table is an array: %v", err)
|
||||
isLuaTableArrayLogger.Error("Error determining if table is an array: %v", err)
|
||||
return false, fmt.Errorf("error determining if table is array: %w", err)
|
||||
}
|
||||
|
||||
@@ -204,13 +150,15 @@ func IsLuaTableArray(L *lua.LState, v *lua.LTable) (bool, error) {
|
||||
isArray := L.GetGlobal("is_array")
|
||||
// LVIsFalse returns true if a given LValue is nil or false, otherwise false.
|
||||
result := !lua.LVIsFalse(isArray)
|
||||
logger.Trace("Lua table is array: %v", result)
|
||||
isLuaTableArrayLogger.Debug("Lua table is array: %t", result)
|
||||
isLuaTableArrayLogger.Trace("isArray result Lua value: %v", isArray)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// InitLuaHelpers initializes common Lua helper functions
|
||||
func InitLuaHelpers(L *lua.LState) error {
|
||||
logger.Debug("Loading Lua helper functions")
|
||||
initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
|
||||
initLuaHelpersLogger.Debug("Loading Lua helper functions")
|
||||
|
||||
helperScript := `
|
||||
-- Custom Lua helpers for math operations
|
||||
@@ -225,6 +173,39 @@ function ceil(x) return math.ceil(x) end
|
||||
function upper(s) return string.upper(s) end
|
||||
function lower(s) return string.lower(s) end
|
||||
function format(s, ...) return string.format(s, ...) end
|
||||
function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end
|
||||
|
||||
-- String split helper
|
||||
function strsplit(inputstr, sep)
|
||||
if sep == nil then
|
||||
sep = "%s"
|
||||
end
|
||||
local t = {}
|
||||
for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
|
||||
table.insert(t, str)
|
||||
end
|
||||
return t
|
||||
end
|
||||
|
||||
---@param table table
|
||||
---@param depth number?
|
||||
function DumpTable(table, depth)
|
||||
if depth == nil then
|
||||
depth = 0
|
||||
end
|
||||
if (depth > 200) then
|
||||
print("Error: Depth > 200 in dumpTable()")
|
||||
return
|
||||
end
|
||||
for k, v in pairs(table) do
|
||||
if (type(v) == "table") then
|
||||
print(string.rep(" ", depth) .. k .. ":")
|
||||
DumpTable(v, depth + 1)
|
||||
else
|
||||
print(string.rep(" ", depth) .. k .. ": ", v)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- String to number conversion helper
|
||||
function num(str)
|
||||
@@ -258,27 +239,21 @@ end
|
||||
modified = false
|
||||
`
|
||||
if err := L.DoString(helperScript); err != nil {
|
||||
logger.Error("Failed to load Lua helper functions: %v", err)
|
||||
initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
|
||||
return fmt.Errorf("error loading helper functions: %v", err)
|
||||
}
|
||||
initLuaHelpersLogger.Debug("Lua helper functions loaded")
|
||||
|
||||
logger.Debug("Setting up Lua print function to Go")
|
||||
initLuaHelpersLogger.Debug("Setting up Lua print function to Go")
|
||||
L.SetGlobal("print", L.NewFunction(printToGo))
|
||||
L.SetGlobal("fetch", L.NewFunction(fetch))
|
||||
initLuaHelpersLogger.Debug("Lua print and fetch functions bound to Go")
|
||||
return nil
|
||||
}
|
||||
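A sketch exercising the installed helpers from Go; it assumes num() converts a string to a number as its comment says (its body is elided in this hunk), and that NewLuaState wires the helpers plus the print binding.

```go
package processor

// Sketch: exercising the helper functions installed by InitLuaHelpers.
func exampleHelpers() error {
	L, err := NewLuaState()
	if err != nil {
		return err
	}
	defer L.Close()

	script := `
		local parts = strsplit("10,20,30", ",")   -- {"10","20","30"}
		local total = 0
		for _, p in ipairs(parts) do
			total = total + num(p)                -- num() converts strings to numbers
		end
		print("total:", total)                    -- routed to the Go logger via printToGo
		DumpTable({a = 1, b = {c = 2}})           -- recursive table dump
	`
	return L.DoString(script)
}
```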
|
||||
// Helper utility functions
|
||||
|
||||
// LimitString truncates a string to maxLen and adds "..." if truncated
|
||||
func LimitString(s string, maxLen int) string {
|
||||
s = strings.ReplaceAll(s, "\n", "\\n")
|
||||
if len(s) <= maxLen {
|
||||
return s
|
||||
}
|
||||
return s[:maxLen-3] + "..."
|
||||
}
|
||||
|
||||
func PrependLuaAssignment(luaExpr string) string {
|
||||
prependLuaAssignmentLogger := processorLogger.WithPrefix("PrependLuaAssignment").WithField("originalLuaExpr", luaExpr)
|
||||
prependLuaAssignmentLogger.Debug("Prepending Lua assignment if necessary")
|
||||
// Auto-prepend v1 for expressions starting with operators
|
||||
if strings.HasPrefix(luaExpr, "*") ||
|
||||
strings.HasPrefix(luaExpr, "/") ||
|
||||
@@ -287,30 +262,32 @@ func PrependLuaAssignment(luaExpr string) string {
|
||||
strings.HasPrefix(luaExpr, "^") ||
|
||||
strings.HasPrefix(luaExpr, "%") {
|
||||
luaExpr = "v1 = v1" + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 = v1' due to operator prefix")
|
||||
} else if strings.HasPrefix(luaExpr, "=") {
|
||||
// Handle direct assignment with = operator
|
||||
luaExpr = "v1 " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1' due to direct assignment operator")
|
||||
}
|
||||
|
||||
// Add assignment if needed
|
||||
if !strings.Contains(luaExpr, "=") {
|
||||
luaExpr = "v1 = " + luaExpr
|
||||
prependLuaAssignmentLogger.Debug("Prepended 'v1 =' as no assignment was found")
|
||||
}
|
||||
prependLuaAssignmentLogger.Trace("Final Lua expression after prepending: %q", luaExpr)
|
||||
return luaExpr
|
||||
}
|
||||
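A few input-to-output pairs for the shorthand expansion above, shown as a small sketch (not repository code).

```go
package processor

import "fmt"

// Sketch: shorthand expansion performed by PrependLuaAssignment, per the logic above.
func exampleShorthand() {
	for _, expr := range []string{
		"*2",             // -> "v1 = v1*2"
		"= 0",            // -> "v1 = 0"
		"math.floor(v1)", // -> "v1 = math.floor(v1)"
		"v2 = v1 + 1",    // already contains '=', left as-is
	} {
		fmt.Printf("%-18q -> %q\n", expr, PrependLuaAssignment(expr))
	}
}
```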
|
||||
// BuildLuaScript prepares a Lua expression from shorthand notation
|
||||
func BuildLuaScript(luaExpr string) string {
|
||||
logger.Debug("Building Lua script from expression: %s", luaExpr)
|
||||
buildLuaScriptLogger := processorLogger.WithPrefix("BuildLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildLuaScriptLogger.Debug("Building full Lua script from expression")
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
luaExpr = PrependLuaAssignment(luaExpr)
|
||||
|
||||
// This allows the user to specify whether or not they modified a value
|
||||
// If they do nothing we assume they did modify (no return at all)
|
||||
// If they return before our return then they themselves specify what they did
|
||||
// If nothing is returned lua assumes nil
|
||||
// So we can say our value was modified if the return value is either nil or true
|
||||
// If the return value is false then the user wants to keep the original
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
@@ -318,33 +295,189 @@ func BuildLuaScript(luaExpr string) string {
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildLuaScriptLogger.Trace("Generated full Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
|
||||
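A sketch of what the generated wrapper looks like and how the modified flag is derived, per the comments above; the $multiplier value assumes it was registered via SetVariables and is purely illustrative.

```go
package processor

import "fmt"

// Sketch: BuildLuaScript substitutes $vars, prepends the v1 assignment when
// needed, then wraps the expression so `modified` reflects run()'s return
// value (nil or true = modified, false = keep the original).
func exampleBuildScript() {
	fmt.Println(BuildLuaScript("v1 * $multiplier"))
	// Roughly:
	//   function run()
	//       v1 = v1 * 2.5   -- assuming $multiplier was set to 2.5 via SetVariables
	//   end
	//   local res = run()
	//   modified = res == nil or res
}
```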
|
||||
// BuildJSONLuaScript prepares a Lua expression for JSON mode
|
||||
func BuildJSONLuaScript(luaExpr string) string {
|
||||
buildJsonLuaScriptLogger := processorLogger.WithPrefix("BuildJSONLuaScript").WithField("inputLuaExpr", luaExpr)
|
||||
buildJsonLuaScriptLogger.Debug("Building full Lua script for JSON mode from expression")
|
||||
|
||||
// Perform $var substitutions from globalVariables
|
||||
luaExpr = replaceVariables(luaExpr)
|
||||
|
||||
fullScript := fmt.Sprintf(`
|
||||
function run()
|
||||
%s
|
||||
end
|
||||
local res = run()
|
||||
modified = res == nil or res
|
||||
`, luaExpr)
|
||||
buildJsonLuaScriptLogger.Trace("Generated full JSON Lua script: %q", utils.LimitString(fullScript, 200))
|
||||
|
||||
return fullScript
|
||||
}
|
||||
|
||||
func replaceVariables(expr string) string {
|
||||
// $varName -> literal value
|
||||
varNameRe := regexp.MustCompile(`\$(\w+)`)
|
||||
return varNameRe.ReplaceAllStringFunc(expr, func(m string) string {
|
||||
name := varNameRe.FindStringSubmatch(m)[1]
|
||||
if v, ok := globalVariables[name]; ok {
|
||||
switch val := v.(type) {
|
||||
case int, int64, float32, float64:
|
||||
return fmt.Sprintf("%v", val)
|
||||
case bool:
|
||||
if val {
|
||||
return "true"
|
||||
} else {
|
||||
return "false"
|
||||
}
|
||||
case string:
|
||||
// Quote strings for Lua literal
|
||||
return fmt.Sprintf("%q", val)
|
||||
default:
|
||||
return fmt.Sprintf("%q", fmt.Sprintf("%v", val))
|
||||
}
|
||||
}
|
||||
return m
|
||||
})
|
||||
}
|
||||
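A sketch of the $var substitution in isolation; replaceVariables is unexported, so this would live inside the package (e.g. in a test).

```go
package processor

import "fmt"

// Sketch (package-internal): strings become quoted Lua literals, numbers and
// booleans bare literals, and unknown $names are left untouched.
func exampleReplaceVariables() {
	SetVariables(map[string]interface{}{
		"factor": 3,
		"tag":    "heavy",
		"strict": true,
	})
	fmt.Println(replaceVariables("v1 = v1 * $factor; name = $tag; strict = $strict; keep = $missing"))
	// v1 = v1 * 3; name = "heavy"; strict = true; keep = $missing
}
```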
|
||||
func printToGo(L *lua.LState) int {
|
||||
printToGoLogger := processorLogger.WithPrefix("printToGo")
|
||||
printToGoLogger.Debug("Lua print function called, redirecting to Go logger")
|
||||
top := L.GetTop()
|
||||
|
||||
args := make([]interface{}, top)
|
||||
for i := 1; i <= top; i++ {
|
||||
args[i-1] = L.Get(i)
|
||||
}
|
||||
message := fmt.Sprint(args...)
|
||||
logger.Info("[Lua] %s", message)
|
||||
|
||||
// Format the message with proper spacing between arguments
|
||||
var parts []string
|
||||
for _, arg := range args {
|
||||
parts = append(parts, fmt.Sprintf("%v", arg))
|
||||
}
|
||||
message := strings.Join(parts, " ")
|
||||
printToGoLogger.Trace("Lua print message: %q", message)
|
||||
|
||||
// Use the LUA log level with a script tag
|
||||
logger.Lua("%s", message)
|
||||
printToGoLogger.Debug("Message logged from Lua")
|
||||
return 0
|
||||
}
|
||||
|
||||
// Max returns the maximum of two integers
|
||||
func Max(a, b int) int {
|
||||
if a > b {
|
||||
return a
|
||||
func fetch(L *lua.LState) int {
|
||||
fetchLogger := processorLogger.WithPrefix("fetch")
|
||||
fetchLogger.Debug("Lua fetch function called")
|
||||
// Get URL from first argument
|
||||
url := L.ToString(1)
|
||||
if url == "" {
|
||||
fetchLogger.Error("Fetch failed: URL is required")
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString("URL is required"))
|
||||
return 2
|
||||
}
|
||||
return b
|
||||
}
|
||||
fetchLogger.Debug("Fetching URL: %q", url)
|
||||
|
||||
// Min returns the minimum of two integers
|
||||
func Min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
// Get options from second argument if provided
|
||||
var method string = "GET"
|
||||
var headers map[string]string = make(map[string]string)
|
||||
var body string = ""
|
||||
|
||||
if L.GetTop() > 1 {
|
||||
options := L.ToTable(2)
|
||||
if options != nil {
|
||||
fetchLogger.Debug("Processing fetch options")
|
||||
// Get method
|
||||
if methodVal := options.RawGetString("method"); methodVal != lua.LNil {
|
||||
method = methodVal.String()
|
||||
fetchLogger.Trace("Method from options: %q", method)
|
||||
}
|
||||
return b
|
||||
|
||||
// Get headers
|
||||
if headersVal := options.RawGetString("headers"); headersVal != lua.LNil {
|
||||
if headersTable, ok := headersVal.(*lua.LTable); ok {
|
||||
fetchLogger.Trace("Processing headers table")
|
||||
headersTable.ForEach(func(key lua.LValue, value lua.LValue) {
|
||||
headers[key.String()] = value.String()
|
||||
fetchLogger.Trace("Header: %q = %q", key.String(), value.String())
|
||||
})
|
||||
}
|
||||
fetchLogger.Trace("All headers: %v", headers)
|
||||
}
|
||||
|
||||
// Get body
|
||||
if bodyVal := options.RawGetString("body"); bodyVal != lua.LNil {
|
||||
body = bodyVal.String()
|
||||
fetchLogger.Trace("Body from options: %q", utils.LimitString(body, 100))
|
||||
}
|
||||
}
|
||||
}
|
||||
fetchLogger.Debug("Fetch request details: Method=%q, URL=%q, BodyLength=%d, Headers=%v", method, url, len(body), headers)
|
||||
|
||||
// Create HTTP request
|
||||
req, err := http.NewRequest(method, url, strings.NewReader(body))
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error creating HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error creating request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
|
||||
// Set headers
|
||||
for key, value := range headers {
|
||||
req.Header.Set(key, value)
|
||||
}
|
||||
fetchLogger.Debug("HTTP request created and headers set")
|
||||
fetchLogger.Trace("HTTP Request: %+v", req)
|
||||
|
||||
// Make request
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error making HTTP request: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error making request: %v", err)))
|
||||
return 2
|
||||
}
|
||||
defer func() {
|
||||
fetchLogger.Debug("Closing HTTP response body")
|
||||
resp.Body.Close()
|
||||
}()
|
||||
fetchLogger.Debug("HTTP request executed. Status Code: %d", resp.StatusCode)
|
||||
|
||||
// Read response body
|
||||
bodyBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
fetchLogger.Error("Error reading response body: %v", err)
|
||||
L.Push(lua.LNil)
|
||||
L.Push(lua.LString(fmt.Sprintf("Error reading response: %v", err)))
|
||||
return 2
|
||||
}
|
||||
fetchLogger.Trace("Response body length: %d", len(bodyBytes))
|
||||
|
||||
// Create response table
|
||||
responseTable := L.NewTable()
|
||||
responseTable.RawSetString("status", lua.LNumber(resp.StatusCode))
|
||||
responseTable.RawSetString("statusText", lua.LString(resp.Status))
|
||||
responseTable.RawSetString("ok", lua.LBool(resp.StatusCode >= 200 && resp.StatusCode < 300))
|
||||
responseTable.RawSetString("body", lua.LString(string(bodyBytes)))
|
||||
fetchLogger.Debug("Created Lua response table")
|
||||
|
||||
// Set headers in response
|
||||
headersTable := L.NewTable()
|
||||
for key, values := range resp.Header {
|
||||
headersTable.RawSetString(key, lua.LString(values[0]))
|
||||
fetchLogger.Trace("Response header: %q = %q", key, values[0])
|
||||
}
|
||||
responseTable.RawSetString("headers", headersTable)
|
||||
fetchLogger.Trace("Full response table: %v", responseTable)
|
||||
|
||||
L.Push(responseTable)
|
||||
fetchLogger.Debug("Pushed response table to Lua stack")
|
||||
return 1
|
||||
}
|
||||
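A sketch of driving the fetch binding from the Lua side; the response-table fields (status, statusText, ok, body, headers) follow the bindings set above, and the URL is just a placeholder.

```go
package processor

// Sketch: calling fetch(url, options) from Lua through the binding above.
func exampleFetch() error {
	L, err := NewLuaState() // binds print and fetch
	if err != nil {
		return err
	}
	defer L.Close()

	return L.DoString(`
		local resp, err = fetch("https://example.com/api", {
			method  = "POST",
			headers = { ["Content-Type"] = "application/json" },
			body    = '{"ping":true}',
		})
		if not resp then
			print("fetch failed:", err)
			return
		end
		print(resp.status, resp.ok)
		print(resp.body)
	`)
}
```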
|
@@ -1,88 +1,19 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
|
||||
"modify/logger"
|
||||
)
|
||||
|
||||
// RegexProcessor implements the Processor interface using regex patterns
|
||||
type RegexProcessor struct{}
|
||||
|
||||
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
|
||||
func (p *RegexProcessor) ToLua(L *lua.LState, data interface{}) error {
|
||||
captureGroups, ok := data.([]*CaptureGroup)
|
||||
if !ok {
|
||||
return fmt.Errorf("expected []*CaptureGroup for captures, got %T", data)
|
||||
}
|
||||
|
||||
groupindex := 0
|
||||
for _, capture := range captureGroups {
|
||||
if capture.Name == "" {
|
||||
// We don't want to change the name of the capture group
|
||||
// Even if it's empty
|
||||
tempName := fmt.Sprintf("%d", groupindex+1)
|
||||
groupindex++
|
||||
|
||||
L.SetGlobal("s"+tempName, lua.LString(capture.Value))
|
||||
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal("v"+tempName, lua.LNumber(val))
|
||||
}
|
||||
} else {
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal(capture.Name, lua.LNumber(val))
|
||||
} else {
|
||||
L.SetGlobal(capture.Name, lua.LString(capture.Value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *RegexProcessor) FromLua(L *lua.LState) (interface{}, error) {
|
||||
// Stub to satisfy interface
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// FromLua implements the Processor interface for RegexProcessor
|
||||
func (p *RegexProcessor) FromLuaCustom(L *lua.LState, captureGroups []*CaptureGroup) ([]*CaptureGroup, error) {
|
||||
captureIndex := 0
|
||||
for _, capture := range captureGroups {
|
||||
if capture.Name == "" {
|
||||
capture.Name = fmt.Sprintf("%d", captureIndex+1)
|
||||
|
||||
vVarName := fmt.Sprintf("v%s", capture.Name)
|
||||
sVarName := fmt.Sprintf("s%s", capture.Name)
|
||||
captureIndex++
|
||||
|
||||
vLuaVal := L.GetGlobal(vVarName)
|
||||
sLuaVal := L.GetGlobal(sVarName)
|
||||
|
||||
if sLuaVal.Type() == lua.LTString {
|
||||
capture.Updated = sLuaVal.String()
|
||||
}
|
||||
// Numbers have priority
|
||||
if vLuaVal.Type() == lua.LTNumber {
|
||||
capture.Updated = vLuaVal.String()
|
||||
}
|
||||
} else {
|
||||
// Easy shit
|
||||
capture.Updated = L.GetGlobal(capture.Name).String()
|
||||
}
|
||||
}
|
||||
|
||||
return captureGroups, nil
|
||||
}
|
||||
// regexLogger is a scoped logger for the processor/regex package.
|
||||
var regexLogger = logger.Default.WithPrefix("processor/regex")
|
||||
|
||||
type CaptureGroup struct {
|
||||
Name string
|
||||
@@ -90,53 +21,87 @@ type CaptureGroup struct {
|
||||
Updated string
|
||||
Range [2]int
|
||||
}
|
||||
type ReplaceCommand struct {
|
||||
From int
|
||||
To int
|
||||
With string
|
||||
}
|
||||
|
||||
// ProcessContent applies regex replacement with Lua processing
|
||||
func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr string) (string, int, int, error) {
|
||||
pattern = ResolveRegexPlaceholders(pattern)
|
||||
logger.Debug("Compiling regex pattern: %s", pattern)
|
||||
// The filename here exists ONLY so we can pass it to the lua environment
|
||||
// It's not used for anything else
|
||||
func ProcessRegex(content string, command utils.ModifyCommand, filename string) ([]utils.ReplaceCommand, error) {
|
||||
processRegexLogger := regexLogger.WithPrefix("ProcessRegex").WithField("commandName", command.Name).WithField("file", filename)
|
||||
processRegexLogger.Debug("Starting regex processing for file")
|
||||
processRegexLogger.Trace("Initial file content length: %d", len(content))
|
||||
processRegexLogger.Trace("Command details: %+v", command)
|
||||
|
||||
var commands []utils.ReplaceCommand
|
||||
// Start timing the regex processing
|
||||
startTime := time.Now()
|
||||
|
||||
// We don't HAVE to do this multiple times for a pattern
|
||||
// But it's quick enough for us to not care
|
||||
pattern := resolveRegexPlaceholders(command.Regex)
|
||||
processRegexLogger.Debug("Resolved regex placeholders. Pattern: %s", pattern)
|
||||
|
||||
// I'm not too happy about having to trim regex, we could have meaningful whitespace or newlines
|
||||
// But it's a compromise that allows us to use | in yaml
|
||||
// Otherwise we would have to escape every god damn pair of quotation marks
|
||||
// And a bunch of other shit
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
processRegexLogger.Debug("Trimmed regex pattern: %s", pattern)
|
||||
|
||||
patternCompileStart := time.Now()
|
||||
compiledPattern, err := regexp.Compile(pattern)
|
||||
if err != nil {
|
||||
logger.Error("Error compiling pattern: %v", err)
|
||||
return "", 0, 0, fmt.Errorf("error compiling pattern: %v", err)
|
||||
processRegexLogger.Error("Error compiling pattern %q: %v", pattern, err)
|
||||
return commands, fmt.Errorf("error compiling pattern: %v", err)
|
||||
}
|
||||
logger.Debug("Compiled pattern successfully: %s", pattern)
|
||||
processRegexLogger.Debug("Compiled pattern successfully in %v", time.Since(patternCompileStart))
|
||||
|
||||
previous := luaExpr
|
||||
luaExpr = BuildLuaScript(luaExpr)
|
||||
logger.Debug("Changing Lua expression from: %s to: %s", previous, luaExpr)
|
||||
|
||||
// Initialize Lua environment
|
||||
modificationCount := 0
|
||||
// Same here, it's just string concatenation, it won't kill us
|
||||
// More important is that we don't fuck up the command
|
||||
// But we shouldn't be able to since it's passed by value
|
||||
previousLuaExpr := command.Lua
|
||||
luaExpr := BuildLuaScript(command.Lua)
|
||||
processRegexLogger.Debug("Transformed Lua expression: %q → %q", previousLuaExpr, luaExpr)
|
||||
processRegexLogger.Trace("Full Lua script: %q", utils.LimitString(luaExpr, 200))
|
||||
|
||||
// Process all regex matches
|
||||
result := content
|
||||
matchFindStart := time.Now()
|
||||
indices := compiledPattern.FindAllStringSubmatchIndex(content, -1)
|
||||
logger.Debug("Found %d matches in the content", len(indices))
|
||||
matchFindDuration := time.Since(matchFindStart)
|
||||
|
||||
processRegexLogger.Debug("Found %d matches in content of length %d (search took %v)",
|
||||
len(indices), len(content), matchFindDuration)
|
||||
processRegexLogger.Trace("Match indices: %v", indices)
|
||||
|
||||
// Log pattern complexity metrics
|
||||
patternComplexity := estimatePatternComplexity(pattern)
|
||||
processRegexLogger.Debug("Pattern complexity estimate: %d", patternComplexity)
|
||||
|
||||
if len(indices) == 0 {
|
||||
processRegexLogger.Warning("No matches found for regex: %q", pattern)
|
||||
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
return commands, nil
|
||||
}
|
||||
|
||||
// We walk backwards because we're replacing something with something else that might be longer
|
||||
// And in the case it is longer than the original, all indices past that change will be fucked up
|
||||
// By going backwards we fuck up all the indices to the end of the file that we don't care about
|
||||
// Because there either aren't any (last match) or they're already modified (subsequent matches)
|
||||
for i := len(indices) - 1; i >= 0; i-- {
|
||||
for i, matchIndices := range indices {
|
||||
matchLogger := processRegexLogger.WithField("matchNum", i+1)
|
||||
matchLogger.Debug("Processing match %d of %d", i+1, len(indices))
|
||||
matchLogger.Trace("Match indices: %v (match position %d-%d)", matchIndices, matchIndices[0], matchIndices[1])
|
||||
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
logger.Error("Error creating Lua state: %v", err)
|
||||
return "", 0, 0, fmt.Errorf("error creating Lua state: %v", err)
|
||||
matchLogger.Error("Error creating Lua state: %v", err)
|
||||
return commands, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
L.SetGlobal("file", lua.LString(filename))
|
||||
// Hmm... Maybe we don't want to defer this..
|
||||
// Maybe we want to close them every iteration
|
||||
// We'll leave it as is for now
|
||||
defer L.Close()
|
||||
logger.Trace("Lua state created successfully")
|
||||
|
||||
matchIndices := indices[i]
|
||||
logger.Trace("Processing match indices: %v", matchIndices)
|
||||
matchLogger.Trace("Lua state created successfully for match %d", i+1)
|
||||
|
||||
// Why we're doing this whole song and dance of indices is to properly handle empty matches
|
||||
// Plus it's a little cleaner to surgically replace our matches
|
||||
@@ -145,22 +110,32 @@ func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr
|
||||
// So when we're cutting open the array we say 0:7 + modified + 7:end
|
||||
// As if concatenating in the middle of the array
|
||||
// Plus it supports lookarounds
|
||||
match := content[matchIndices[0]:matchIndices[1]]
|
||||
logger.Trace("Matched content: %s", match)
|
||||
matchContent := content[matchIndices[0]:matchIndices[1]]
|
||||
matchPreview := utils.LimitString(matchContent, 50)
|
||||
matchLogger.Trace("Matched content: %q (length: %d)", matchPreview, len(matchContent))
|
||||
|
||||
groups := matchIndices[2:]
|
||||
if len(groups) <= 0 {
|
||||
logger.Warning("No capture groups for lua to chew on")
|
||||
matchLogger.Warning("No capture groups found for match %q and regex %q", matchPreview, pattern)
|
||||
continue
|
||||
}
|
||||
if len(groups)%2 == 1 {
|
||||
logger.Warning("Odd number of indices of groups, what the fuck?")
|
||||
matchLogger.Warning("Invalid number of group indices (%d), should be even: %v", len(groups), groups)
|
||||
continue
|
||||
}
|
||||
|
||||
// Count how many valid groups we have
|
||||
validGroups := 0
|
||||
for j := 0; j < len(groups); j += 2 {
|
||||
if groups[j] != -1 && groups[j+1] != -1 {
|
||||
validGroups++
|
||||
}
|
||||
}
|
||||
matchLogger.Debug("Found %d valid capture groups in match", validGroups)
|
||||
|
||||
for _, index := range groups {
|
||||
if index == -1 {
|
||||
// return "", 0, 0, fmt.Errorf("negative indices encountered: %v. This indicates that there was an issue with the match indices, possibly due to an empty match or an unexpected pattern. Please check the regex pattern and input content.", matchIndices)
|
||||
logger.Warning("Negative indices encountered: %v. This indicates that there was an issue with the match indices, possibly due to an empty match or an unexpected pattern. This is not an error but it's possibly not what you want.", matchIndices)
|
||||
matchLogger.Warning("Negative index encountered in match indices %v. This may indicate an issue with the regex pattern or an empty/optional capture group.", matchIndices)
|
||||
continue
|
||||
}
|
||||
}
|
||||
@@ -172,90 +147,156 @@ func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr
|
||||
captureGroups := make([]*CaptureGroup, 0, len(groups)/2)
|
||||
groupNames := compiledPattern.SubexpNames()[1:]
|
||||
for i, name := range groupNames {
|
||||
// if name == "" {
|
||||
// continue
|
||||
// }
|
||||
start := groups[i*2]
|
||||
end := groups[i*2+1]
|
||||
if start == -1 || end == -1 {
|
||||
matchLogger.Debug("Skipping empty or unmatched capture group #%d (name: %q)", i+1, name)
|
||||
continue
|
||||
}
|
||||
|
||||
value := content[start:end]
|
||||
captureGroups = append(captureGroups, &CaptureGroup{
|
||||
Name: name,
|
||||
Value: content[start:end],
|
||||
Value: value,
|
||||
Range: [2]int{start, end},
|
||||
})
|
||||
|
||||
// Include name info in log if available
|
||||
if name != "" {
|
||||
matchLogger.Trace("Capture group '%s': %q (pos %d-%d)", name, value, start, end)
|
||||
} else {
|
||||
matchLogger.Trace("Capture group #%d: %q (pos %d-%d)", i+1, value, start, end)
|
||||
}
|
||||
}
|
||||
|
||||
for _, capture := range captureGroups {
|
||||
logger.Trace("Capture group: %+v", *capture)
|
||||
// Use the DeduplicateGroups flag to control whether to deduplicate capture groups
|
||||
if !command.NoDedup {
|
||||
matchLogger.Debug("Deduplicating capture groups as specified in command settings")
|
||||
captureGroups = deduplicateGroups(captureGroups)
|
||||
matchLogger.Trace("Capture groups after deduplication: %v", captureGroups)
|
||||
} else {
|
||||
matchLogger.Debug("Skipping deduplication of capture groups (NoDedup is true)")
|
||||
}
|
||||
|
||||
if err := p.ToLua(L, captureGroups); err != nil {
|
||||
logger.Error("Error setting Lua variables: %v", err)
|
||||
if err := toLua(L, captureGroups); err != nil {
|
||||
matchLogger.Error("Failed to set Lua variables for capture groups: %v", err)
|
||||
continue
|
||||
}
|
||||
logger.Trace("Lua variables set successfully")
|
||||
matchLogger.Debug("Set %d capture groups as Lua variables", len(captureGroups))
|
||||
matchLogger.Trace("Lua globals set for capture groups")
|
||||
|
||||
if err := L.DoString(luaExpr); err != nil {
|
||||
logger.Error("Error executing Lua code %s for groups %+v: %v", luaExpr, captureGroups, err)
|
||||
matchLogger.Error("Lua script execution failed: %v\nScript: %s\nCapture Groups: %+v",
|
||||
err, utils.LimitString(luaExpr, 200), captureGroups)
|
||||
continue
|
||||
}
|
||||
logger.Trace("Lua code executed successfully")
|
||||
matchLogger.Debug("Lua script executed successfully")
|
||||
|
||||
// Get modifications from Lua
|
||||
captureGroups, err = p.FromLuaCustom(L, captureGroups)
|
||||
updatedCaptureGroups, err := fromLua(L, captureGroups)
|
||||
if err != nil {
|
||||
logger.Error("Error getting modifications: %v", err)
|
||||
matchLogger.Error("Failed to retrieve modifications from Lua: %v", err)
|
||||
continue
|
||||
}
|
||||
matchLogger.Debug("Retrieved updated values from Lua")
|
||||
matchLogger.Trace("Updated capture groups from Lua: %v", updatedCaptureGroups)
|
||||
|
||||
replacement := ""
|
||||
replacementVar := L.GetGlobal("replacement")
|
||||
if replacementVar.Type() != lua.LTNil {
|
||||
replacement = replacementVar.String()
|
||||
matchLogger.Debug("Using global replacement variable from Lua: %q", replacement)
|
||||
}
|
||||
|
||||
// Check if modification flag is set
|
||||
modifiedVal := L.GetGlobal("modified")
|
||||
if modifiedVal.Type() != lua.LTBool || !lua.LVAsBool(modifiedVal) {
|
||||
logger.Debug("No modifications made by Lua script")
|
||||
matchLogger.Debug("Skipping match - no modifications indicated by Lua script")
|
||||
continue
|
||||
}
|
||||
|
||||
if replacement == "" {
|
||||
commands := make([]ReplaceCommand, 0, len(captureGroups))
|
||||
// Apply the modifications to the original match
|
||||
replacement = match
|
||||
for _, capture := range captureGroups {
|
||||
logger.Debug("Applying modification: %s", capture.Updated)
|
||||
replacement = matchContent
|
||||
|
||||
// Count groups that were actually modified
|
||||
modifiedGroupsCount := 0
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value != capture.Updated {
|
||||
modifiedGroupsCount++
|
||||
}
|
||||
}
|
||||
matchLogger.Info("%d of %d capture groups identified for modification", modifiedGroupsCount, len(updatedCaptureGroups))
|
||||
|
||||
for _, capture := range updatedCaptureGroups {
|
||||
if capture.Value == capture.Updated {
|
||||
matchLogger.Debug("Capture group unchanged: %s", utils.LimitString(capture.Value, 50))
|
||||
continue
|
||||
}
|
||||
|
||||
// Log what changed with context
|
||||
matchLogger.Debug("Capture group %q scheduled for modification: %q → %q",
|
||||
capture.Name, utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
|
||||
// Indices of the group are relative to content
|
||||
// To relate them to match we have to subtract the match start index
|
||||
// replacement = replacement[:groupStart] + newVal + replacement[groupEnd:]
|
||||
commands = append(commands, ReplaceCommand{
|
||||
From: capture.Range[0] - matchIndices[0],
|
||||
To: capture.Range[1] - matchIndices[0],
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: capture.Range[0],
|
||||
To: capture.Range[1],
|
||||
With: capture.Updated,
|
||||
})
|
||||
matchLogger.Trace("Added replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
|
||||
sort.Slice(commands, func(i, j int) bool {
|
||||
return commands[i].From > commands[j].From
|
||||
} else {
|
||||
matchLogger.Debug("Using full replacement string from Lua: %q", utils.LimitString(replacement, 50))
|
||||
commands = append(commands, utils.ReplaceCommand{
|
||||
From: matchIndices[0],
|
||||
To: matchIndices[1],
|
||||
With: replacement,
|
||||
})
|
||||
|
||||
for _, command := range commands {
|
||||
replacement = replacement[:command.From] + command.With + replacement[command.To:]
|
||||
matchLogger.Trace("Added full replacement command: %+v", commands[len(commands)-1])
|
||||
}
|
||||
}
|
||||
|
||||
modificationCount++
|
||||
result = result[:matchIndices[0]] + replacement + result[matchIndices[1]:]
|
||||
logger.Debug("Modification count updated: %d", modificationCount)
|
||||
processRegexLogger.Debug("Total regex processing time: %v", time.Since(startTime))
|
||||
processRegexLogger.Debug("Generated %d total modifications", len(commands))
|
||||
return commands, nil
|
||||
}
|
||||
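An end-to-end sketch of wiring a command through ProcessRegex and splicing the returned replacements back in. The utils.ModifyCommand field names (Name, Regex, Lua) are assumptions based on this file; the back-to-front application mirrors the walk-backwards reasoning in the comments above.

```go
package processor

import (
	"sort"

	"cook/utils"
)

// Sketch: one regex pass over a snippet of XML-ish content.
func exampleProcessRegex(content string) (string, error) {
	cmd := utils.ModifyCommand{
		Name:  "double-weight",
		Regex: `Weight="!num"`, // !num expands to a numeric capture group
		Lua:   "*2",            // expands to: v1 = v1*2
	}

	cmds, err := ProcessRegex(content, cmd, "items.xml")
	if err != nil {
		return "", err
	}

	// Apply replacements back-to-front so earlier offsets stay valid.
	sort.Slice(cmds, func(i, j int) bool { return cmds[i].From > cmds[j].From })
	for _, c := range cmds {
		content = content[:c.From] + c.With + content[c.To:]
	}
	return content, nil
}
```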
|
||||
func deduplicateGroups(captureGroups []*CaptureGroup) []*CaptureGroup {
|
||||
deduplicateGroupsLogger := regexLogger.WithPrefix("deduplicateGroups")
|
||||
deduplicateGroupsLogger.Debug("Starting deduplication of capture groups")
|
||||
deduplicateGroupsLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
// Preserve input order and drop any group that overlaps with an already accepted group
|
||||
accepted := make([]*CaptureGroup, 0, len(captureGroups))
|
||||
for _, group := range captureGroups {
|
||||
groupLogger := deduplicateGroupsLogger.WithField("groupName", group.Name).WithField("groupRange", group.Range)
|
||||
groupLogger.Debug("Processing capture group")
|
||||
|
||||
overlaps := false
|
||||
for _, kept := range accepted {
|
||||
// Overlap if start < keptEnd and end > keptStart (adjacent is allowed)
|
||||
if group.Range[0] < kept.Range[1] && group.Range[1] > kept.Range[0] {
|
||||
overlaps = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
logger.Debug("Process completed with %d modifications", modificationCount)
|
||||
return result, modificationCount, len(indices), nil
|
||||
if overlaps {
|
||||
groupLogger.Warning("Overlapping capture group detected and skipped.")
|
||||
continue
|
||||
}
|
||||
|
||||
groupLogger.Debug("Capture group does not overlap with previously accepted groups. Adding.")
|
||||
accepted = append(accepted, group)
|
||||
}
|
||||
|
||||
deduplicateGroupsLogger.Debug("Finished deduplication. Original %d groups, %d deduplicated.", len(captureGroups), len(accepted))
|
||||
deduplicateGroupsLogger.Trace("Deduplicated groups: %v", accepted)
|
||||
|
||||
return accepted
|
||||
}
|
||||
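A tiny sketch of the overlap rule implemented above: groups are kept in input order and dropped when their range intersects an already accepted one, while adjacent (touching) ranges are kept.

```go
package processor

import "fmt"

// Sketch (package-internal, deduplicateGroups is unexported).
func exampleDedup() {
	groups := []*CaptureGroup{
		{Name: "a", Range: [2]int{0, 10}},
		{Name: "b", Range: [2]int{5, 15}},  // overlaps "a" -> dropped
		{Name: "c", Range: [2]int{10, 20}}, // adjacent to "a" -> kept
	}
	for _, g := range deduplicateGroups(groups) {
		fmt.Println(g.Name, g.Range)
	}
	// a [0 10]
	// c [10 20]
}
```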
|
||||
// The order of these replaces is important
|
||||
@@ -263,37 +304,196 @@ func (p *RegexProcessor) ProcessContent(content string, pattern string, luaExpr
|
||||
// If it were not here our !num in a named capture group would
|
||||
// Expand to another capture group in the capture group
|
||||
// We really only want one (our named) capture group
|
||||
func ResolveRegexPlaceholders(pattern string) string {
|
||||
func resolveRegexPlaceholders(pattern string) string {
|
||||
resolveLogger := regexLogger.WithPrefix("resolveRegexPlaceholders").WithField("originalPattern", utils.LimitString(pattern, 100))
|
||||
resolveLogger.Debug("Resolving regex placeholders in pattern")
|
||||
|
||||
// Handle special pattern modifications
|
||||
if !strings.HasPrefix(pattern, "(?s)") {
|
||||
pattern = "(?s)" + pattern
|
||||
// Use fmt.Printf for test compatibility
|
||||
fmt.Printf("Pattern modified to include (?s): %s\n", pattern)
|
||||
resolveLogger.Debug("Prepended '(?s)' to pattern for single-line mode")
|
||||
}
|
||||
|
||||
namedGroupNum := regexp.MustCompile(`(?:(\?<[^>]+>)(!num))`)
|
||||
pattern = namedGroupNum.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("namedGroupNumReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing named group !num placeholder")
|
||||
parts := namedGroupNum.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for namedGroupNum: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
replacement := `-?\d*\.?\d+`
|
||||
funcLogger.Trace("Replacing !num in named group with: %q", replacement)
|
||||
return parts[1] + replacement
|
||||
})
|
||||
pattern = strings.ReplaceAll(pattern, "!num", `"?(-?\d*\.?\d+)"?`)
|
||||
resolveLogger.Debug("Handled named group !num placeholders")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!num", `(-?\d*\.?\d+)`)
|
||||
resolveLogger.Debug("Replaced !num with numeric capture group")
|
||||
|
||||
pattern = strings.ReplaceAll(pattern, "!any", `.*?`)
|
||||
resolveLogger.Debug("Replaced !any with non-greedy wildcard")
|
||||
|
||||
repPattern := regexp.MustCompile(`!rep\(([^,]+),\s*(\d+)\)`)
|
||||
// !rep(pattern, count) repeats the pattern n times
|
||||
// Inserting !any between each repetition
|
||||
pattern = repPattern.ReplaceAllStringFunc(pattern, func(match string) string {
|
||||
funcLogger := resolveLogger.WithPrefix("repPatternReplace").WithField("match", utils.LimitString(match, 50))
|
||||
funcLogger.Debug("Processing !rep placeholder")
|
||||
parts := repPattern.FindStringSubmatch(match)
|
||||
if len(parts) != 3 {
|
||||
funcLogger.Warning("Unexpected number of submatches for repPattern: %d. Returning original match.", len(parts))
|
||||
return match
|
||||
}
|
||||
repeatedPattern := parts[1]
|
||||
count := parts[2]
|
||||
repetitions, _ := strconv.Atoi(count)
|
||||
return strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
|
||||
countStr := parts[2]
|
||||
repetitions, err := strconv.Atoi(countStr)
|
||||
if err != nil {
|
||||
funcLogger.Error("Failed to parse repetition count %q: %v. Returning original match.", countStr, err)
|
||||
return match
|
||||
}
|
||||
|
||||
var finalReplacement string
|
||||
if repetitions > 0 {
|
||||
finalReplacement = strings.Repeat(repeatedPattern+".*?", repetitions-1) + repeatedPattern
|
||||
} else {
|
||||
finalReplacement = ""
|
||||
}
|
||||
|
||||
funcLogger.Trace("Replaced !rep with %d repetitions of %q: %q", repetitions, utils.LimitString(repeatedPattern, 30), utils.LimitString(finalReplacement, 100))
|
||||
return finalReplacement
|
||||
})
|
||||
resolveLogger.Debug("Handled !rep placeholders")
|
||||
|
||||
resolveLogger.Debug("Finished resolving regex placeholders")
|
||||
resolveLogger.Trace("Final resolved pattern: %q", utils.LimitString(pattern, 100))
|
||||
return pattern
|
||||
}
|
||||
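A few placeholder expansions, shown roughly as comments; resolveRegexPlaceholders is unexported, so this would sit inside the package, and the exact !num expansion follows whichever ReplaceAll above is current.

```go
package processor

import "fmt"

// Sketch (package-internal): placeholder expansions performed above.
func exampleResolvePlaceholders() {
	fmt.Println(resolveRegexPlaceholders(`Weight="!num"`))
	// -> (?s)Weight="..." with !num expanded to a numeric capture group

	fmt.Println(resolveRegexPlaceholders(`(?<w>!num)`))
	// -> (?s)(?<w>-?\d*\.?\d+)   (no extra parentheses inside the named group)

	fmt.Println(resolveRegexPlaceholders(`<Item!any/>`))
	// -> (?s)<Item.*?/>          (!any is a non-greedy wildcard)

	fmt.Println(resolveRegexPlaceholders(`!rep(Row!any, 3)`))
	// -> roughly Row.*?.*?Row.*?.*?Row.*? (three repetitions joined by .*?)
}
```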
|
||||
// ToLua sets capture groups as Lua variables (v1, v2, etc. for numeric values and s1, s2, etc. for strings)
|
||||
func toLua(L *lua.LState, data interface{}) error {
|
||||
toLuaLogger := regexLogger.WithPrefix("toLua")
|
||||
toLuaLogger.Debug("Setting capture groups as Lua variables")
|
||||
|
||||
captureGroups, ok := data.([]*CaptureGroup)
|
||||
if !ok {
|
||||
toLuaLogger.Error("Invalid data type for toLua. Expected []*CaptureGroup, got %T", data)
|
||||
return fmt.Errorf("expected []*CaptureGroup for captures, got %T", data)
|
||||
}
|
||||
toLuaLogger.Trace("Input capture groups: %v", captureGroups)
|
||||
|
||||
groupindex := 0
|
||||
for _, capture := range captureGroups {
|
||||
groupLogger := toLuaLogger.WithField("captureGroup", capture.Name).WithField("value", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group for Lua")
|
||||
|
||||
if capture.Name == "" {
|
||||
// We don't want to change the name of the capture group
|
||||
// Even if it's empty
|
||||
tempName := fmt.Sprintf("%d", groupindex+1)
|
||||
groupindex++
|
||||
groupLogger.Debug("Unnamed capture group, assigning temporary name: %q", tempName)
|
||||
|
||||
L.SetGlobal("s"+tempName, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global s%s = %q", tempName, capture.Value)
|
||||
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal("v"+tempName, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global v%s = %f", tempName, val)
|
||||
} else {
|
||||
groupLogger.Trace("Value %q is not numeric, skipping v%s assignment", capture.Value, tempName)
|
||||
}
|
||||
} else {
|
||||
val, err := strconv.ParseFloat(capture.Value, 64)
|
||||
if err == nil {
|
||||
L.SetGlobal(capture.Name, lua.LNumber(val))
|
||||
groupLogger.Trace("Set Lua global %s = %f (numeric)", capture.Name, val)
|
||||
} else {
|
||||
L.SetGlobal(capture.Name, lua.LString(capture.Value))
|
||||
groupLogger.Trace("Set Lua global %s = %q (string)", capture.Name, capture.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
toLuaLogger.Debug("Finished setting capture groups as Lua variables")
|
||||
return nil
|
||||
}
|
||||
|
||||
// FromLua implements the Processor interface for RegexProcessor
|
||||
func fromLua(L *lua.LState, captureGroups []*CaptureGroup) ([]*CaptureGroup, error) {
|
||||
fromLuaLogger := regexLogger.WithPrefix("fromLua")
|
||||
fromLuaLogger.Debug("Retrieving modifications from Lua for capture groups")
|
||||
fromLuaLogger.Trace("Initial capture groups: %v", captureGroups)
|
||||
|
||||
captureIndex := 0
|
||||
for _, capture := range captureGroups {
|
||||
groupLogger := fromLuaLogger.WithField("originalCaptureName", capture.Name).WithField("originalValue", utils.LimitString(capture.Value, 50))
|
||||
groupLogger.Debug("Processing capture group to retrieve updated value")
|
||||
|
||||
if capture.Name == "" {
|
||||
// This case means it was an unnamed capture group originally.
|
||||
// We need to reconstruct the original temporary name to fetch its updated value.
|
||||
// The name will be set to an integer if it was empty, then incremented.
|
||||
// So, we use the captureIndex to get the correct 'vX' and 'sX' variables.
|
||||
tempName := fmt.Sprintf("%d", captureIndex+1)
|
||||
groupLogger.Debug("Retrieving updated value for unnamed group (temp name: %q)", tempName)
|
||||
|
||||
vVarName := fmt.Sprintf("v%s", tempName)
|
||||
sVarName := fmt.Sprintf("s%s", tempName)
|
||||
captureIndex++
|
||||
|
||||
vLuaVal := L.GetGlobal(vVarName)
|
||||
sLuaVal := L.GetGlobal(sVarName)
|
||||
|
||||
groupLogger.Trace("Lua values for unnamed group: v=%v, s=%v", vLuaVal, sLuaVal)
|
||||
|
||||
if sLuaVal.Type() == lua.LTString {
|
||||
capture.Updated = sLuaVal.String()
|
||||
groupLogger.Trace("Updated value from s%s (string): %q", tempName, capture.Updated)
|
||||
}
|
||||
// Numbers have priority
|
||||
if vLuaVal.Type() == lua.LTNumber {
|
||||
capture.Updated = vLuaVal.String()
|
||||
groupLogger.Trace("Updated value from v%s (numeric): %q", tempName, capture.Updated)
|
||||
}
|
||||
} else {
|
||||
// Easy shit, directly use the named capture group
|
||||
updatedValue := L.GetGlobal(capture.Name)
|
||||
if updatedValue.Type() != lua.LTNil {
|
||||
capture.Updated = updatedValue.String()
|
||||
groupLogger.Trace("Updated value for named group %q: %q", capture.Name, capture.Updated)
|
||||
} else {
|
||||
groupLogger.Debug("Named capture group %q not found in Lua globals or is nil. Keeping original value.", capture.Name)
|
||||
capture.Updated = capture.Value // Keep original if not found or nil
|
||||
}
|
||||
}
|
||||
groupLogger.Debug("Finished processing capture group. Original: %q, Updated: %q", utils.LimitString(capture.Value, 50), utils.LimitString(capture.Updated, 50))
|
||||
}
|
||||
|
||||
fromLuaLogger.Debug("Finished retrieving modifications from Lua")
|
||||
fromLuaLogger.Trace("Final updated capture groups: %v", captureGroups)
|
||||
return captureGroups, nil
|
||||
}
|
||||
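For reference, the variable-naming contract implied by toLua and fromLua above, sketched as a Lua expression a command might use (illustrative only).

```go
package processor

// Sketch: for a pattern such as
//
//	MaxStack="!num" !any Weight="(?<weight>!num)"
//
// the first (unnamed) group is exposed to Lua as s1 (string) and v1 (number,
// when the value parses), and the named group simply as `weight`. Assigning to
// those globals is what fromLua reads back into CaptureGroup.Updated, with the
// numeric vN taking priority over sN for unnamed groups.
const exampleCaptureExpr = `
v1 = v1 * 10          -- unnamed numeric group: write through v1
weight = weight / 2   -- named group: write through its own name
`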
|
||||
// estimatePatternComplexity gives a rough estimate of regex pattern complexity
|
||||
// This can help identify potentially problematic patterns
|
||||
func estimatePatternComplexity(pattern string) int {
|
||||
estimateComplexityLogger := regexLogger.WithPrefix("estimatePatternComplexity").WithField("pattern", utils.LimitString(pattern, 100))
|
||||
estimateComplexityLogger.Debug("Estimating regex pattern complexity")
|
||||
complexity := len(pattern)
|
||||
|
||||
// Add complexity for potentially expensive operations
|
||||
complexity += strings.Count(pattern, ".*") * 10 // Greedy wildcard
|
||||
complexity += strings.Count(pattern, ".*?") * 5 // Non-greedy wildcard
|
||||
complexity += strings.Count(pattern, "[^") * 3 // Negated character class
|
||||
complexity += strings.Count(pattern, "\\b") * 2 // Word boundary
|
||||
complexity += strings.Count(pattern, "(") * 2 // Capture groups
|
||||
complexity += strings.Count(pattern, "(?:") * 1 // Non-capture groups
|
||||
complexity += strings.Count(pattern, "\\1") * 3 // Backreferences
|
||||
complexity += strings.Count(pattern, "{") * 2 // Counted repetition
|
||||
|
||||
estimateComplexityLogger.Debug("Estimated pattern complexity: %d", complexity)
|
||||
return complexity
|
||||
}
|
||||
|
File diff suppressed because it is too large
283 processor/surgical_json_test.go (new file)
@@ -0,0 +1,283 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"cook/utils"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSurgicalJSONEditing(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
content string
|
||||
luaCode string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Modify single field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42,
|
||||
"description": "original"
|
||||
}`,
|
||||
luaCode: `
|
||||
data.value = 84
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 84,
|
||||
"description": "original"
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "Add new field",
|
||||
content: `{
|
||||
"name": "test",
|
||||
"value": 42
|
||||
}`,
|
||||
luaCode: `
|
||||
data.newField = "added"
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"name": "test",
|
||||
"value": 42,
|
||||
"newField": "added"
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "Modify nested field",
|
||||
content: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": false,
|
||||
"timeout": 30
|
||||
}
|
||||
}
|
||||
}`,
|
||||
luaCode: `
|
||||
data.config.settings.enabled = true
|
||||
data.config.settings.timeout = 60
|
||||
modified = true
|
||||
`,
|
||||
expected: `{
|
||||
"config": {
|
||||
"settings": {
|
||||
"enabled": true,
|
||||
"timeout": 60
|
||||
}
|
||||
}
|
||||
}`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: tt.luaCode,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(tt.content, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := tt.content
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Instead of exact string comparison, check that key values are present
|
||||
// This accounts for field ordering differences in JSON
|
||||
if !contains(result, `"value": 84`) && tt.name == "Modify single field" {
|
||||
t.Errorf("Expected value to be 84, got:\n%s", result)
|
||||
}
|
||||
if !contains(result, `"newField": "added"`) && tt.name == "Add new field" {
|
||||
t.Errorf("Expected newField to be added, got:\n%s", result)
|
||||
}
|
||||
if !contains(result, `"enabled": true`) && tt.name == "Modify nested field" {
|
||||
t.Errorf("Expected enabled to be true, got:\n%s", result)
|
||||
}
|
||||
if !contains(result, `"timeout": 60`) && tt.name == "Modify nested field" {
|
||||
t.Errorf("Expected timeout to be 60, got:\n%s", result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSurgicalJSONPreservesFormatting(t *testing.T) {
|
||||
// Test that surgical editing preserves the original formatting structure
|
||||
content := `{
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"Description": "",
|
||||
"DisplayName": "",
|
||||
"FlavorText": "",
|
||||
"Icon": "None",
|
||||
"MaxStack": 1,
|
||||
"Override_Glow_Icon": "None",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false
|
||||
},
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Rows": [
|
||||
{
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber",
|
||||
"Weight": 10
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
-- Modify the weight of the first item
|
||||
data.Rows[1].Weight = 500
|
||||
modified = true
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(content, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := content
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check that the weight was changed
|
||||
if !contains(result, `"Weight": 500`) {
|
||||
t.Errorf("Expected weight to be changed to 500, got:\n%s", result)
|
||||
}
|
||||
|
||||
// Check that formatting is preserved (should have proper indentation)
|
||||
if !contains(result, " \"Weight\": 500") {
|
||||
t.Errorf("Expected proper indentation, got:\n%s", result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetardedJSONEditing(t *testing.T) {
|
||||
original := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 200,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
expected := `{
|
||||
"RowStruct": "/Script/Icarus.ItemableData",
|
||||
"Defaults": {
|
||||
"Behaviour": "None",
|
||||
"DisplayName": "",
|
||||
"Icon": "None",
|
||||
"Override_Glow_Icon": "None",
|
||||
"Description": "",
|
||||
"FlavorText": "",
|
||||
"Weight": 0,
|
||||
"bAllowZeroWeight": false,
|
||||
"MaxStack": 1
|
||||
},
|
||||
"Rows": [
|
||||
{
|
||||
"DisplayName": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-DisplayName\", \"Fiber\")",
|
||||
"Icon": "/Game/Assets/2DArt/UI/Items/Item_Icons/Resources/ITEM_Fibre.ITEM_Fibre",
|
||||
"Description": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-Description\", \"A bundle of soft fiber, highly useful.\")",
|
||||
"FlavorText": "NSLOCTEXT(\"D_Itemable\", \"Item_Fiber-FlavorText\", \"Fiber is collected from bast, the strong inner bark of certain flowering plants.\")",
|
||||
"Weight": 10,
|
||||
"MaxStack": 1000000,
|
||||
"Name": "Item_Fiber"
|
||||
}
|
||||
]
|
||||
}`
|
||||
|
||||
command := utils.ModifyCommand{
|
||||
Name: "test",
|
||||
Lua: `
|
||||
for _, row in ipairs(data.Rows) do
|
||||
if row.MaxStack then
|
||||
if string.find(row.Name, "Carrot") or string.find(row.Name, "Potato") then
|
||||
row.MaxStack = 25
|
||||
else
|
||||
row.MaxStack = row.MaxStack * 10000
|
||||
if row.MaxStack > 1000000 then
|
||||
row.MaxStack = 1000000
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
`,
|
||||
}
|
||||
|
||||
commands, err := ProcessJSON(original, command, "test.json")
|
||||
if err != nil {
|
||||
t.Fatalf("ProcessJSON failed: %v", err)
|
||||
}
|
||||
|
||||
if len(commands) == 0 {
|
||||
t.Fatal("Expected at least one command")
|
||||
}
|
||||
|
||||
// Apply the commands
|
||||
result := original
|
||||
for _, cmd := range commands {
|
||||
result = result[:cmd.From] + cmd.With + result[cmd.To:]
|
||||
}
|
||||
|
||||
// Check that the full result matches the expected output exactly
|
||||
if result != expected {
|
||||
t.Errorf("Expected:\n%s\nGot:\n%s", expected, result)
|
||||
}
|
||||
}
|
||||
|
||||
func contains(s, substr string) bool {
|
||||
return len(s) >= len(substr) && (s == substr ||
|
||||
(len(s) > len(substr) && (s[:len(substr)] == substr ||
|
||||
s[len(s)-len(substr):] == substr ||
|
||||
containsSubstring(s, substr))))
|
||||
}
|
||||
|
||||
func containsSubstring(s, substr string) bool {
|
||||
for i := 0; i <= len(s)-len(substr); i++ {
|
||||
if s[i:i+len(substr)] == substr {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
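
The tests above splice each command back with result[:cmd.From] + cmd.With + result[cmd.To:], which assumes From/To index the original content; a minimal sketch (using the standard sort package, and not the package's own utils.ExecuteModifications, whose internals are not shown here) of applying several such edits safely is to work from the highest offset backward.

// edit is a stand-in for the command type returned by ProcessJSON; only the
// From/To/With fields exercised by these tests are assumed to exist.
type edit struct {
	From, To int
	With     string
}

// applyEdits replaces the highest-offset spans first so that the byte
// offsets of the remaining edits are not shifted by earlier replacements.
func applyEdits(content string, edits []edit) string {
	sort.Slice(edits, func(i, j int) bool { return edits[i].From > edits[j].From })
	for _, e := range edits {
		content = content[:e.From] + e.With + content[e.To:]
	}
	return content
}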
|
@@ -1,12 +1,16 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"modify/logger"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
func init() {
|
||||
// Only modify logger in test mode
|
||||
// This checks if we're running under 'go test'
|
||||
if os.Getenv("GO_TESTING") == "1" || os.Getenv("TESTING") == "1" {
|
||||
// Initialize logger with ERROR level for tests
|
||||
// to minimize noise in test output
|
||||
logger.Init(logger.LevelError)
|
||||
@@ -16,7 +20,8 @@ func init() {
|
||||
disableTestLogs := os.Getenv("ENABLE_TEST_LOGS") != "1"
|
||||
if disableTestLogs {
|
||||
// Create a new logger that writes to nowhere
|
||||
silentLogger := logger.New(ioutil.Discard, "", 0)
|
||||
logger.DefaultLogger = silentLogger
|
||||
silentLogger := logger.New(io.Discard, "", 0)
|
||||
logger.Default = silentLogger
|
||||
}
|
||||
}
|
||||
}
|
||||
|
434 processor/xml.go
@@ -1,434 +0,0 @@
|
||||
package processor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"modify/logger"
|
||||
"modify/processor/xpath"
|
||||
"strings"
|
||||
|
||||
"github.com/antchfx/xmlquery"
|
||||
lua "github.com/yuin/gopher-lua"
|
||||
)
|
||||
|
||||
// XMLProcessor implements the Processor interface for XML documents
|
||||
type XMLProcessor struct{}
|
||||
|
||||
// ProcessContent implements the Processor interface for XMLProcessor
|
||||
func (p *XMLProcessor) ProcessContent(content string, path string, luaExpr string) (string, int, int, error) {
|
||||
logger.Debug("Processing XML content with XPath: %s", path)
|
||||
|
||||
// Parse XML document
|
||||
// We can't really use encoding/xml here because it requires a pre defined struct
|
||||
// And we HAVE TO parse dynamic unknown XML
|
||||
logger.Trace("Parsing XML document")
|
||||
doc, err := xmlquery.Parse(strings.NewReader(content))
|
||||
if err != nil {
|
||||
logger.Error("Failed to parse XML: %v", err)
|
||||
return content, 0, 0, fmt.Errorf("error parsing XML: %v", err)
|
||||
}
|
||||
|
||||
// Find nodes matching the XPath pattern
|
||||
logger.Debug("Executing XPath query: %s", path)
|
||||
nodes, err := xpath.Get(doc, path)
|
||||
if err != nil {
|
||||
logger.Error("Failed to execute XPath: %v", err)
|
||||
return content, 0, 0, fmt.Errorf("error executing XPath: %v", err)
|
||||
}
|
||||
|
||||
matchCount := len(nodes)
|
||||
logger.Debug("Found %d nodes matching XPath", matchCount)
|
||||
if matchCount == 0 {
|
||||
logger.Warning("No nodes matched the XPath pattern: %s", path)
|
||||
return content, 0, 0, nil
|
||||
}
|
||||
|
||||
// Apply modifications to each node
|
||||
modCount := 0
|
||||
for i, node := range nodes {
|
||||
logger.Trace("Processing node #%d: %s", i+1, node.Data)
|
||||
|
||||
L, err := NewLuaState()
|
||||
if err != nil {
|
||||
logger.Error("Failed to create Lua state: %v", err)
|
||||
return content, 0, 0, fmt.Errorf("error creating Lua state: %v", err)
|
||||
}
|
||||
defer L.Close()
|
||||
|
||||
logger.Trace("Converting XML node to Lua")
|
||||
err = p.ToLua(L, node)
|
||||
if err != nil {
|
||||
logger.Error("Failed to convert XML node to Lua: %v", err)
|
||||
return content, modCount, matchCount, fmt.Errorf("error converting to Lua: %v", err)
|
||||
}
|
||||
|
||||
luaScript := BuildLuaScript(luaExpr)
|
||||
logger.Trace("Executing Lua script: %s", luaScript)
|
||||
err = L.DoString(luaScript)
|
||||
if err != nil {
|
||||
logger.Error("Failed to execute Lua script: %v", err)
|
||||
return content, modCount, matchCount, fmt.Errorf("error executing Lua: %v", err)
|
||||
}
|
||||
|
||||
result, err := p.FromLua(L)
|
||||
if err != nil {
|
||||
logger.Error("Failed to get result from Lua: %v", err)
|
||||
return content, modCount, matchCount, fmt.Errorf("error getting result from Lua: %v", err)
|
||||
}
|
||||
logger.Trace("Lua returned result: %#v", result)
|
||||
|
||||
modified := false
|
||||
modified = L.GetGlobal("modified").String() == "true"
|
||||
if !modified {
|
||||
logger.Debug("No changes made to node at path: %s", node.Data)
|
||||
continue
|
||||
}
|
||||
|
||||
// Apply modification based on the result
|
||||
if updatedValue, ok := result.(string); ok {
|
||||
// If the result is a simple string, update the node value directly
|
||||
logger.Debug("Updating node with string value: %s", updatedValue)
|
||||
xpath.Set(doc, path, updatedValue)
|
||||
} else if nodeData, ok := result.(map[string]interface{}); ok {
|
||||
// If the result is a map, apply more complex updates
|
||||
logger.Debug("Updating node with complex data structure")
|
||||
updateNodeFromMap(node, nodeData)
|
||||
}
|
||||
|
||||
modCount++
|
||||
logger.Debug("Successfully modified node #%d", i+1)
|
||||
}
|
||||
|
||||
logger.Info("XML processing complete: %d modifications from %d matches", modCount, matchCount)
|
||||
|
||||
// Serialize the modified XML document to string
|
||||
if doc.FirstChild != nil && doc.FirstChild.Type == xmlquery.DeclarationNode {
|
||||
// If we have an XML declaration, start with it
|
||||
declaration := doc.FirstChild.OutputXML(true)
|
||||
// Remove the firstChild (declaration) before serializing the rest of the document
|
||||
doc.FirstChild = doc.FirstChild.NextSibling
|
||||
return ConvertToNamedEntities(declaration + doc.OutputXML(true)), modCount, matchCount, nil
|
||||
}
|
||||
|
||||
// Convert numeric entities to named entities for better readability
|
||||
return ConvertToNamedEntities(doc.OutputXML(true)), modCount, matchCount, nil
|
||||
}
|
||||
|
||||
func (p *XMLProcessor) ToLua(L *lua.LState, data interface{}) error {
|
||||
table, err := p.ToLuaTable(L, data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
L.SetGlobal("v", table)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ToLua converts XML node values to Lua variables
|
||||
func (p *XMLProcessor) ToLuaTable(L *lua.LState, data interface{}) (lua.LValue, error) {
|
||||
// Check if data is an xmlquery.Node
|
||||
node, ok := data.(*xmlquery.Node)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected xmlquery.Node, got %T", data)
|
||||
}
|
||||
|
||||
// Create a simple table with essential data
|
||||
table := L.NewTable()
|
||||
|
||||
// For element nodes, just provide basic info
|
||||
L.SetField(table, "type", lua.LString(nodeTypeToString(node.Type)))
|
||||
L.SetField(table, "name", lua.LString(node.Data))
|
||||
L.SetField(table, "value", lua.LString(node.InnerText()))
|
||||
|
||||
// Add children if any
|
||||
children := L.NewTable()
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
childTable, err := p.ToLuaTable(L, child)
|
||||
if err == nil {
|
||||
children.Append(childTable)
|
||||
}
|
||||
}
|
||||
L.SetField(table, "children", children)
|
||||
|
||||
attrs := L.NewTable()
|
||||
if len(node.Attr) > 0 {
|
||||
for _, attr := range node.Attr {
|
||||
L.SetField(attrs, attr.Name.Local, lua.LString(attr.Value))
|
||||
}
|
||||
}
|
||||
L.SetField(table, "attr", attrs)
|
||||
|
||||
return table, nil
|
||||
}
|
||||
|
||||
// FromLua gets modified values from Lua
|
||||
func (p *XMLProcessor) FromLua(L *lua.LState) (interface{}, error) {
|
||||
luaValue := L.GetGlobal("v")
|
||||
|
||||
// Handle string values directly
|
||||
if luaValue.Type() == lua.LTString {
|
||||
return luaValue.String(), nil
|
||||
}
|
||||
|
||||
// Handle tables (for attributes and more complex updates)
|
||||
if luaValue.Type() == lua.LTTable {
|
||||
return luaTableToMap(L, luaValue.(*lua.LTable)), nil
|
||||
}
|
||||
|
||||
return luaValue.String(), nil
|
||||
}
|
||||
|
||||
// Simple helper to convert a Lua table to a Go map
|
||||
func luaTableToMap(L *lua.LState, table *lua.LTable) map[string]interface{} {
|
||||
result := make(map[string]interface{})
|
||||
|
||||
table.ForEach(func(k, v lua.LValue) {
|
||||
if k.Type() == lua.LTString {
|
||||
key := k.String()
|
||||
|
||||
if v.Type() == lua.LTTable {
|
||||
result[key] = luaTableToMap(L, v.(*lua.LTable))
|
||||
} else {
|
||||
result[key] = v.String()
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Simple helper to convert node type to string
|
||||
func nodeTypeToString(nodeType xmlquery.NodeType) string {
|
||||
switch nodeType {
|
||||
case xmlquery.ElementNode:
|
||||
return "element"
|
||||
case xmlquery.TextNode:
|
||||
return "text"
|
||||
case xmlquery.AttributeNode:
|
||||
return "attribute"
|
||||
default:
|
||||
return "other"
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to update an XML node from a map
|
||||
func updateNodeFromMap(node *xmlquery.Node, data map[string]interface{}) {
|
||||
// Update node value if present
|
||||
if value, ok := data["value"]; ok {
|
||||
if strValue, ok := value.(string); ok {
|
||||
// For element nodes, replace text content
|
||||
if node.Type == xmlquery.ElementNode {
|
||||
// Find the first text child if it exists
|
||||
var textNode *xmlquery.Node
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
if child.Type == xmlquery.TextNode {
|
||||
textNode = child
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if textNode != nil {
|
||||
// Update existing text node
|
||||
textNode.Data = strValue
|
||||
} else {
|
||||
// Create new text node
|
||||
newText := &xmlquery.Node{
|
||||
Type: xmlquery.TextNode,
|
||||
Data: strValue,
|
||||
Parent: node,
|
||||
}
|
||||
|
||||
// Insert at beginning of children
|
||||
if node.FirstChild != nil {
|
||||
newText.NextSibling = node.FirstChild
|
||||
node.FirstChild.PrevSibling = newText
|
||||
node.FirstChild = newText
|
||||
} else {
|
||||
node.FirstChild = newText
|
||||
node.LastChild = newText
|
||||
}
|
||||
}
|
||||
} else if node.Type == xmlquery.TextNode {
|
||||
// Directly update text node
|
||||
node.Data = strValue
|
||||
} else if node.Type == xmlquery.AttributeNode {
|
||||
// Update attribute value
|
||||
if node.Parent != nil {
|
||||
for i, attr := range node.Parent.Attr {
|
||||
if attr.Name.Local == node.Data {
|
||||
node.Parent.Attr[i].Value = strValue
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update attributes if present
|
||||
if attrs, ok := data["attr"].(map[string]interface{}); ok && node.Type == xmlquery.ElementNode {
|
||||
for name, value := range attrs {
|
||||
if strValue, ok := value.(string); ok {
|
||||
// Look for existing attribute
|
||||
found := false
|
||||
for i, attr := range node.Attr {
|
||||
if attr.Name.Local == name {
|
||||
node.Attr[i].Value = strValue
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Add new attribute if not found
|
||||
if !found {
|
||||
node.Attr = append(node.Attr, xmlquery.Attr{
|
||||
Name: struct {
|
||||
Space, Local string
|
||||
}{Local: name},
|
||||
Value: strValue,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to get a string representation of node type
|
||||
func nodeTypeName(nodeType xmlquery.NodeType) string {
|
||||
switch nodeType {
|
||||
case xmlquery.ElementNode:
|
||||
return "element"
|
||||
case xmlquery.TextNode:
|
||||
return "text"
|
||||
case xmlquery.AttributeNode:
|
||||
return "attribute"
|
||||
case xmlquery.CommentNode:
|
||||
return "comment"
|
||||
case xmlquery.DeclarationNode:
|
||||
return "declaration"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertToNamedEntities replaces numeric XML entities with their named counterparts
|
||||
func ConvertToNamedEntities(xml string) string {
|
||||
// Basic XML entities
|
||||
replacements := map[string]string{
// Basic XML entities
"&#34;": "&quot;", // double quote
"&#39;": "&apos;", // single quote
"&#60;": "&lt;",   // less than
"&#62;": "&gt;",   // greater than
"&#38;": "&amp;",  // ampersand

// Common symbols
"&#160;":  "&nbsp;",   // non-breaking space
"&#169;":  "&copy;",   // copyright
"&#174;":  "&reg;",    // registered trademark
"&#8364;": "&euro;",   // euro
"&#163;":  "&pound;",  // pound
"&#165;":  "&yen;",    // yen
"&#162;":  "&cent;",   // cent
"&#167;":  "&sect;",   // section
"&#8482;": "&trade;",  // trademark
"&#9824;": "&spades;", // spade
"&#9827;": "&clubs;",  // club
"&#9829;": "&hearts;", // heart
"&#9830;": "&diams;",  // diamond

// Special characters
"&#161;":  "&iexcl;",  // inverted exclamation
"&#191;":  "&iquest;", // inverted question
"&#171;":  "&laquo;",  // left angle quotes
"&#187;":  "&raquo;",  // right angle quotes
"&#183;":  "&middot;", // middle dot
"&#8226;": "&bull;",   // bullet
"&#8230;": "&hellip;", // horizontal ellipsis
"&#8242;": "&prime;",  // prime
"&#8243;": "&Prime;",  // double prime
"&#8254;": "&oline;",  // overline
"&#8260;": "&frasl;",  // fraction slash

// Math symbols
"&#177;":  "&plusmn;", // plus-minus
"&#215;":  "&times;",  // multiplication
"&#247;":  "&divide;", // division
"&#8734;": "&infin;",  // infinity
"&#8776;": "&asymp;",  // almost equal
"&#8800;": "&ne;",     // not equal
"&#8804;": "&le;",     // less than or equal
"&#8805;": "&ge;",     // greater than or equal
"&#8721;": "&sum;",    // summation
"&#8730;": "&radic;",  // square root
"&#8747;": "&int;",    // integral

// Accented characters
"&#192;": "&Agrave;", // A grave
"&#193;": "&Aacute;", // A acute
"&#194;": "&Acirc;",  // A circumflex
"&#195;": "&Atilde;", // A tilde
"&#196;": "&Auml;",   // A umlaut
"&#197;": "&Aring;",  // A ring
"&#198;": "&AElig;",  // AE ligature
"&#199;": "&Ccedil;", // C cedilla
"&#200;": "&Egrave;", // E grave
"&#201;": "&Eacute;", // E acute
"&#202;": "&Ecirc;",  // E circumflex
"&#203;": "&Euml;",   // E umlaut
"&#204;": "&Igrave;", // I grave
"&#205;": "&Iacute;", // I acute
"&#206;": "&Icirc;",  // I circumflex
"&#207;": "&Iuml;",   // I umlaut
"&#208;": "&ETH;",    // Eth
"&#209;": "&Ntilde;", // N tilde
"&#210;": "&Ograve;", // O grave
"&#211;": "&Oacute;", // O acute
"&#212;": "&Ocirc;",  // O circumflex
"&#213;": "&Otilde;", // O tilde
"&#214;": "&Ouml;",   // O umlaut
"&#216;": "&Oslash;", // O slash
"&#217;": "&Ugrave;", // U grave
"&#218;": "&Uacute;", // U acute
"&#219;": "&Ucirc;",  // U circumflex
"&#220;": "&Uuml;",   // U umlaut
"&#221;": "&Yacute;", // Y acute
"&#222;": "&THORN;",  // Thorn
"&#223;": "&szlig;",  // Sharp s
"&#224;": "&agrave;", // a grave
"&#225;": "&aacute;", // a acute
"&#226;": "&acirc;",  // a circumflex
"&#227;": "&atilde;", // a tilde
"&#228;": "&auml;",   // a umlaut
"&#229;": "&aring;",  // a ring
"&#230;": "&aelig;",  // ae ligature
"&#231;": "&ccedil;", // c cedilla
"&#232;": "&egrave;", // e grave
"&#233;": "&eacute;", // e acute
"&#234;": "&ecirc;",  // e circumflex
"&#235;": "&euml;",   // e umlaut
"&#236;": "&igrave;", // i grave
"&#237;": "&iacute;", // i acute
"&#238;": "&icirc;",  // i circumflex
"&#239;": "&iuml;",   // i umlaut
"&#240;": "&eth;",    // eth
"&#241;": "&ntilde;", // n tilde
"&#242;": "&ograve;", // o grave
"&#243;": "&oacute;", // o acute
"&#244;": "&ocirc;",  // o circumflex
"&#245;": "&otilde;", // o tilde
"&#246;": "&ouml;",   // o umlaut
"&#248;": "&oslash;", // o slash
"&#249;": "&ugrave;", // u grave
"&#250;": "&uacute;", // u acute
"&#251;": "&ucirc;",  // u circumflex
"&#252;": "&uuml;",   // u umlaut
"&#253;": "&yacute;", // y acute
"&#254;": "&thorn;",  // thorn
"&#255;": "&yuml;",   // y umlaut
}
|
||||
|
||||
result := xml
|
||||
for numeric, named := range replacements {
|
||||
result = strings.ReplaceAll(result, numeric, named)
|
||||
}
|
||||
return result
|
||||
}
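
For reference, a small usage sketch of the ConvertToNamedEntities helper removed by this change; the input string is illustrative only and assumes the numeric-to-named mapping shown above.

func ExampleConvertToNamedEntities() {
	fmt.Println(ConvertToNamedEntities(`<note lang="en">Tom &#38; Jerry &#169; 1940</note>`))
	// Output: <note lang="en">Tom &amp; Jerry &copy; 1940</note>
}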
|
File diff suppressed because it is too large
@@ -1,4 +0,0 @@
|
||||
// The package is now using github.com/antchfx/xmlquery for XPath parsing.
|
||||
// The parsing functionality tests have been removed since we're now
|
||||
// delegating XPath parsing to the xmlquery library.
|
||||
package xpath
|
@@ -1,4 +0,0 @@
|
||||
// The package is now using github.com/antchfx/xmlquery for XPath parsing.
|
||||
// The parsing functionality tests have been removed since we're now
|
||||
// delegating XPath parsing to the xmlquery library.
|
||||
package xpath
|
@@ -1,133 +0,0 @@
|
||||
package xpath
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/antchfx/xmlquery"
|
||||
)
|
||||
|
||||
// Get retrieves nodes from XML data using an XPath expression
|
||||
func Get(node *xmlquery.Node, path string) ([]*xmlquery.Node, error) {
|
||||
if node == nil {
|
||||
return nil, errors.New("nil node provided")
|
||||
}
|
||||
|
||||
// Execute xpath query directly
|
||||
nodes, err := xmlquery.QueryAll(node, path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to execute XPath query: %v", err)
|
||||
}
|
||||
|
||||
return nodes, nil
|
||||
}
|
||||
|
||||
// Set updates a single node in the XML data using an XPath expression
|
||||
func Set(node *xmlquery.Node, path string, value interface{}) error {
|
||||
if node == nil {
|
||||
return errors.New("nil node provided")
|
||||
}
|
||||
|
||||
// Find the node to update
|
||||
nodes, err := xmlquery.QueryAll(node, path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to execute XPath query: %v", err)
|
||||
}
|
||||
|
||||
if len(nodes) == 0 {
|
||||
return fmt.Errorf("no nodes found for path: %s", path)
|
||||
}
|
||||
|
||||
// Update the first matching node
|
||||
updateNodeValue(nodes[0], value)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetAll updates all nodes that match the XPath expression
|
||||
func SetAll(node *xmlquery.Node, path string, value interface{}) error {
|
||||
if node == nil {
|
||||
return errors.New("nil node provided")
|
||||
}
|
||||
|
||||
// Find all nodes to update
|
||||
nodes, err := xmlquery.QueryAll(node, path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to execute XPath query: %v", err)
|
||||
}
|
||||
|
||||
if len(nodes) == 0 {
|
||||
return fmt.Errorf("no nodes found for path: %s", path)
|
||||
}
|
||||
|
||||
// Update all matching nodes
|
||||
for _, matchNode := range nodes {
|
||||
updateNodeValue(matchNode, value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Helper function to update a node's value
|
||||
func updateNodeValue(node *xmlquery.Node, value interface{}) {
|
||||
strValue := fmt.Sprintf("%v", value)
|
||||
|
||||
// Handle different node types
|
||||
switch node.Type {
|
||||
case xmlquery.AttributeNode:
|
||||
// For attribute nodes, update the attribute value
|
||||
parent := node.Parent
|
||||
if parent != nil {
|
||||
for i, attr := range parent.Attr {
|
||||
if attr.Name.Local == node.Data {
|
||||
parent.Attr[i].Value = strValue
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
case xmlquery.TextNode:
|
||||
// For text nodes, update the text content
|
||||
node.Data = strValue
|
||||
case xmlquery.ElementNode:
|
||||
// For element nodes, clear existing text children and add a new text node
|
||||
// First, remove all existing text children
|
||||
var nonTextChildren []*xmlquery.Node
|
||||
for child := node.FirstChild; child != nil; child = child.NextSibling {
|
||||
if child.Type != xmlquery.TextNode {
|
||||
nonTextChildren = append(nonTextChildren, child)
|
||||
}
|
||||
}
|
||||
|
||||
// Clear all children
|
||||
node.FirstChild = nil
|
||||
node.LastChild = nil
|
||||
|
||||
// Add a new text node
|
||||
textNode := &xmlquery.Node{
|
||||
Type: xmlquery.TextNode,
|
||||
Data: strValue,
|
||||
Parent: node,
|
||||
}
|
||||
|
||||
// Set the text node as the first child
|
||||
node.FirstChild = textNode
|
||||
node.LastChild = textNode
|
||||
|
||||
// Add back non-text children
|
||||
for _, child := range nonTextChildren {
|
||||
child.Parent = node
|
||||
|
||||
// If this is the first child being added back
|
||||
if node.FirstChild == textNode && node.LastChild == textNode {
|
||||
node.FirstChild.NextSibling = child
|
||||
child.PrevSibling = node.FirstChild
|
||||
node.LastChild = child
|
||||
} else {
|
||||
// Add to the end of the chain
|
||||
node.LastChild.NextSibling = child
|
||||
child.PrevSibling = node.LastChild
|
||||
node.LastChild = child
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,474 +0,0 @@
|
||||
package xpath
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/antchfx/xmlquery"
|
||||
)
|
||||
|
||||
// Parse test XML data once at the beginning for use in multiple tests
|
||||
func parseTestXML(t *testing.T, xmlData string) *xmlquery.Node {
|
||||
doc, err := xmlquery.Parse(strings.NewReader(xmlData))
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to parse test XML: %v", err)
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
// XML test data as a string for our tests
|
||||
var testXML = `
|
||||
<store>
|
||||
<book category="fiction">
|
||||
<title lang="en">The Fellowship of the Ring</title>
|
||||
<author>J.R.R. Tolkien</author>
|
||||
<year>1954</year>
|
||||
<price>22.99</price>
|
||||
</book>
|
||||
<book category="fiction">
|
||||
<title lang="en">The Two Towers</title>
|
||||
<author>J.R.R. Tolkien</author>
|
||||
<year>1954</year>
|
||||
<price>23.45</price>
|
||||
</book>
|
||||
<book category="technical">
|
||||
<title lang="en">Learning XML</title>
|
||||
<author>Erik T. Ray</author>
|
||||
<year>2003</year>
|
||||
<price>39.95</price>
|
||||
</book>
|
||||
<bicycle>
|
||||
<color>red</color>
|
||||
<price>199.95</price>
|
||||
</bicycle>
|
||||
</store>
|
||||
`
|
||||
|
||||
func TestEvaluator(t *testing.T) {
|
||||
// Parse the test XML data once for all test cases
|
||||
doc := parseTestXML(t, testXML)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
error bool
|
||||
}{
|
||||
{
|
||||
name: "simple_element_access",
|
||||
path: "/store/bicycle/color",
|
||||
},
|
||||
{
|
||||
name: "recursive_element_access",
|
||||
path: "//price",
|
||||
},
|
||||
{
|
||||
name: "wildcard_element_access",
|
||||
path: "/store/book/*",
|
||||
},
|
||||
{
|
||||
name: "attribute_exists_predicate",
|
||||
path: "//title[@lang]",
|
||||
},
|
||||
{
|
||||
name: "attribute_equals_predicate",
|
||||
path: "//title[@lang='en']",
|
||||
},
|
||||
{
|
||||
name: "value_comparison_predicate",
|
||||
path: "/store/book[price>35.00]/title",
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "last_predicate",
|
||||
path: "/store/book[last()]/title",
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "last_minus_predicate",
|
||||
path: "/store/book[last()-1]/title",
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "position_predicate",
|
||||
path: "/store/book[position()<3]/title",
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "invalid_index",
|
||||
path: "/store/book[10]/title",
|
||||
error: true,
|
||||
},
|
||||
{
|
||||
name: "nonexistent_element",
|
||||
path: "/store/nonexistent",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := Get(doc, tt.path)
|
||||
|
||||
// Handle expected errors
|
||||
if tt.error {
|
||||
if err == nil && len(result) == 0 {
|
||||
// If we expected an error but got empty results instead, that's okay
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
// If we got an error as expected, that's okay
|
||||
return
|
||||
}
|
||||
} else if err != nil {
|
||||
// If we didn't expect an error but got one, that's a test failure
|
||||
t.Errorf("Get(%q) returned unexpected error: %v", tt.path, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Special cases where we don't care about exact matches
|
||||
switch tt.name {
|
||||
case "wildcard_element_access":
|
||||
// Just check that we got some elements
|
||||
if len(result) == 0 {
|
||||
t.Errorf("Expected multiple elements for wildcard, got none")
|
||||
}
|
||||
return
|
||||
case "attribute_exists_predicate", "attribute_equals_predicate":
|
||||
// Just check that we got some titles
|
||||
if len(result) == 0 {
|
||||
t.Errorf("Expected titles with lang attribute, got none")
|
||||
}
|
||||
// Ensure all are title elements
|
||||
for _, node := range result {
|
||||
if node.Data != "title" {
|
||||
t.Errorf("Expected title elements, got: %s", node.Data)
|
||||
}
|
||||
}
|
||||
return
|
||||
case "nonexistent_element":
|
||||
// Just check that we got empty results
|
||||
if len(result) != 0 {
|
||||
t.Errorf("Expected empty results for nonexistent element, got %d items", len(result))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// For other cases, just verify we got results
|
||||
if len(result) == 0 {
|
||||
t.Errorf("Expected results for path %s, got none", tt.path)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEdgeCases(t *testing.T) {
|
||||
t.Run("nil_node", func(t *testing.T) {
|
||||
result, err := Get(nil, "/store/book")
|
||||
if err == nil {
|
||||
t.Errorf("Expected error for nil node")
|
||||
return
|
||||
}
|
||||
if len(result) > 0 {
|
||||
t.Errorf("Expected empty result, got %v", result)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("invalid_xml", func(t *testing.T) {
|
||||
invalidXML, err := xmlquery.Parse(strings.NewReader("<invalid>xml"))
|
||||
if err != nil {
|
||||
// If parsing fails, that's expected
|
||||
return
|
||||
}
|
||||
|
||||
_, err = Get(invalidXML, "/store")
|
||||
if err == nil {
|
||||
t.Error("Expected error for invalid XML structure")
|
||||
}
|
||||
})
|
||||
|
||||
// For these tests with the simple XML, we expect just one result
|
||||
simpleXML := `<root><book><title lang="en">Test</title></book></root>`
|
||||
doc := parseTestXML(t, simpleXML)
|
||||
|
||||
t.Run("current_node", func(t *testing.T) {
|
||||
result, err := Get(doc, "/root/book/.")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) > 1 {
|
||||
t.Errorf("Expected at most 1 result, got %d", len(result))
|
||||
}
|
||||
if len(result) > 0 {
|
||||
// Verify it's the book node
|
||||
if result[0].Data != "book" {
|
||||
t.Errorf("Expected book node, got %v", result[0].Data)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("attributes", func(t *testing.T) {
|
||||
result, err := Get(doc, "/root/book/title/@lang")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) != 1 || result[0].InnerText() != "en" {
|
||||
t.Errorf("Expected 'en', got %v", result[0].InnerText())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWithPaths(t *testing.T) {
|
||||
// Use a simplified, well-formed XML document
|
||||
simpleXML := `<store>
|
||||
<book category="fiction">
|
||||
<title lang="en">The Book Title</title>
|
||||
<author>Author Name</author>
|
||||
<price>19.99</price>
|
||||
</book>
|
||||
<bicycle>
|
||||
<color>red</color>
|
||||
<price>199.95</price>
|
||||
</bicycle>
|
||||
</store>`
|
||||
|
||||
// Parse the XML for testing
|
||||
doc := parseTestXML(t, simpleXML)
|
||||
|
||||
// Debug: Print the test XML
|
||||
t.Logf("Test XML:\n%s", simpleXML)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
path string
|
||||
expectedValue string
|
||||
}{
|
||||
{
|
||||
name: "simple_element_access",
|
||||
path: "/store/bicycle/color",
|
||||
expectedValue: "red",
|
||||
},
|
||||
{
|
||||
name: "attribute_access",
|
||||
path: "/store/book/title/@lang",
|
||||
expectedValue: "en",
|
||||
},
|
||||
{
|
||||
name: "recursive_with_attribute",
|
||||
path: "//title[@lang='en']",
|
||||
expectedValue: "The Book Title",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Debug: Print the path we're looking for
|
||||
t.Logf("Looking for path: %s", tt.path)
|
||||
|
||||
result, err := Get(doc, tt.path)
|
||||
if err != nil {
|
||||
t.Errorf("Get(%q) returned error: %v", tt.path, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Debug: Print the results
|
||||
t.Logf("Got %d results", len(result))
|
||||
for i, r := range result {
|
||||
t.Logf("Result %d: Node=%s, Value=%v", i, r.Data, r.InnerText())
|
||||
}
|
||||
|
||||
// Check that we got results
|
||||
if len(result) == 0 {
|
||||
t.Errorf("Get(%q) returned no results", tt.path)
|
||||
return
|
||||
}
|
||||
|
||||
// For attribute access test, do more specific checks
|
||||
if tt.name == "attribute_access" {
|
||||
// Check the first result's value matches expected
|
||||
if result[0].InnerText() != tt.expectedValue {
|
||||
t.Errorf("Attribute value: got %v, expected %s", result[0].InnerText(), tt.expectedValue)
|
||||
}
|
||||
}
|
||||
|
||||
// For simple element access, check the text content
|
||||
if tt.name == "simple_element_access" {
|
||||
if text := result[0].InnerText(); text != tt.expectedValue {
|
||||
t.Errorf("Element text: got %s, expected %s", text, tt.expectedValue)
|
||||
}
|
||||
}
|
||||
|
||||
// For recursive with attribute test, check title elements with lang="en"
|
||||
if tt.name == "recursive_with_attribute" {
|
||||
for _, node := range result {
|
||||
// Check the node is a title
|
||||
if node.Data != "title" {
|
||||
t.Errorf("Expected title element, got %s", node.Data)
|
||||
}
|
||||
|
||||
// Check text content
|
||||
if text := node.InnerText(); text != tt.expectedValue {
|
||||
t.Errorf("Text content: got %s, expected %s", text, tt.expectedValue)
|
||||
}
|
||||
|
||||
// Check attributes - find the lang attribute
|
||||
hasLang := false
|
||||
for _, attr := range node.Attr {
|
||||
if attr.Name.Local == "lang" && attr.Value == "en" {
|
||||
hasLang = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !hasLang {
|
||||
t.Errorf("Expected lang=\"en\" attribute, but it was not found")
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSet(t *testing.T) {
|
||||
t.Run("simple element", func(t *testing.T) {
|
||||
xmlData := `<root><name>John</name></root>`
|
||||
doc := parseTestXML(t, xmlData)
|
||||
|
||||
err := Set(doc, "/root/name", "Jane")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify the change
|
||||
result, err := Get(doc, "/root/name")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) != 1 {
|
||||
t.Errorf("Expected 1 result, got %d", len(result))
|
||||
return
|
||||
}
|
||||
|
||||
// Check text content
|
||||
if text := result[0].InnerText(); text != "Jane" {
|
||||
t.Errorf("Expected text 'Jane', got '%s'", text)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("attribute", func(t *testing.T) {
|
||||
xmlData := `<root><element id="123"></element></root>`
|
||||
doc := parseTestXML(t, xmlData)
|
||||
|
||||
err := Set(doc, "/root/element/@id", "456")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify the change
|
||||
result, err := Get(doc, "/root/element/@id")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) != 1 {
|
||||
t.Errorf("Expected 1 result, got %d", len(result))
|
||||
return
|
||||
}
|
||||
|
||||
// For attributes, check the inner text
|
||||
if text := result[0].InnerText(); text != "456" {
|
||||
t.Errorf("Expected attribute value '456', got '%s'", text)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("indexed element", func(t *testing.T) {
|
||||
xmlData := `<root><items><item>first</item><item>second</item></items></root>`
|
||||
doc := parseTestXML(t, xmlData)
|
||||
|
||||
err := Set(doc, "/root/items/item[1]", "changed")
|
||||
if err != nil {
|
||||
t.Errorf("Set() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify the change using XPath that specifically targets the first item
|
||||
result, err := Get(doc, "/root/items/item[1]")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if we have results
|
||||
if len(result) == 0 {
|
||||
t.Errorf("Expected at least one result for /root/items/item[1]")
|
||||
return
|
||||
}
|
||||
|
||||
// Check text content
|
||||
if text := result[0].InnerText(); text != "changed" {
|
||||
t.Errorf("Expected text 'changed', got '%s'", text)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestSetAll(t *testing.T) {
|
||||
t.Run("multiple elements", func(t *testing.T) {
|
||||
xmlData := `<root><items><item>first</item><item>second</item></items></root>`
|
||||
doc := parseTestXML(t, xmlData)
|
||||
|
||||
err := SetAll(doc, "//item", "changed")
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify all items are changed
|
||||
result, err := Get(doc, "//item")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) != 2 {
|
||||
t.Errorf("Expected 2 results, got %d", len(result))
|
||||
return
|
||||
}
|
||||
|
||||
// Check each node
|
||||
for i, node := range result {
|
||||
if text := node.InnerText(); text != "changed" {
|
||||
t.Errorf("Item %d: expected text 'changed', got '%s'", i, text)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("attributes", func(t *testing.T) {
|
||||
xmlData := `<root><item id="1"/><item id="2"/></root>`
|
||||
doc := parseTestXML(t, xmlData)
|
||||
|
||||
err := SetAll(doc, "//item/@id", "new")
|
||||
if err != nil {
|
||||
t.Errorf("SetAll() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify all attributes are changed
|
||||
result, err := Get(doc, "//item/@id")
|
||||
if err != nil {
|
||||
t.Errorf("Get() returned error: %v", err)
|
||||
return
|
||||
}
|
||||
if len(result) != 2 {
|
||||
t.Errorf("Expected 2 results, got %d", len(result))
|
||||
return
|
||||
}
|
||||
|
||||
// For attributes, check inner text
|
||||
for i, node := range result {
|
||||
if text := node.InnerText(); text != "new" {
|
||||
t.Errorf("Attribute %d: expected value 'new', got '%s'", i, text)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
@@ -1,10 +1,29 @@
|
||||
package regression
|
||||
|
||||
import (
|
||||
"modify/processor"
|
||||
"cook/processor"
|
||||
"cook/utils"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func ApiAdaptor(content string, regex string, lua string) (string, int, int, error) {
|
||||
command := utils.ModifyCommand{
|
||||
Regex: regex,
|
||||
Lua: lua,
|
||||
LogLevel: "TRACE",
|
||||
}
|
||||
|
||||
commands, err := processor.ProcessRegex(content, command, "test")
|
||||
if err != nil {
|
||||
return "", 0, 0, err
|
||||
}
|
||||
|
||||
result, modifications := utils.ExecuteModifications(commands, content)
|
||||
return result, modifications, len(commands), nil
|
||||
}
|
||||
|
||||
func TestTalentsMechanicOutOfRange(t *testing.T) {
|
||||
given := `<Talent identifier="quickfixer">
|
||||
<Icon texture="Content/UI/TalentsIcons2.png" sheetindex="5,2" sheetelementsize="128,128"/>
|
||||
@@ -62,22 +81,57 @@ func TestTalentsMechanicOutOfRange(t *testing.T) {
|
||||
</AbilityGroupEffect>
|
||||
</Talent>`
|
||||
|
||||
p := &processor.RegexProcessor{}
|
||||
result, mods, matches, err := p.ProcessContent(given, `<Talent identifier="quickfixer">!anyvalue="(?<movementspeed>!num)"!anyvalue="(?<duration>!num)"!anyvalue="(?<repairspeed>!num)"!anyamount="(?<durationv>!num)"`, "movementspeed=round(movementspeed*1.5, 2) duration=round(duration*2, 2) repairspeed=round(repairspeed*2, 2) durationv=duration")
|
||||
result, mods, matches, err := ApiAdaptor(given, `<Talent identifier="quickfixer">!anyvalue="(?<movementspeed>!num)"!anyvalue="(?<duration>!num)"!anyvalue="(?<repairspeed>!num)"!anyamount="(?<durationv>!num)"`, "movementspeed=round(movementspeed*1.5, 2) duration=round(duration*2, 2) repairspeed=round(repairspeed*2, 2) durationv=duration")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
if matches != 1 {
|
||||
t.Errorf("Expected 1 match, got %d", matches)
|
||||
if matches != 4 {
|
||||
t.Errorf("Expected 4 matches, got %d", matches)
|
||||
}
|
||||
|
||||
if mods != 1 {
|
||||
t.Errorf("Expected 1 modification, got %d", mods)
|
||||
if mods != 4 {
|
||||
t.Errorf("Expected 4 modifications, got %d", mods)
|
||||
}
|
||||
|
||||
if result != actual {
|
||||
t.Errorf("expected %s, got %s", actual, result)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndexExplosions_ShouldNotPanic(t *testing.T) {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting current working directory: %v", err)
|
||||
}
|
||||
|
||||
given, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItems.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
|
||||
expected, err := os.ReadFile(filepath.Join(cwd, "..", "testfiles", "OutpostItemsExpected.xml"))
|
||||
if err != nil {
|
||||
t.Fatalf("Error reading file: %v", err)
|
||||
}
|
||||
|
||||
result, _, _, err := ApiAdaptor(string(given), `(?-s)LightComponent!anyrange="(!num)"`, "*4")
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("Error processing content: %v", err)
|
||||
}
|
||||
|
||||
// We don't care exactly how many matches there are as long as the result is correct
|
||||
// if matches != 45 {
|
||||
// t.Errorf("Expected 45 match, got %d", matches)
|
||||
// }
|
||||
//
|
||||
// if mods != 45 {
|
||||
// t.Errorf("Expected 45 modification, got %d", mods)
|
||||
// }
|
||||
|
||||
if string(result) != string(expected) {
|
||||
t.Errorf("expected %s, got %s", expected, result)
|
||||
}
|
||||
}
|
||||
|
@@ -1,12 +0,0 @@
|
||||
<config>
|
||||
<item>
|
||||
<value>75</value>
|
||||
<multiplier>2</multiplier>
|
||||
<divider>4</divider>
|
||||
</item>
|
||||
<item>
|
||||
<value>150</value>
|
||||
<multiplier>3</multiplier>
|
||||
<divider>2</divider>
|
||||
</item>
|
||||
</config>
|
@@ -1,37 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<testdata>
|
||||
<!-- Numeric values -->
|
||||
<item>
|
||||
<id>1</id>
|
||||
<value>200</value>
|
||||
<price>24.99</price>
|
||||
<quantity>5</quantity>
|
||||
</item>
|
||||
|
||||
<!-- Text values -->
|
||||
<item>
|
||||
<id>2</id>
|
||||
<name>Test Product</name>
|
||||
<description>This is a test product description</description>
|
||||
<category>Test</category>
|
||||
</item>
|
||||
|
||||
<!-- Mixed content -->
|
||||
<item>
|
||||
<id>3</id>
|
||||
<name>Mixed Product</name>
|
||||
<price>19.99</price>
|
||||
<code>PRD-123</code>
|
||||
<tags>sale,discount,new</tags>
|
||||
</item>
|
||||
|
||||
<!-- Empty and special values -->
|
||||
<item>
|
||||
<id>4</id>
|
||||
<value></value>
|
||||
<specialChars>Hello & World < > " '</specialChars>
|
||||
<multiline>Line 1
|
||||
Line 2
|
||||
Line 3</multiline>
|
||||
</item>
|
||||
</testdata>
|
11 test_surgical.yml (Normal file)
@@ -0,0 +1,11 @@
|
||||
- name: SurgicalWeightTest
|
||||
json: true
|
||||
lua: |
|
||||
-- This demonstrates surgical JSON editing
|
||||
-- Only the Weight field of Item_Fiber will be modified
|
||||
data.Rows[1].Weight = 999
|
||||
modified = true
|
||||
files:
|
||||
- 'D_Itemable.json'
|
||||
reset: false
|
||||
loglevel: INFO
|
1252 testfiles/OutpostItems.xml (Normal file)
File diff suppressed because it is too large
1252 testfiles/OutpostItemsExpected.xml (Normal file)
File diff suppressed because it is too large
@@ -1 +0,0 @@
|
||||
<config><item><value>100</value></item></config>
|
157 utils/db.go (Normal file)
@@ -0,0 +1,157 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
gormlogger "gorm.io/gorm/logger"
|
||||
)
|
||||
|
||||
// dbLogger is a scoped logger for the utils/db package.
|
||||
var dbLogger = logger.Default.WithPrefix("utils/db")
|
||||
|
||||
type DB interface {
|
||||
DB() *gorm.DB
|
||||
Raw(sql string, args ...any) *gorm.DB
|
||||
SaveFile(filePath string, fileData []byte) error
|
||||
GetFile(filePath string) ([]byte, error)
|
||||
GetAllFiles() ([]FileSnapshot, error)
|
||||
RemoveAllFiles() error
|
||||
}
|
||||
|
||||
type FileSnapshot struct {
|
||||
Date time.Time `gorm:"primaryKey"`
|
||||
FilePath string `gorm:"primaryKey"`
|
||||
FileData []byte `gorm:"type:blob"`
|
||||
}
|
||||
|
||||
type DBWrapper struct {
|
||||
db *gorm.DB
|
||||
}
|
||||
|
||||
var globalDB *DBWrapper
|
||||
|
||||
func GetDB() (DB, error) {
|
||||
getDBLogger := dbLogger.WithPrefix("GetDB")
|
||||
getDBLogger.Debug("Attempting to get database connection")
|
||||
var err error
|
||||
|
||||
dbFile := filepath.Join("data.sqlite")
|
||||
getDBLogger.Debug("Opening database file: %q", dbFile)
|
||||
db, err := gorm.Open(sqlite.Open(dbFile), &gorm.Config{
|
||||
// SkipDefaultTransaction: true,
|
||||
PrepareStmt: true,
|
||||
Logger: gormlogger.Default.LogMode(gormlogger.Silent),
|
||||
})
|
||||
if err != nil {
|
||||
getDBLogger.Error("Failed to open database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getDBLogger.Debug("Database opened successfully, running auto migration")
|
||||
if err := db.AutoMigrate(&FileSnapshot{}); err != nil {
|
||||
getDBLogger.Error("Auto migration failed: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getDBLogger.Debug("Auto migration completed")
|
||||
|
||||
globalDB = &DBWrapper{db: db}
|
||||
getDBLogger.Debug("Database wrapper initialized")
|
||||
return globalDB, nil
|
||||
}
|
||||
|
||||
// Just a wrapper
|
||||
func (db *DBWrapper) Raw(sql string, args ...any) *gorm.DB {
|
||||
rawLogger := dbLogger.WithPrefix("Raw").WithField("sql", sql)
|
||||
rawLogger.Debug("Executing raw SQL query with args: %v", args)
|
||||
return db.db.Raw(sql, args...)
|
||||
}
|
||||
|
||||
func (db *DBWrapper) DB() *gorm.DB {
|
||||
dbLogger.WithPrefix("DB").Debug("Returning GORM DB instance")
|
||||
return db.db
|
||||
}
|
||||
|
||||
func (db *DBWrapper) FileExists(filePath string) (bool, error) {
|
||||
fileExistsLogger := dbLogger.WithPrefix("FileExists").WithField("filePath", filePath)
|
||||
fileExistsLogger.Debug("Checking if file exists in database")
|
||||
var count int64
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).Count(&count).Error
|
||||
if err != nil {
|
||||
fileExistsLogger.Error("Error checking if file exists: %v", err)
|
||||
return false, err
|
||||
}
|
||||
fileExistsLogger.Debug("File exists: %t", count > 0)
|
||||
return count > 0, err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) SaveFile(filePath string, fileData []byte) error {
|
||||
saveFileLogger := dbLogger.WithPrefix("SaveFile").WithField("filePath", filePath)
|
||||
saveFileLogger.Debug("Attempting to save file to database")
|
||||
saveFileLogger.Trace("File data length: %d", len(fileData))
|
||||
|
||||
exists, err := db.FileExists(filePath)
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Error checking if file exists: %v", err)
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
saveFileLogger.Debug("File already exists, skipping save")
|
||||
return nil
|
||||
}
|
||||
saveFileLogger.Debug("Creating new file snapshot in database")
|
||||
err = db.db.Create(&FileSnapshot{
|
||||
Date: time.Now(),
|
||||
FilePath: filePath,
|
||||
FileData: fileData,
|
||||
}).Error
|
||||
if err != nil {
|
||||
saveFileLogger.Error("Failed to create file snapshot: %v", err)
|
||||
} else {
|
||||
saveFileLogger.Debug("File saved successfully to database")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetFile(filePath string) ([]byte, error) {
|
||||
getFileLogger := dbLogger.WithPrefix("GetFile").WithField("filePath", filePath)
|
||||
getFileLogger.Debug("Getting file from database")
|
||||
var fileSnapshot FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Where("file_path = ?", filePath).First(&fileSnapshot).Error
|
||||
if err != nil {
|
||||
// Downgrade not-found to warning to avoid noisy errors during first run
|
||||
getFileLogger.Warning("Failed to get file from database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getFileLogger.Debug("File found in database")
|
||||
getFileLogger.Trace("Retrieved file data length: %d", len(fileSnapshot.FileData))
|
||||
return fileSnapshot.FileData, nil
|
||||
}
|
||||
|
||||
func (db *DBWrapper) GetAllFiles() ([]FileSnapshot, error) {
|
||||
getAllFilesLogger := dbLogger.WithPrefix("GetAllFiles")
|
||||
getAllFilesLogger.Debug("Getting all files from database")
|
||||
var fileSnapshots []FileSnapshot
|
||||
err := db.db.Model(&FileSnapshot{}).Find(&fileSnapshots).Error
|
||||
if err != nil {
|
||||
getAllFilesLogger.Error("Failed to get all files from database: %v", err)
|
||||
return nil, err
|
||||
}
|
||||
getAllFilesLogger.Debug("Found %d files in database", len(fileSnapshots))
|
||||
getAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)
|
||||
return fileSnapshots, nil
|
||||
}
|
||||
|
||||
func (db *DBWrapper) RemoveAllFiles() error {
|
||||
removeAllFilesLogger := dbLogger.WithPrefix("RemoveAllFiles")
|
||||
removeAllFilesLogger.Debug("Removing all files from database")
|
||||
err := db.db.Exec("DELETE FROM file_snapshots").Error
|
||||
if err != nil {
|
||||
removeAllFilesLogger.Error("Failed to remove all files from database: %v", err)
|
||||
} else {
|
||||
removeAllFilesLogger.Debug("All files removed from database")
|
||||
}
|
||||
return err
|
||||
}
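
A brief usage sketch of the DB interface defined above; error handling is abbreviated, the wrapper function is hypothetical, and the file path is purely illustrative.

// exampleDBUsage shows the snapshot round trip: SaveFile stores a baseline
// only once (later calls are skipped), and GetFile returns the stored bytes.
func exampleDBUsage() {
	db, err := GetDB()
	if err != nil {
		dbLogger.Error("Failed to open database: %v", err)
		return
	}
	_ = db.SaveFile("Vanilla/Items.xml", []byte("<Items/>"))
	original, _ := db.GetFile("Vanilla/Items.xml")
	_ = original
}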
|
152 utils/file.go (Normal file)
@@ -0,0 +1,152 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
logger "git.site.quack-lab.dev/dave/cylogger"
|
||||
)
|
||||
|
||||
// fileLogger is a scoped logger for the utils/file package.
|
||||
var fileLogger = logger.Default.WithPrefix("utils/file")
|
||||
|
||||
func CleanPath(path string) string {
|
||||
cleanPathLogger := fileLogger.WithPrefix("CleanPath")
|
||||
cleanPathLogger.Debug("Cleaning path: %q", path)
|
||||
cleanPathLogger.Trace("Original path: %q", path)
|
||||
path = filepath.Clean(path)
|
||||
path = strings.ReplaceAll(path, "\\", "/")
|
||||
cleanPathLogger.Trace("Cleaned path result: %q", path)
|
||||
return path
|
||||
}
|
||||
|
||||
func ToAbs(path string) string {
|
||||
toAbsLogger := fileLogger.WithPrefix("ToAbs")
|
||||
toAbsLogger.Debug("Converting path to absolute: %q", path)
|
||||
toAbsLogger.Trace("Input path: %q", path)
|
||||
if filepath.IsAbs(path) {
|
||||
toAbsLogger.Debug("Path is already absolute, cleaning it.")
|
||||
cleanedPath := CleanPath(path)
|
||||
toAbsLogger.Trace("Already absolute path after cleaning: %q", cleanedPath)
|
||||
return cleanedPath
|
||||
}
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
toAbsLogger.Error("Error getting current working directory: %v", err)
|
||||
return CleanPath(path)
|
||||
}
|
||||
	toAbsLogger.Trace("Current working directory: %q", cwd)
	cleanedPath := CleanPath(filepath.Join(cwd, path))
	toAbsLogger.Trace("Converted absolute path result: %q", cleanedPath)
	return cleanedPath
}

// LimitString truncates a string to maxLen and adds "..." if truncated
func LimitString(s string, maxLen int) string {
	limitStringLogger := fileLogger.WithPrefix("LimitString").WithField("originalLength", len(s)).WithField("maxLength", maxLen)
	limitStringLogger.Debug("Limiting string length")
	s = strings.ReplaceAll(s, "\n", "\\n")
	if len(s) <= maxLen {
		limitStringLogger.Trace("String length (%d) is within max length (%d), no truncation", len(s), maxLen)
		return s
	}
	limited := s[:maxLen-3] + "..."
	limitStringLogger.Trace("String truncated from %d to %d characters: %q", len(s), len(limited), limited)
	return limited
}

// StrToFloat converts a string to a float64, returning 0 on error.
func StrToFloat(s string) float64 {
	strToFloatLogger := fileLogger.WithPrefix("StrToFloat").WithField("inputString", s)
	strToFloatLogger.Debug("Attempting to convert string to float")
	f, err := strconv.ParseFloat(s, 64)
	if err != nil {
		strToFloatLogger.Warning("Failed to convert string %q to float, returning 0: %v", s, err)
		return 0
	}
	strToFloatLogger.Trace("Successfully converted %q to float: %f", s, f)
	return f
}

func ResetWhereNecessary(associations map[string]FileCommandAssociation, db DB) error {
	resetWhereNecessaryLogger := fileLogger.WithPrefix("ResetWhereNecessary")
	resetWhereNecessaryLogger.Debug("Starting reset where necessary operation")
	resetWhereNecessaryLogger.Trace("File-command associations input: %v", associations)
	dirtyFiles := make(map[string]struct{})
	for _, association := range associations {
		resetWhereNecessaryLogger.Debug("Processing association for file: %q", association.File)
		for _, command := range association.Commands {
			resetWhereNecessaryLogger.Debug("Checking command %q for reset requirement", command.Name)
			resetWhereNecessaryLogger.Trace("Command details: %v", command)
			if command.Reset {
				resetWhereNecessaryLogger.Debug("Command %q requires reset for file %q, marking as dirty", command.Name, association.File)
				dirtyFiles[association.File] = struct{}{}
			}
		}
		for _, command := range association.IsolateCommands {
			resetWhereNecessaryLogger.Debug("Checking isolate command %q for reset requirement", command.Name)
			resetWhereNecessaryLogger.Trace("Isolate command details: %v", command)
			if command.Reset {
				resetWhereNecessaryLogger.Debug("Isolate command %q requires reset for file %q, marking as dirty", command.Name, association.File)
				dirtyFiles[association.File] = struct{}{}
			}
		}
	}
	resetWhereNecessaryLogger.Debug("Identified %d files that need to be reset", len(dirtyFiles))
	resetWhereNecessaryLogger.Trace("Dirty files identified: %v", dirtyFiles)

	for file := range dirtyFiles {
		resetWhereNecessaryLogger.Debug("Resetting file %q", file)
		fileData, err := db.GetFile(file)
		if err != nil {
			resetWhereNecessaryLogger.Warning("Failed to get original content for file %q from database: %v", file, err)
			// Seed the snapshot from current disk content if missing, then use it as fallback
			currentData, readErr := os.ReadFile(file)
			if readErr != nil {
				resetWhereNecessaryLogger.Warning("Additionally failed to read current file content for %q: %v", file, readErr)
				continue
			}
			// Best-effort attempt to save baseline; ignore errors to avoid blocking reset
			if saveErr := db.SaveFile(file, currentData); saveErr != nil {
				resetWhereNecessaryLogger.Warning("Failed to seed baseline snapshot for %q: %v", file, saveErr)
			}
			fileData = currentData
		}
		resetWhereNecessaryLogger.Trace("Retrieved original file data length for %q: %d", file, len(fileData))
		resetWhereNecessaryLogger.Debug("Writing original content back to file %q", file)
		err = os.WriteFile(file, fileData, 0644)
		if err != nil {
			resetWhereNecessaryLogger.Warning("Failed to write original content back to file %q: %v", file, err)
			continue
		}
		resetWhereNecessaryLogger.Debug("Successfully reset file %q", file)
	}
	resetWhereNecessaryLogger.Debug("Finished reset where necessary operation")
	return nil
}

func ResetAllFiles(db DB) error {
	resetAllFilesLogger := fileLogger.WithPrefix("ResetAllFiles")
	resetAllFilesLogger.Debug("Starting reset all files operation")
	fileSnapshots, err := db.GetAllFiles()
	if err != nil {
		resetAllFilesLogger.Error("Failed to get all file snapshots from database: %v", err)
		return err
	}
	resetAllFilesLogger.Debug("Found %d files in database to reset", len(fileSnapshots))
	resetAllFilesLogger.Trace("File snapshots retrieved: %v", fileSnapshots)

	for _, fileSnapshot := range fileSnapshots {
		resetAllFilesLogger.Debug("Resetting file %q", fileSnapshot.FilePath)
		err = os.WriteFile(fileSnapshot.FilePath, fileSnapshot.FileData, 0644)
		if err != nil {
			resetAllFilesLogger.Warning("Failed to write file %q to disk: %v", fileSnapshot.FilePath, err)
			continue
		}
		resetAllFilesLogger.Debug("File %q written to disk successfully", fileSnapshot.FilePath)
	}
	resetAllFilesLogger.Debug("Finished reset all files operation")
	return nil
}
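ResetWhereNecessary and ResetAllFiles restore files from snapshots stored behind the DB interface (GetFile, SaveFile, GetAllFiles). A minimal in-package sketch of the intended call order, assuming files, commands, and a db value are already available (none of that wiring is part of this diff, and fmt is assumed to be imported):

// resetPass is a sketch only: it assumes the surrounding utils package, a db
// that satisfies the DB interface used above, and inputs produced elsewhere.
// It associates files with commands, then restores any file whose commands
// request a reset.
func resetPass(files []string, commands []ModifyCommand, db DB) error {
	associations, err := AssociateFilesWithCommands(files, commands)
	if err != nil {
		return fmt.Errorf("associate files with commands: %w", err)
	}
	return ResetWhereNecessary(associations, db)
}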
21
utils/flags.go
Normal file
@@ -0,0 +1,21 @@
package utils

import (
	"flag"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

// flagsLogger is a scoped logger for the utils/flags package.
var flagsLogger = logger.Default.WithPrefix("utils/flags")

var (
	ParallelFiles = flag.Int("P", 100, "Number of files to process in parallel")
	Filter        = flag.String("f", "", "Filter commands before running them")
	JSON          = flag.Bool("json", false, "Enable JSON mode for processing JSON files")
)

func init() {
	flagsLogger.Debug("Initializing flags")
	flagsLogger.Trace("ParallelFiles initial value: %d, Filter initial value: %q, JSON initial value: %t", *ParallelFiles, *Filter, *JSON)
}
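The three flags above are consumed elsewhere in the tool; the sketch below shows one plausible wiring, and the worker-pool part is an assumption for illustration, not code from this diff (same package, "sync" assumed imported, processFile supplied by the caller):

// processAll is a sketch only. -f narrows the command list via FilterCommands
// (defined in utils/modifycommand.go below) and -P bounds how many files are
// processed concurrently; what "processing" means is left to processFile.
func processAll(files []string, loaded []ModifyCommand, processFile func(string, []ModifyCommand)) {
	cmds := FilterCommands(loaded, *Filter)    // -f: comma-separated substring filter
	sem := make(chan struct{}, *ParallelFiles) // -P: limit on parallel files
	var wg sync.WaitGroup
	for _, f := range files {
		wg.Add(1)
		sem <- struct{}{}
		go func(file string) {
			defer wg.Done()
			defer func() { <-sem }()
			processFile(file, cmds)
		}(f)
	}
	wg.Wait()
}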
375
utils/modifycommand.go
Normal file
@@ -0,0 +1,375 @@
package utils

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	logger "git.site.quack-lab.dev/dave/cylogger"
	"github.com/bmatcuk/doublestar/v4"
	"gopkg.in/yaml.v3"
)

// modifyCommandLogger is a scoped logger for the utils/modifycommand package.
var modifyCommandLogger = logger.Default.WithPrefix("utils/modifycommand")

type ModifyCommand struct {
	Name      string                 `yaml:"name,omitempty"`
	Regex     string                 `yaml:"regex,omitempty"`
	Regexes   []string               `yaml:"regexes,omitempty"`
	Lua       string                 `yaml:"lua,omitempty"`
	Files     []string               `yaml:"files,omitempty"`
	Reset     bool                   `yaml:"reset,omitempty"`
	LogLevel  string                 `yaml:"loglevel,omitempty"`
	Isolate   bool                   `yaml:"isolate,omitempty"`
	NoDedup   bool                   `yaml:"nodedup,omitempty"`
	Disabled  bool                   `yaml:"disable,omitempty"`
	JSON      bool                   `yaml:"json,omitempty"`
	Modifiers map[string]interface{} `yaml:"modifiers,omitempty"`
}

type CookFile []ModifyCommand

func (c *ModifyCommand) Validate() error {
	validateLogger := modifyCommandLogger.WithPrefix("Validate").WithField("commandName", c.Name)
	validateLogger.Debug("Validating command")

	// For JSON mode, regex patterns are not required
	if !c.JSON {
		if c.Regex == "" && len(c.Regexes) == 0 {
			validateLogger.Error("Validation failed: Regex pattern is required for non-JSON mode")
			return fmt.Errorf("pattern is required for non-JSON mode")
		}
	}

	if c.Lua == "" {
		validateLogger.Error("Validation failed: Lua expression is required")
		return fmt.Errorf("lua expression is required")
	}
	if len(c.Files) == 0 {
		validateLogger.Error("Validation failed: At least one file is required")
		return fmt.Errorf("at least one file is required")
	}
	if c.LogLevel == "" {
		validateLogger.Debug("LogLevel not specified, defaulting to INFO")
		c.LogLevel = "INFO"
	}
	validateLogger.Debug("Command validated successfully")
	return nil
}

// Ehh.. Not much better... Guess this wasn't the big deal
var matchesMemoTable map[string]bool = make(map[string]bool)

func Matches(path string, glob string) (bool, error) {
	matchesLogger := modifyCommandLogger.WithPrefix("Matches").WithField("path", path).WithField("glob", glob)
	matchesLogger.Debug("Checking if path matches glob")
	key := fmt.Sprintf("%s:%s", path, glob)
	if matches, ok := matchesMemoTable[key]; ok {
		matchesLogger.Debug("Found match in memo table: %t", matches)
		return matches, nil
	}
	matches, err := doublestar.Match(glob, path)
	if err != nil {
		matchesLogger.Error("Failed to match glob: %v", err)
		return false, fmt.Errorf("failed to match glob %s with file %s: %w", glob, path, err)
	}
	matchesMemoTable[key] = matches
	matchesLogger.Debug("Match result: %t, storing in memo table", matches)
	return matches, nil
}

func SplitPattern(pattern string) (string, string) {
	splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
	splitPatternLogger.Debug("Splitting pattern")
	splitPatternLogger.Trace("Original pattern: %q", pattern)
	static, pattern := doublestar.SplitPattern(pattern)

	cwd, err := os.Getwd()
	if err != nil {
		splitPatternLogger.Error("Error getting current working directory: %v", err)
		return "", ""
	}
	splitPatternLogger.Trace("Current working directory: %q", cwd)
	if static == "" {
		splitPatternLogger.Debug("Static part is empty, defaulting to current working directory")
		static = cwd
	}
	if !filepath.IsAbs(static) {
		splitPatternLogger.Debug("Static part is not absolute, joining with current working directory")
		static = filepath.Join(cwd, static)
		static = filepath.Clean(static)
		splitPatternLogger.Trace("Static path after joining and cleaning: %q", static)
	}
	static = strings.ReplaceAll(static, "\\", "/")
	splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
	return static, pattern
}

type FileCommandAssociation struct {
	File            string
	IsolateCommands []ModifyCommand
	Commands        []ModifyCommand
}

func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[string]FileCommandAssociation, error) {
	associateFilesLogger := modifyCommandLogger.WithPrefix("AssociateFilesWithCommands")
	associateFilesLogger.Debug("Associating files with commands")
	associateFilesLogger.Trace("Input files: %v", files)
	associateFilesLogger.Trace("Input commands: %v", commands)
	associationCount := 0
	fileCommands := make(map[string]FileCommandAssociation)

	for _, file := range files {
		file = strings.ReplaceAll(file, "\\", "/")
		associateFilesLogger.Debug("Processing file: %q", file)
		fileCommands[file] = FileCommandAssociation{
			File:            file,
			IsolateCommands: []ModifyCommand{},
			Commands:        []ModifyCommand{},
		}
		for _, command := range commands {
			associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
			for _, glob := range command.Files {
				glob = strings.ReplaceAll(glob, "\\", "/")
				static, pattern := SplitPattern(glob)
				associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)

				// Build absolute path for the current file to compare with static
				cwd, err := os.Getwd()
				if err != nil {
					associateFilesLogger.Warning("Failed to get CWD when matching %q for file %q: %v", glob, file, err)
					continue
				}
				var absFile string
				if filepath.IsAbs(file) {
					absFile = filepath.Clean(file)
				} else {
					absFile = filepath.Clean(filepath.Join(cwd, file))
				}
				absFile = strings.ReplaceAll(absFile, "\\", "/")
				associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)

				// Only match if the file is under the static root
				if !(strings.HasPrefix(absFile, static+"/") || absFile == static) {
					associateFilesLogger.Trace("Skipping glob %q for file %q because file is outside static root %q", glob, file, static)
					continue
				}

				patternFile := strings.TrimPrefix(absFile, static+`/`)
				associateFilesLogger.Trace("Pattern-relative path used for match: %q", patternFile)
				matches, err := Matches(patternFile, pattern)
				if err != nil {
					associateFilesLogger.Warning("Failed to match glob %q with file %q: %v", glob, file, err)
					continue
				}
				if matches {
					associateFilesLogger.Debug("File %q matches glob %q. Associating with command %q", file, glob, command.Name)
					association := fileCommands[file]

					if command.Isolate {
						associateFilesLogger.Debug("Command %q is an isolate command, adding to isolate list", command.Name)
						association.IsolateCommands = append(association.IsolateCommands, command)
					} else {
						associateFilesLogger.Debug("Command %q is a regular command, adding to regular list", command.Name)
						association.Commands = append(association.Commands, command)
					}
					fileCommands[file] = association
					associationCount++
				} else {
					associateFilesLogger.Trace("File %q did not match glob %q (pattern=%q, rel=%q)", file, glob, pattern, patternFile)
				}
			}
		}
		currentFileCommands := fileCommands[file]
		associateFilesLogger.Debug("Finished processing file %q. Found %d regular commands and %d isolate commands", file, len(currentFileCommands.Commands), len(currentFileCommands.IsolateCommands))
		associateFilesLogger.Trace("Commands for file %q: %v", file, currentFileCommands.Commands)
		associateFilesLogger.Trace("Isolate commands for file %q: %v", file, currentFileCommands.IsolateCommands)
	}
	associateFilesLogger.Info("Completed association. Found %d total associations for %d files and %d commands", associationCount, len(files), len(commands))
	return fileCommands, nil
}

func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
	aggregateGlobsLogger := modifyCommandLogger.WithPrefix("AggregateGlobs")
	aggregateGlobsLogger.Debug("Aggregating glob patterns from commands")
	aggregateGlobsLogger.Trace("Input commands for aggregation: %v", commands)
	globs := make(map[string]struct{})
	for _, command := range commands {
		aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
		for _, glob := range command.Files {
			resolvedGlob := strings.Replace(glob, "~", os.Getenv("HOME"), 1)
			resolvedGlob = strings.ReplaceAll(resolvedGlob, "\\", "/")
			aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q)", glob, resolvedGlob)
			globs[resolvedGlob] = struct{}{}
		}
	}
	aggregateGlobsLogger.Debug("Finished aggregating globs. Found %d unique glob patterns", len(globs))
	aggregateGlobsLogger.Trace("Aggregated unique globs: %v", globs)
	return globs
}

func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
	expandGlobsLogger := modifyCommandLogger.WithPrefix("ExpandGLobs")
	expandGlobsLogger.Debug("Expanding glob patterns to actual files")
	expandGlobsLogger.Trace("Input patterns for expansion: %v", patterns)
	var files []string
	filesMap := make(map[string]bool)

	cwd, err := os.Getwd()
	if err != nil {
		expandGlobsLogger.Error("Failed to get current working directory: %v", err)
		return nil, fmt.Errorf("failed to get current working directory: %w", err)
	}
	expandGlobsLogger.Debug("Current working directory: %q", cwd)

	for pattern := range patterns {
		expandGlobsLogger.Debug("Processing glob pattern: %q", pattern)
		static, pattern := SplitPattern(pattern)
		matches, err := doublestar.Glob(os.DirFS(static), pattern)
		if err != nil {
			expandGlobsLogger.Warning("Error expanding glob %q in %q: %v", pattern, static, err)
			continue
		}
		expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
		expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
		for _, m := range matches {
			m = filepath.Join(static, m)
			info, err := os.Stat(m)
			if err != nil {
				expandGlobsLogger.Warning("Error getting file info for %q: %v", m, err)
				continue
			}
			if !info.IsDir() && !filesMap[m] {
				expandGlobsLogger.Trace("Adding unique file to list: %q", m)
				filesMap[m], files = true, append(files, m)
			}
		}
	}

	if len(files) > 0 {
		expandGlobsLogger.Debug("Finished expanding globs. Found %d unique files to process", len(files))
		expandGlobsLogger.Trace("Unique files to process: %v", files)
	} else {
		expandGlobsLogger.Warning("No files found after expanding glob patterns.")
	}
	return files, nil
}

func LoadCommands(args []string) ([]ModifyCommand, error) {
	loadCommandsLogger := modifyCommandLogger.WithPrefix("LoadCommands")
	loadCommandsLogger.Debug("Loading commands from arguments (cook files or direct patterns)")
	loadCommandsLogger.Trace("Input arguments: %v", args)
	commands := []ModifyCommand{}

	for _, arg := range args {
		loadCommandsLogger.Debug("Processing argument for commands: %q", arg)
		newCommands, err := LoadCommandsFromCookFiles(arg)
		if err != nil {
			loadCommandsLogger.Error("Failed to load commands from argument %q: %v", arg, err)
			return nil, fmt.Errorf("failed to load commands from cook files: %w", err)
		}
		loadCommandsLogger.Debug("Successfully loaded %d commands from %q", len(newCommands), arg)
		for _, cmd := range newCommands {
			if cmd.Disabled {
				loadCommandsLogger.Debug("Skipping disabled command: %q", cmd.Name)
				continue
			}
			commands = append(commands, cmd)
			loadCommandsLogger.Trace("Added command %q. Current total commands: %d", cmd.Name, len(commands))
		}
	}

	loadCommandsLogger.Info("Finished loading commands. Total %d commands loaded", len(commands))
	return commands, nil
}

func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, error) {
	loadCookFilesLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFiles").WithField("pattern", pattern)
	loadCookFilesLogger.Debug("Loading commands from cook files based on pattern")
	loadCookFilesLogger.Trace("Input pattern: %q", pattern)
	static, pattern := SplitPattern(pattern)
	commands := []ModifyCommand{}
	cookFiles, err := doublestar.Glob(os.DirFS(static), pattern)
	if err != nil {
		loadCookFilesLogger.Error("Failed to glob cook files for pattern %q: %v", pattern, err)
		return nil, fmt.Errorf("failed to glob cook files: %w", err)
	}
	loadCookFilesLogger.Debug("Found %d cook files for pattern %q", len(cookFiles), pattern)
	loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)

	for _, cookFile := range cookFiles {
		cookFile = filepath.Join(static, cookFile)
		cookFile = filepath.Clean(cookFile)
		cookFile = strings.ReplaceAll(cookFile, "\\", "/")
		loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)

		cookFileData, err := os.ReadFile(cookFile)
		if err != nil {
			loadCookFilesLogger.Error("Failed to read cook file %q: %v", cookFile, err)
			return nil, fmt.Errorf("failed to read cook file: %w", err)
		}
		loadCookFilesLogger.Trace("Read %d bytes from cook file %q", len(cookFileData), cookFile)
		newCommands, err := LoadCommandsFromCookFile(cookFileData)
		if err != nil {
			loadCookFilesLogger.Error("Failed to load commands from cook file data for %q: %v", cookFile, err)
			return nil, fmt.Errorf("failed to load commands from cook file: %w", err)
		}
		commands = append(commands, newCommands...)
		loadCookFilesLogger.Debug("Added %d commands from cook file %q. Total commands now: %d", len(newCommands), cookFile, len(commands))
	}

	loadCookFilesLogger.Debug("Finished loading commands from cook files. Total %d commands", len(commands))
	return commands, nil
}

func LoadCommandsFromCookFile(cookFileData []byte) ([]ModifyCommand, error) {
	loadCommandLogger := modifyCommandLogger.WithPrefix("LoadCommandsFromCookFile")
	loadCommandLogger.Debug("Unmarshaling commands from cook file data")
	loadCommandLogger.Trace("Cook file data length: %d", len(cookFileData))
	commands := []ModifyCommand{}
	err := yaml.Unmarshal(cookFileData, &commands)
	if err != nil {
		loadCommandLogger.Error("Failed to unmarshal cook file data: %v", err)
		return nil, fmt.Errorf("failed to unmarshal cook file: %w", err)
	}
	loadCommandLogger.Debug("Successfully unmarshaled %d commands", len(commands))
	loadCommandLogger.Trace("Unmarshaled commands: %v", commands)
	return commands, nil
}

// CountGlobsBeforeDedup counts the total number of glob patterns across all commands before deduplication
func CountGlobsBeforeDedup(commands []ModifyCommand) int {
	countGlobsLogger := modifyCommandLogger.WithPrefix("CountGlobsBeforeDedup")
	countGlobsLogger.Debug("Counting glob patterns before deduplication")
	count := 0
	for _, cmd := range commands {
		countGlobsLogger.Trace("Processing command %q, adding %d globs", cmd.Name, len(cmd.Files))
		count += len(cmd.Files)
	}
	countGlobsLogger.Debug("Total glob patterns before deduplication: %d", count)
	return count
}

func FilterCommands(commands []ModifyCommand, filter string) []ModifyCommand {
	filterCommandsLogger := modifyCommandLogger.WithPrefix("FilterCommands").WithField("filter", filter)
	filterCommandsLogger.Debug("Filtering commands")
	filterCommandsLogger.Trace("Input commands: %v", commands)
	filteredCommands := []ModifyCommand{}
	filters := strings.Split(filter, ",")
	filterCommandsLogger.Trace("Split filters: %v", filters)
	for _, cmd := range commands {
		filterCommandsLogger.Debug("Checking command %q against filters", cmd.Name)
		for _, f := range filters {
			if strings.Contains(cmd.Name, f) {
				filterCommandsLogger.Debug("Command %q matches filter %q, adding to filtered list", cmd.Name, f)
				filteredCommands = append(filteredCommands, cmd)
				break // Command matches, no need to check other filters
			}
		}
	}
	filterCommandsLogger.Debug("Finished filtering commands. Found %d filtered commands", len(filteredCommands))
	filterCommandsLogger.Trace("Filtered commands: %v", filteredCommands)
	return filteredCommands
}
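Taken together, LoadCommands, AggregateGlobs, ExpandGLobs and AssociateFilesWithCommands turn cook files into per-file work. A hedged sketch of the YAML shape that LoadCommandsFromCookFile accepts, written as an in-package example test (assumes "fmt" is imported in the test file; the regex and lua bodies are placeholders, only the field names, which mirror the yaml tags on ModifyCommand, come from this diff):

// ExampleLoadCommandsFromCookFile is a sketch only. A cook file is a YAML list
// of commands whose keys follow the yaml tags on ModifyCommand.
func ExampleLoadCommandsFromCookFile() {
	cook := []byte(`
- name: double-light-range
  regex: 'range="(\d+)"'   # placeholder pattern
  lua: 'return value'      # placeholder Lua body
  files:
    - "**/*.xml"
  reset: true
`)
	cmds, err := LoadCommandsFromCookFile(cook)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(cmds), cmds[0].Name, cmds[0].Validate() == nil)
	// Output: 1 double-light-range true
}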
1000
utils/modifycommand_test.go
Normal file
File diff suppressed because it is too large
79
utils/replacecommand.go
Normal file
@@ -0,0 +1,79 @@
package utils

import (
	"fmt"
	"sort"

	logger "git.site.quack-lab.dev/dave/cylogger"
)

// replaceCommandLogger is a scoped logger for the utils/replacecommand package.
var replaceCommandLogger = logger.Default.WithPrefix("utils/replacecommand")

type ReplaceCommand struct {
	From int
	To   int
	With string
}

func ExecuteModifications(modifications []ReplaceCommand, fileData string) (string, int) {
	executeModificationsLogger := replaceCommandLogger.WithPrefix("ExecuteModifications")
	executeModificationsLogger.Debug("Executing a batch of text modifications")
	executeModificationsLogger.Trace("Number of modifications: %d, Original file data length: %d", len(modifications), len(fileData))
	var err error

	sort.Slice(modifications, func(i, j int) bool {
		return modifications[i].From > modifications[j].From
	})
	executeModificationsLogger.Debug("Modifications sorted in reverse order for safe replacement")
	executeModificationsLogger.Trace("Sorted modifications: %v", modifications)

	executed := 0
	for idx, modification := range modifications {
		executeModificationsLogger.Debug("Applying modification %d/%d", idx+1, len(modifications))
		executeModificationsLogger.Trace("Current modification details: From=%d, To=%d, With=%q", modification.From, modification.To, modification.With)
		fileData, err = modification.Execute(fileData)
		if err != nil {
			executeModificationsLogger.Error("Failed to execute replacement for modification %+v: %v", modification, err)
			continue
		}
		executed++
		executeModificationsLogger.Trace("File data length after modification: %d", len(fileData))
	}
	executeModificationsLogger.Info("Successfully applied %d text replacements", executed)
	return fileData, executed
}

func (m *ReplaceCommand) Execute(fileDataStr string) (string, error) {
	executeLogger := replaceCommandLogger.WithPrefix("Execute").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With))
	executeLogger.Debug("Attempting to execute single replacement")
	err := m.Validate(len(fileDataStr))
	if err != nil {
		executeLogger.Error("Failed to validate modification: %v", err)
		return fileDataStr, fmt.Errorf("failed to validate modification: %v", err)
	}

	executeLogger.Trace("Applying replacement: fileDataStr[:%d] + %q + fileDataStr[%d:]", m.From, m.With, m.To)
	result := fileDataStr[:m.From] + m.With + fileDataStr[m.To:]
	executeLogger.Trace("Replacement executed. Result length: %d", len(result))
	return result, nil
}

func (m *ReplaceCommand) Validate(maxsize int) error {
	validateLogger := replaceCommandLogger.WithPrefix("Validate").WithField("modification", fmt.Sprintf("From:%d,To:%d,With:%q", m.From, m.To, m.With)).WithField("maxSize", maxsize)
	validateLogger.Debug("Validating replacement command against max size")
	if m.To < m.From {
		validateLogger.Error("Validation failed: 'To' (%d) is less than 'From' (%d)", m.To, m.From)
		return fmt.Errorf("command to is less than from: %v", m)
	}
	if m.From > maxsize || m.To > maxsize {
		validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is greater than max size (%d)", m.From, m.To, maxsize)
		return fmt.Errorf("command from or to is greater than replacement length: %v", m)
	}
	if m.From < 0 || m.To < 0 {
		validateLogger.Error("Validation failed: 'From' (%d) or 'To' (%d) is less than 0", m.From, m.To)
		return fmt.Errorf("command from or to is less than 0: %v", m)
	}
	validateLogger.Debug("Modification command validated successfully")
	return nil
}
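Because ExecuteModifications sorts the batch by From in descending order, replacements near the end of the string are applied first, so the offsets of non-overlapping replacements computed against the original string stay valid. A small in-package example in the spirit of the tests that follow (assumes "fmt" is imported in the test file):

// ExampleExecuteModifications is a sketch only. Both spans were computed
// against the original "Hello world"; descending-From application keeps the
// earlier span's offsets valid.
func ExampleExecuteModifications() {
	out, n := ExecuteModifications([]ReplaceCommand{
		{From: 0, To: 5, With: "Howdy"},
		{From: 6, To: 11, With: "planet"},
	}, "Hello world")
	fmt.Println(out, n)
	// Output: Howdy planet 2
}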
504
utils/replacecommand_test.go
Normal file
@@ -0,0 +1,504 @@
package utils

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestReplaceCommandExecute(t *testing.T) {
	tests := []struct {
		name        string
		input       string
		command     ReplaceCommand
		expected    string
		shouldError bool
	}{
		{
			name:        "Simple replacement",
			input:       "This is a test string",
			command:     ReplaceCommand{From: 5, To: 7, With: "was"},
			expected:    "This was a test string",
			shouldError: false,
		},
		{
			name:        "Replace at beginning",
			input:       "Hello world",
			command:     ReplaceCommand{From: 0, To: 5, With: "Hi"},
			expected:    "Hi world",
			shouldError: false,
		},
		{
			name:        "Replace at end",
			input:       "Hello world",
			command:     ReplaceCommand{From: 6, To: 11, With: "everyone"},
			expected:    "Hello everyone",
			shouldError: false,
		},
		{
			name:        "Replace entire string",
			input:       "Hello world",
			command:     ReplaceCommand{From: 0, To: 11, With: "Goodbye!"},
			expected:    "Goodbye!",
			shouldError: false,
		},
		{
			name:        "Error: From > To",
			input:       "Test string",
			command:     ReplaceCommand{From: 7, To: 5, With: "fail"},
			expected:    "Test string",
			shouldError: true,
		},
		{
			name:        "Error: From > string length",
			input:       "Test",
			command:     ReplaceCommand{From: 10, To: 12, With: "fail"},
			expected:    "Test",
			shouldError: true,
		},
		{
			name:        "Error: To > string length",
			input:       "Test",
			command:     ReplaceCommand{From: 2, To: 10, With: "fail"},
			expected:    "Test",
			shouldError: true,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			result, err := tc.command.Execute(tc.input)

			if tc.shouldError {
				if err == nil {
					t.Errorf("Expected an error for command %+v but got none", tc.command)
				}
			} else {
				if err != nil {
					t.Errorf("Unexpected error: %v", err)
				}
				if result != tc.expected {
					t.Errorf("Expected %q, got %q", tc.expected, result)
				}
			}
		})
	}
}

func TestExecuteModifications(t *testing.T) {
	tests := []struct {
		name          string
		input         string
		modifications []ReplaceCommand
		expected      string
		expectedCount int
	}{
		{
			name:  "Single modification",
			input: "Hello world",
			modifications: []ReplaceCommand{
				{From: 0, To: 5, With: "Hi"},
			},
			expected:      "Hi world",
			expectedCount: 1,
		},
		{
			name:  "Multiple modifications",
			input: "This is a test string",
			modifications: []ReplaceCommand{
				{From: 0, To: 4, With: "That"},
				{From: 8, To: 14, With: "sample"},
			},
			expected:      "That is sample string",
			expectedCount: 2,
		},
		{
			name:  "Overlapping modifications",
			input: "ABCDEF",
			modifications: []ReplaceCommand{
				{From: 0, To: 3, With: "123"}, // ABC -> 123
				{From: 2, To: 5, With: "xyz"}, // CDE -> xyz
			},
			// The actual behavior with the current implementation
			expected:      "123yzF",
			expectedCount: 2,
		},
		{
			name:  "Sequential modifications",
			input: "Hello world",
			modifications: []ReplaceCommand{
				{From: 0, To: 5, With: "Hi"},
				{From: 5, To: 6, With: ""}, // Remove the space
				{From: 6, To: 11, With: "everyone"},
			},
			expected:      "Hieveryone",
			expectedCount: 3,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// Make a copy of the modifications to avoid modifying the test case
			mods := make([]ReplaceCommand, len(tc.modifications))
			copy(mods, tc.modifications)

			result, count := ExecuteModifications(mods, tc.input)

			if count != tc.expectedCount {
				t.Errorf("Expected %d modifications, got %d", tc.expectedCount, count)
			}

			if result != tc.expected {
				t.Errorf("Expected %q, got %q", tc.expected, result)
			}
		})
	}
}

func TestReverseOrderExecution(t *testing.T) {
	// This test verifies the current behavior of modification application
	input := "Original text with multiple sections"

	// Modifications in specific positions
	modifications := []ReplaceCommand{
		{From: 0, To: 8, With: "Modified"},  // Original -> Modified
		{From: 9, To: 13, With: "document"}, // text -> document
		{From: 14, To: 22, With: "without"}, // with -> without
		{From: 23, To: 31, With: "any"},     // multiple -> any
	}

	// The actual current behavior of our implementation
	expected := "Modified document withouttanytions"

	result, count := ExecuteModifications(modifications, input)

	if count != 4 {
		t.Errorf("Expected 4 modifications, got %d", count)
	}

	if result != expected {
		t.Errorf("Expected %q, got %q", expected, result)
	}
}

// Replace text in the middle of a string with new content
func TestReplaceCommandExecute_ReplacesTextInMiddle(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello replaced, how are you?", result)
}

// Replace with empty string (deletion)
func TestReplaceCommandExecute_DeletesText(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello , how are you?", result)
}

// Replace with longer string than original segment
func TestReplaceCommandExecute_WithLongerString(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 6,
		To:   11,
		With: "longerreplacement",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Hello longerreplacement, how are you?", result)
}

// From and To values are the same (zero-length replacement)
func TestReplaceCommandExecute_ZeroLengthReplacement(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 5,
		To:   5,
		With: "inserted",
	}

	fileContent := "Hello world"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.NoError(t, err)
	assert.Equal(t, "Helloinserted world", result)
}

// From value is greater than To value
func TestReplaceCommandExecute_FromGreaterThanTo(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 10,
		To:   5,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world, how are you?", result)
}

// From or To values exceed string length
func TestReplaceCommandExecute_FromOrToExceedsLength(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: 5,
		To:   50, // Exceeds the length of the fileContent
		With: "replaced",
	}

	fileContent := "Hello world"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world", result)
}

// From or To values are negative
func TestReplaceCommandExecute_NegativeFromOrTo(t *testing.T) {
	// Arrange
	cmd := &ReplaceCommand{
		From: -1,
		To:   10,
		With: "replaced",
	}

	fileContent := "Hello world, how are you?"

	// Act
	result, err := cmd.Execute(fileContent)

	// Assert
	assert.Error(t, err)
	assert.Equal(t, "Hello world, how are you?", result)
}

// Modifications are applied in reverse order (from highest to lowest 'From' value)
func TestExecuteModificationsAppliesInReverseOrder(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{
		{From: 0, To: 4, With: "That"},
		{From: 10, To: 14, With: "sample"},
		{From: 26, To: 38, With: "modifications"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "That is a sample string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// One or more modifications fail but others succeed
func TestExecuteModificationsWithPartialFailures(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	// Create a custom ReplaceCommand implementation that will fail
	failingCommand := ReplaceCommand{
		From: 15,
		To:   10, // Invalid range (To < From) to cause failure
		With: "will fail",
	}

	// Valid commands
	validCommand1 := ReplaceCommand{
		From: 0,
		To:   4,
		With: "That",
	}

	validCommand2 := ReplaceCommand{
		From: 26,
		To:   38,
		With: "modifications",
	}

	modifications := []ReplaceCommand{failingCommand, validCommand1, validCommand2}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "That is a test string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	// Only 2 out of 3 modifications should succeed
	if executed != 2 {
		t.Errorf("Expected 2 modifications to be executed successfully, but got %d", executed)
	}
}

// All valid modifications are executed and the modified string is returned
func TestExecuteModificationsAllValid(t *testing.T) {
	// Setup test data
	fileData := "Hello world, this is a test"

	modifications := []ReplaceCommand{
		{From: 0, To: 5, With: "Hi"},
		{From: 18, To: 20, With: "was"},
		{From: 21, To: 27, With: "an example"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "Hi world, this was an example"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// The count of successfully executed modifications is returned
func TestExecuteModificationsReturnsCorrectCount(t *testing.T) {
	// Setup test data
	fileData := "Initial text for testing"

	modifications := []ReplaceCommand{
		{From: 0, To: 7, With: "Final"},
		{From: 12, To: 16, With: "example"},
		{From: 17, To: 24, With: "process"},
	}

	// Execute the function
	_, executed := ExecuteModifications(modifications, fileData)

	// Verify the count of executed modifications
	expectedExecuted := 3
	if executed != expectedExecuted {
		t.Errorf("Expected %d modifications to be executed, but got %d", expectedExecuted, executed)
	}
}

// Empty modifications list returns the original string with zero executed count
func TestExecuteModificationsWithEmptyList(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	if result != fileData {
		t.Errorf("Expected result to be %q, but got %q", fileData, result)
	}

	if executed != 0 {
		t.Errorf("Expected 0 modifications to be executed, but got %d", executed)
	}
}

// Modifications with identical 'From' values
func TestExecuteModificationsWithIdenticalFromValues(t *testing.T) {
	// Setup test data
	fileData := "This is a test string for replacements"

	modifications := []ReplaceCommand{
		{From: 10, To: 14, With: "sample"},
		{From: 10, To: 14, With: "example"},
		{From: 26, To: 38, With: "modifications"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	// Yes, it's mangled, yes, it's intentional
	// Every subsequent command works with the modified contents of the previous command
	// So by the time we get to "example" the indices have already eaten into "sample"... In fact they have eaten into "samp", "le" is left
	// So we prepend "example" and end up with "examplele"
	// Whether sample or example goes first here is irrelevant to us
	// But it just so happens that sample goes first, so we end up with "examplele"
	expectedResult := "This is a examplele string for modifications"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}

// Modifications that would affect each other if not sorted properly
func TestExecuteModificationsHandlesOverlappingRanges(t *testing.T) {
	// Setup test data
	fileData := "The quick brown fox jumps over the lazy dog"

	modifications := []ReplaceCommand{
		{From: 4, To: 9, With: "slow"},
		{From: 10, To: 15, With: "red"},
		{From: 16, To: 19, With: "cat"},
	}

	// Execute the function
	result, executed := ExecuteModifications(modifications, fileData)

	// Verify results
	expectedResult := "The slow red cat jumps over the lazy dog"
	if result != expectedResult {
		t.Errorf("Expected result to be %q, but got %q", expectedResult, result)
	}

	if executed != 3 {
		t.Errorf("Expected 3 modifications to be executed, but got %d", executed)
	}
}