3 Commits

3 changed files with 134 additions and 30 deletions

View File

@@ -84,7 +84,7 @@ END`
 assert.Len(t, association.Commands, 0, "Expected 0 regular commands")

 // Run the isolate commands
-result, err := RunIsolateCommands(association, "test.txt", testContent)
+result, err := RunIsolateCommands(association, "test.txt", testContent, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run isolate commands: %v", err)
 }
@@ -162,7 +162,7 @@ END_SECTION2`
 }

 // Run the isolate commands
-result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent)
+result, err := RunIsolateCommands(associations["test.txt"], "test.txt", testContent, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run isolate commands: %v", err)
 }
@@ -234,7 +234,7 @@ func TestIsolateCommandsWithJSONMode(t *testing.T) {
 }

 // Run the isolate commands
-result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent)
+result, err := RunIsolateCommands(associations["test.json"], "test.json", testContent, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run isolate commands: %v", err)
 }
@@ -309,7 +309,7 @@ END_REGULAR`
 assert.Len(t, association.Commands, 1, "Expected 1 regular command")

 // First run isolate commands
-isolateResult, err := RunIsolateCommands(association, "test.txt", testContent)
+isolateResult, err := RunIsolateCommands(association, "test.txt", testContent, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run isolate commands: %v", err)
 }
@@ -320,7 +320,7 @@ END_REGULAR`

 // Then run regular commands
 commandLoggers := make(map[string]*logger.Logger)
-finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers)
+finalResult, err := RunOtherCommands("test.txt", isolateResult, association, commandLoggers, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run regular commands: %v", err)
 }
@@ -397,7 +397,7 @@ irons_spellbooks:chain_lightning
 assert.Len(t, association.Commands, 0, "Expected 0 regular commands")

 // Run the isolate commands
-result, err := RunIsolateCommands(association, "irons_spellbooks-server.toml", testContent)
+result, err := RunIsolateCommands(association, "irons_spellbooks-server.toml", testContent, false)
 if err != nil && err != NothingToDo {
     t.Fatalf("Failed to run isolate commands: %v", err)
 }

View File

@@ -12,8 +12,8 @@ import (
     "cook/processor"
     "cook/utils"

-    "github.com/spf13/cobra"
     logger "git.site.quack-lab.dev/dave/cylogger"
+    "github.com/spf13/cobra"
 )

 //go:embed example_cook.toml
@@ -54,12 +54,16 @@ Features:
 - Parallel file processing
 - Command filtering and organization`,
 PersistentPreRun: func(cmd *cobra.Command, args []string) {
-    CreateExampleConfig()
     logger.InitFlag()
     mainLogger.Info("Initializing with log level: %s", logger.GetLevel().String())
     mainLogger.Trace("Full argv: %v", os.Args)
 },
 Run: func(cmd *cobra.Command, args []string) {
+    exampleFlag, _ := cmd.Flags().GetBool("example")
+    if exampleFlag {
+        CreateExampleConfig()
+        return
+    }
     if len(args) == 0 {
         cmd.Usage()
         return
@@ -76,6 +80,7 @@ Features:
 rootCmd.Flags().StringP("filter", "f", "", "Filter commands before running them")
 rootCmd.Flags().Bool("json", false, "Enable JSON mode for processing JSON files")
 rootCmd.Flags().BoolP("conv", "c", false, "Convert YAML files to TOML format")
+rootCmd.Flags().BoolP("example", "e", false, "Generate example_cook.toml and exit")

 // Set up examples in the help text
 rootCmd.SetUsageTemplate(`Usage:{{if .Runnable}}

View File

@@ -189,24 +189,122 @@ end
 ---@param table table
 ---@param depth number?
-function DumpTable(table, depth)
+function dump(table, depth)
     if depth == nil then
         depth = 0
     end
     if (depth > 200) then
-        print("Error: Depth > 200 in dumpTable()")
+        print("Error: Depth > 200 in dump()")
         return
     end
     for k, v in pairs(table) do
         if (type(v) == "table") then
             print(string.rep(" ", depth) .. k .. ":")
-            DumpTable(v, depth + 1)
+            dump(v, depth + 1)
         else
             print(string.rep(" ", depth) .. k .. ": ", v)
         end
     end
 end
+
+--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
+---
+--- Requirements/assumptions:
+--- - Input is a single string containing the entire CSV content.
+--- - Field separators are commas (,).
+--- - Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.
+--- - Fields may be quoted with double quotes (").
+--- - Inside quoted fields, doubled quotes ("") represent a literal quote character.
+--- - No backslash escaping is supported (not part of RFC 4180).
+--- - Newlines inside quoted fields are preserved as part of the field.
+--- - Leading/trailing spaces are preserved; no trimming is performed.
+--- - Empty fields and empty rows are preserved.
+--- - The final row is emitted even if the text does not end with a newline.
+---
+--- Returns:
+--- - A table (array) of rows; each row is a table (array) of string fields.
+local function parseCSV(csv)
+    local rows = {}
+    local fields = {}
+    local field = {}
+    local STATE_DEFAULT = 1
+    local STATE_IN_QUOTES = 2
+    local STATE_QUOTE_IN_QUOTES = 3
+    local state = STATE_DEFAULT
+    local i = 1
+    local len = #csv
+    while i <= len do
+        local c = csv:sub(i, i)
+        if state == STATE_DEFAULT then
+            if c == '"' then
+                state = STATE_IN_QUOTES
+                i = i + 1
+            elseif c == ',' then
+                table.insert(fields, table.concat(field))
+                field = {}
+                i = i + 1
+            elseif c == '\r' or c == '\n' then
+                table.insert(fields, table.concat(field))
+                field = {}
+                table.insert(rows, fields)
+                fields = {}
+                if c == '\r' and i < len and csv:sub(i + 1, i + 1) == '\n' then
+                    i = i + 2
+                else
+                    i = i + 1
+                end
+            else
+                table.insert(field, c)
+                i = i + 1
+            end
+        elseif state == STATE_IN_QUOTES then
+            if c == '"' then
+                state = STATE_QUOTE_IN_QUOTES
+                i = i + 1
+            else
+                table.insert(field, c)
+                i = i + 1
+            end
+        else -- STATE_QUOTE_IN_QUOTES
+            if c == '"' then
+                table.insert(field, '"')
+                state = STATE_IN_QUOTES
+                i = i + 1
+            elseif c == ',' then
+                table.insert(fields, table.concat(field))
+                field = {}
+                state = STATE_DEFAULT
+                i = i + 1
+            elseif c == '\r' or c == '\n' then
+                table.insert(fields, table.concat(field))
+                field = {}
+                table.insert(rows, fields)
+                fields = {}
+                state = STATE_DEFAULT
+                if c == '\r' and i < len and csv:sub(i + 1, i + 1) == '\n' then
+                    i = i + 2
+                else
+                    i = i + 1
+                end
+            else
+                state = STATE_DEFAULT
+                -- Don't increment i, reprocess character in DEFAULT state
+            end
+        end
+    end
+    if #field > 0 or #fields > 0 then
+        table.insert(fields, table.concat(field))
+        table.insert(rows, fields)
+    end
+    return rows
+end

 -- String to number conversion helper
 function num(str)
     return tonumber(str) or 0
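As an illustration only (not part of the change above), here is a minimal sketch of how the new parseCSV helper might be called, assuming it is exposed to user scripts the way the help text below indicates; the sample CSV and variable names are made up and exist only to exercise the documented behaviors (quoted fields, doubled quotes, CRLF line breaks, and a final row without a trailing newline):

-- Illustrative sketch, hypothetical input.
local csv = 'name,note\r\n"Smith, Jane","She said ""hi"""\nlast,row'
local rows = parseCSV(csv)
-- rows[1] --> { "name", "note" }
-- rows[2] --> { "Smith, Jane", 'She said "hi"' }
-- rows[3] --> { "last", "row" }
for _, row in ipairs(rows) do
    print(table.concat(row, " | "))
end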
@@ -531,12 +629,13 @@ STRING FUNCTIONS:
 format(s, ...) - Formats string using Lua string.format
 trim(s) - Removes leading/trailing whitespace
 strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
+parseCSV(csv) - Parses CSV text into rows of fields
 num(str) - Converts string to number (returns 0 if invalid)
 str(num) - Converts number to string
 is_number(str) - Returns true if string is numeric

 TABLE FUNCTIONS:
-DumpTable(table, depth) - Prints table structure recursively
+dump(table, depth) - Prints table structure recursively
 isArray(t) - Returns true if table is a sequential array

 HTTP FUNCTIONS:
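A second small sketch, also illustrative only, shows parseCSV combined with the renamed dump and the other helpers listed above; the implementations of format, trim, num, and is_number are not shown in this diff, so the sketch assumes they behave exactly as the help text describes:

-- Illustrative sketch; helper behavior assumed from the help text, sample data made up.
local rows = parseCSV("scroll,3\nwand,n/a")
for _, row in ipairs(rows) do
    if is_number(row[2]) then
        print(format("%s x %d", trim(row[1]), num(row[2])))
    end
end
dump(rows) -- prints the nested row/field structure recursively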