Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | efc602e0ba |  |
|  | 917063db0c |  |
|  | 3e552428a5 |  |
|  | 50455c491d |  |
|  | 12ec399b09 |  |
|  | 5a49998c2c |  |
213
processor/luahelper-test.lua
Normal file
@@ -0,0 +1,213 @@
-- Load the helper script
dofile("luahelper.lua")

-- Test helper function
local function assert(condition, message)
    if not condition then error("ASSERTION FAILED: " .. (message or "unknown error")) end
end

local function test(name, fn)
    local ok, err = pcall(fn)
    if ok then
        print("PASS: " .. name)
    else
        print("FAIL: " .. name .. " - " .. tostring(err))
    end
end

-- Test fromCSV basic parsing
test("fromCSV basic", function()
    local csv = "a,b,c\n1,2,3\n4,5,6"
    local rows = fromCSV(csv)
    assert(#rows == 3, "Should have 3 rows")
    assert(rows[1][1] == "a", "First row first field should be 'a'")
    assert(rows[2][2] == "2", "Second row second field should be '2'")
end)

-- Test fromCSV with headers
test("fromCSV with headers", function()
    local csv = "foo,bar,baz\n1,2,3\n4,5,6"
    local rows = fromCSV(csv, ",", true)
    assert(#rows == 2, "Should have 2 data rows")
    assert(rows[1][1] == "1", "First row first field should be '1'")
    assert(rows[1].foo == "1", "First row foo should be '1'")
    assert(rows[1].bar == "2", "First row bar should be '2'")
    assert(rows[1].baz == "3", "First row baz should be '3'")
end)

-- Test fromCSV with custom delimiter
test("fromCSV with tab delimiter", function()
    local csv = "a\tb\tc\n1\t2\t3"
    local rows = fromCSV(csv, "\t")
    assert(#rows == 2, "Should have 2 rows")
    assert(rows[1][1] == "a", "First row first field should be 'a'")
    assert(rows[2][2] == "2", "Second row second field should be '2'")
end)

-- Test fromCSV with quoted fields
test("fromCSV with quoted fields", function()
    local csv = '"hello,world","test"\n"foo","bar"'
    local rows = fromCSV(csv)
    assert(#rows == 2, "Should have 2 rows")
    assert(rows[1][1] == "hello,world", "Quoted field with comma should be preserved")
    assert(rows[1][2] == "test", "Second field should be 'test'")
end)

-- Test toCSV basic
test("toCSV basic", function()
    local rows = { { "a", "b", "c" }, { "1", "2", "3" } }
    local csv = toCSV(rows)
    assert(csv == "a,b,c\n1,2,3", "CSV output should match expected")
end)

-- Test toCSV with custom delimiter
test("toCSV with tab delimiter", function()
    local rows = { { "a", "b", "c" }, { "1", "2", "3" } }
    local csv = toCSV(rows, "\t")
    assert(csv == "a\tb\tc\n1\t2\t3", "TSV output should match expected")
end)

-- Test toCSV with fields needing quoting
test("toCSV with quoted fields", function()
    local rows = { { "hello,world", "test" }, { "foo", "bar" } }
    local csv = toCSV(rows)
    assert(csv == '"hello,world",test\nfoo,bar', "Fields with commas should be quoted")
end)

-- Test round trip
test("fromCSV toCSV round trip", function()
    local original = "a,b,c\n1,2,3\n4,5,6"
    local rows = fromCSV(original)
    local csv = toCSV(rows)
    assert(csv == original, "Round trip should preserve original")
end)

-- Test round trip with headers
test("fromCSV toCSV round trip with headers", function()
    local original = "foo,bar,baz\n1,2,3\n4,5,6"
    local rows = fromCSV(original, ",", true)
    local csv = toCSV(rows)
    local expected = "1,2,3\n4,5,6"
    assert(csv == expected, "Round trip with headers should preserve data rows")
end)

-- Math function tests
test("min function", function()
    assert(min(5, 3) == 3, "min(5, 3) should be 3")
    assert(min(-1, 0) == -1, "min(-1, 0) should be -1")
    assert(min(10, 10) == 10, "min(10, 10) should be 10")
end)

test("max function", function()
    assert(max(5, 3) == 5, "max(5, 3) should be 5")
    assert(max(-1, 0) == 0, "max(-1, 0) should be 0")
    assert(max(10, 10) == 10, "max(10, 10) should be 10")
end)

test("round function", function()
    assert(round(3.14159) == 3, "round(3.14159) should be 3")
    assert(round(3.14159, 2) == 3.14, "round(3.14159, 2) should be 3.14")
    assert(round(3.5) == 4, "round(3.5) should be 4")
    assert(round(3.4) == 3, "round(3.4) should be 3")
    assert(round(123.456, 1) == 123.5, "round(123.456, 1) should be 123.5")
end)

test("floor function", function()
    assert(floor(3.7) == 3, "floor(3.7) should be 3")
    assert(floor(-3.7) == -4, "floor(-3.7) should be -4")
    assert(floor(5) == 5, "floor(5) should be 5")
end)

test("ceil function", function()
    assert(ceil(3.2) == 4, "ceil(3.2) should be 4")
    assert(ceil(-3.2) == -3, "ceil(-3.2) should be -3")
    assert(ceil(5) == 5, "ceil(5) should be 5")
end)

-- String function tests
test("upper function", function()
    assert(upper("hello") == "HELLO", "upper('hello') should be 'HELLO'")
    assert(upper("Hello World") == "HELLO WORLD", "upper('Hello World') should be 'HELLO WORLD'")
    assert(upper("123abc") == "123ABC", "upper('123abc') should be '123ABC'")
end)

test("lower function", function()
    assert(lower("HELLO") == "hello", "lower('HELLO') should be 'hello'")
    assert(lower("Hello World") == "hello world", "lower('Hello World') should be 'hello world'")
    assert(lower("123ABC") == "123abc", "lower('123ABC') should be '123abc'")
end)

test("format function", function()
    assert(format("Hello %s", "World") == "Hello World", "format should work")
    assert(format("Number: %d", 42) == "Number: 42", "format with number should work")
    assert(format("%.2f", 3.14159) == "3.14", "format with float should work")
end)

test("trim function", function()
    assert(trim(" hello ") == "hello", "trim should remove leading and trailing spaces")
    assert(trim(" hello world ") == "hello world", "trim should preserve internal spaces")
    assert(trim("hello") == "hello", "trim should not affect strings without spaces")
    assert(trim(" ") == "", "trim should handle all spaces")
end)

test("strsplit function", function()
    local result = strsplit("a,b,c", ",")
    assert(#result == 3, "strsplit should return 3 elements")
    assert(result[1] == "a", "First element should be 'a'")
    assert(result[2] == "b", "Second element should be 'b'")
    assert(result[3] == "c", "Third element should be 'c'")
end)

test("strsplit with default separator", function()
    local result = strsplit("a b c")
    assert(#result == 3, "strsplit with default should return 3 elements")
    assert(result[1] == "a", "First element should be 'a'")
    assert(result[2] == "b", "Second element should be 'b'")
    assert(result[3] == "c", "Third element should be 'c'")
end)

test("strsplit with custom separator", function()
    local result = strsplit("a|b|c", "|")
    assert(#result == 3, "strsplit with pipe should return 3 elements")
    assert(result[1] == "a", "First element should be 'a'")
    assert(result[2] == "b", "Second element should be 'b'")
    assert(result[3] == "c", "Third element should be 'c'")
end)

-- Conversion function tests
test("num function", function()
    assert(num("123") == 123, "num('123') should be 123")
    assert(num("45.67") == 45.67, "num('45.67') should be 45.67")
    assert(num("invalid") == 0, "num('invalid') should be 0")
    assert(num("") == 0, "num('') should be 0")
end)

test("str function", function()
    assert(str(123) == "123", "str(123) should be '123'")
    assert(str(45.67) == "45.67", "str(45.67) should be '45.67'")
    assert(str(0) == "0", "str(0) should be '0'")
end)

test("is_number function", function()
    assert(is_number("123") == true, "is_number('123') should be true")
    assert(is_number("45.67") == true, "is_number('45.67') should be true")
    assert(is_number("invalid") == false, "is_number('invalid') should be false")
    assert(is_number("") == false, "is_number('') should be false")
    assert(is_number("123abc") == false, "is_number('123abc') should be false")
end)

-- Table function tests
test("isArray function", function()
    assert(isArray({ 1, 2, 3 }) == true, "isArray should return true for sequential array")
    assert(isArray({ "a", "b", "c" }) == true, "isArray should return true for string array")
    assert(isArray({}) == true, "isArray should return true for empty array")
    assert(isArray({ a = 1, b = 2 }) == false, "isArray should return false for map")
    assert(isArray({ 1, 2, [4] = 4 }) == false, "isArray should return false for sparse array")
    assert(isArray({ [1] = 1, [2] = 2, [3] = 3 }) == true, "isArray should return true for 1-indexed array")
    assert(isArray({ [0] = 1, [1] = 2 }) == false, "isArray should return false for 0-indexed array")
    assert(isArray({ [1] = 1, [2] = 2, [4] = 4 }) == false, "isArray should return false for non-sequential array")
    assert(isArray("not a table") == false, "isArray should return false for non-table")
    assert(isArray(123) == false, "isArray should return false for number")
end)

print("\nAll tests completed!")
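Since the suite above only dofile-loads luahelper.lua and uses standard library functions, it should presumably be runnable standalone from the processor/ directory with a stock Lua interpreter, e.g. `lua luahelper-test.lua` (assuming no project-specific globals are needed beyond the helper script).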
235
processor/luahelper.lua
Normal file
@@ -0,0 +1,235 @@
-- Custom Lua helpers for math operations
function min(a, b) return math.min(a, b) end

function max(a, b) return math.max(a, b) end

function round(x, n)
    if n == nil then n = 0 end
    return math.floor(x * 10 ^ n + 0.5) / 10 ^ n
end

function floor(x) return math.floor(x) end

function ceil(x) return math.ceil(x) end

function upper(s) return string.upper(s) end

function lower(s) return string.lower(s) end

function format(s, ...) return string.format(s, ...) end

function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end

-- String split helper
function strsplit(inputstr, sep)
    if sep == nil then sep = "%s" end
    local t = {}
    for str in string.gmatch(inputstr, "([^" .. sep .. "]+)") do
        table.insert(t, str)
    end
    return t
end

---@param table table
---@param depth number?
function dump(table, depth)
    if depth == nil then depth = 0 end
    if depth > 200 then
        print("Error: Depth > 200 in dump()")
        return
    end
    for k, v in pairs(table) do
        if type(v) == "table" then
            print(string.rep(" ", depth) .. k .. ":")
            dump(v, depth + 1)
        else
            print(string.rep(" ", depth) .. k .. ": ", v)
        end
    end
end

--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
---
--- Requirements/assumptions:
--- - Input is a single string containing the entire CSV content.
--- - Field separators are specified by the delimiter parameter (default: comma).
--- - Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.
--- - Fields may be quoted with double quotes (").
--- - Inside quoted fields, doubled quotes ("") represent a literal quote character.
--- - No backslash escaping is supported (not part of RFC 4180).
--- - Newlines inside quoted fields are preserved as part of the field.
--- - Leading/trailing spaces are preserved; no trimming is performed.
--- - Empty fields and empty rows are preserved.
--- - The final row is emitted even if the text does not end with a newline.
---
--- @param csv string The CSV text to parse.
--- @param delimiter string? The field delimiter (default: ",").
--- @param hasHeaders boolean? If true, the first row is treated as headers and rows can be accessed by header name (default: false).
--- @return table A table (array) of rows; each row is a table with numeric indices and optionally header-named keys.
function fromCSV(csv, delimiter, hasHeaders)
    if delimiter == nil then delimiter = "," end
    if hasHeaders == nil then hasHeaders = false end
    local allRows = {}
    local fields = {}
    local field = {}

    local STATE_DEFAULT = 1
    local STATE_IN_QUOTES = 2
    local STATE_QUOTE_IN_QUOTES = 3
    local state = STATE_DEFAULT

    local i = 1
    local len = #csv

    while i <= len do
        local c = csv:sub(i, i)

        if state == STATE_DEFAULT then
            if c == '"' then
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                table.insert(fields, table.concat(field))
                field = {}
                i = i + 1
            elseif c == "\r" or c == "\n" then
                table.insert(fields, table.concat(field))
                field = {}
                table.insert(allRows, fields)
                fields = {}
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                table.insert(field, c)
                i = i + 1
            end
        elseif state == STATE_IN_QUOTES then
            if c == '"' then
                state = STATE_QUOTE_IN_QUOTES
                i = i + 1
            else
                table.insert(field, c)
                i = i + 1
            end
        else -- STATE_QUOTE_IN_QUOTES
            if c == '"' then
                table.insert(field, '"')
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == delimiter then
                table.insert(fields, table.concat(field))
                field = {}
                state = STATE_DEFAULT
                i = i + 1
            elseif c == "\r" or c == "\n" then
                table.insert(fields, table.concat(field))
                field = {}
                table.insert(allRows, fields)
                fields = {}
                state = STATE_DEFAULT
                if c == "\r" and i < len and csv:sub(i + 1, i + 1) == "\n" then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                state = STATE_DEFAULT
                -- Don't increment i, reprocess character in DEFAULT state
            end
        end
    end

    if #field > 0 or #fields > 0 then
        table.insert(fields, table.concat(field))
        table.insert(allRows, fields)
    end

    if hasHeaders and #allRows > 0 then
        local headers = allRows[1]
        local rows = {}
        for i = 2, #allRows do
            local row = {}
            local dataRow = allRows[i]
            for j = 1, #dataRow do
                row[j] = dataRow[j]
                if headers[j] ~= nil and headers[j] ~= "" then row[headers[j]] = dataRow[j] end
            end
            table.insert(rows, row)
        end
        return rows
    end

    return allRows
end

--- Converts a table of rows back to CSV text format (RFC 4180 compliant).
---
--- Requirements:
--- - Input is a table (array) of rows, where each row is a table (array) of field values.
--- - Field values are converted to strings using tostring().
--- - Fields are quoted if they contain the delimiter, newlines, or double quotes.
--- - Double quotes inside quoted fields are doubled ("").
--- - Fields are joined with the specified delimiter; rows are joined with newlines.
---
--- @param rows table Array of rows, where each row is an array of field values.
--- @param delimiter string? The field delimiter (default: ",").
--- @return string CSV-formatted text.
function toCSV(rows, delimiter)
    if delimiter == nil then delimiter = "," end
    local rowStrings = {}

    for _, row in ipairs(rows) do
        local fieldStrings = {}

        for _, field in ipairs(row) do
            local fieldStr = tostring(field)
            local needsQuoting = false

            if
                fieldStr:find(delimiter)
                or fieldStr:find("\n")
                or fieldStr:find("\r")
                or fieldStr:find('"')
            then
                needsQuoting = true
            end

            if needsQuoting then
                fieldStr = fieldStr:gsub('"', '""')
                fieldStr = '"' .. fieldStr .. '"'
            end

            table.insert(fieldStrings, fieldStr)
        end

        table.insert(rowStrings, table.concat(fieldStrings, delimiter))
    end

    return table.concat(rowStrings, "\n")
end

-- String to number conversion helper
function num(str) return tonumber(str) or 0 end

-- Number to string conversion
function str(num) return tostring(num) end

-- Check if string is numeric
function is_number(str) return tonumber(str) ~= nil end

function isArray(t)
    if type(t) ~= "table" then return false end
    local max = 0
    local count = 0
    for k, _ in pairs(t) do
        if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then return false end
        max = math.max(max, k)
        count = count + 1
    end
    return max == count
end

modified = false
@@ -1,6 +1,7 @@
package processor

import (
    _ "embed"
    "fmt"
    "io"
    "net/http"
@@ -13,6 +14,9 @@ import (
    lua "github.com/yuin/gopher-lua"
)

//go:embed luahelper.lua
var helperScript string

// processorLogger is a scoped logger for the processor package.
var processorLogger = logger.Default.WithPrefix("processor")

@@ -160,182 +164,6 @@ func InitLuaHelpers(L *lua.LState) error {
    initLuaHelpersLogger := processorLogger.WithPrefix("InitLuaHelpers")
    initLuaHelpersLogger.Debug("Loading Lua helper functions")

    helperScript := `
-- Custom Lua helpers for math operations
function min(a, b) return math.min(a, b) end
function max(a, b) return math.max(a, b) end
function round(x, n)
    if n == nil then n = 0 end
    return math.floor(x * 10^n + 0.5) / 10^n
end
function floor(x) return math.floor(x) end
function ceil(x) return math.ceil(x) end
function upper(s) return string.upper(s) end
function lower(s) return string.lower(s) end
function format(s, ...) return string.format(s, ...) end
function trim(s) return string.gsub(s, "^%s*(.-)%s*$", "%1") end

-- String split helper
function strsplit(inputstr, sep)
    if sep == nil then
        sep = "%s"
    end
    local t = {}
    for str in string.gmatch(inputstr, "([^"..sep.."]+)") do
        table.insert(t, str)
    end
    return t
end

---@param table table
---@param depth number?
function dump(table, depth)
    if depth == nil then
        depth = 0
    end
    if (depth > 200) then
        print("Error: Depth > 200 in dump()")
        return
    end
    for k, v in pairs(table) do
        if (type(v) == "table") then
            print(string.rep(" ", depth) .. k .. ":")
            dump(v, depth + 1)
        else
            print(string.rep(" ", depth) .. k .. ": ", v)
        end
    end
end

--- Parses CSV text into rows and fields using a minimal RFC 4180 state machine.
---
--- Requirements/assumptions:
--- - Input is a single string containing the entire CSV content.
--- - Field separators are commas (,).
--- - Newlines between rows may be "\n" or "\r\n". "\r\n" is treated as one line break.
--- - Fields may be quoted with double quotes (").
--- - Inside quoted fields, doubled quotes ("") represent a literal quote character.
--- - No backslash escaping is supported (not part of RFC 4180).
--- - Newlines inside quoted fields are preserved as part of the field.
--- - Leading/trailing spaces are preserved; no trimming is performed.
--- - Empty fields and empty rows are preserved.
--- - The final row is emitted even if the text does not end with a newline.
---
--- Returns:
--- - A table (array) of rows; each row is a table (array) of string fields.
function parseCSV(csv)
    local rows = {}
    local fields = {}
    local field = {}

    local STATE_DEFAULT = 1
    local STATE_IN_QUOTES = 2
    local STATE_QUOTE_IN_QUOTES = 3
    local state = STATE_DEFAULT

    local i = 1
    local len = #csv

    while i <= len do
        local c = csv:sub(i, i)

        if state == STATE_DEFAULT then
            if c == '"' then
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == ',' then
                table.insert(fields, table.concat(field))
                field = {}
                i = i + 1
            elseif c == '\r' or c == '\n' then
                table.insert(fields, table.concat(field))
                field = {}
                table.insert(rows, fields)
                fields = {}
                if c == '\r' and i < len and csv:sub(i + 1, i + 1) == '\n' then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                table.insert(field, c)
                i = i + 1
            end
        elseif state == STATE_IN_QUOTES then
            if c == '"' then
                state = STATE_QUOTE_IN_QUOTES
                i = i + 1
            else
                table.insert(field, c)
                i = i + 1
            end
        else -- STATE_QUOTE_IN_QUOTES
            if c == '"' then
                table.insert(field, '"')
                state = STATE_IN_QUOTES
                i = i + 1
            elseif c == ',' then
                table.insert(fields, table.concat(field))
                field = {}
                state = STATE_DEFAULT
                i = i + 1
            elseif c == '\r' or c == '\n' then
                table.insert(fields, table.concat(field))
                field = {}
                table.insert(rows, fields)
                fields = {}
                state = STATE_DEFAULT
                if c == '\r' and i < len and csv:sub(i + 1, i + 1) == '\n' then
                    i = i + 2
                else
                    i = i + 1
                end
            else
                state = STATE_DEFAULT
                -- Don't increment i, reprocess character in DEFAULT state
            end
        end
    end

    if #field > 0 or #fields > 0 then
        table.insert(fields, table.concat(field))
        table.insert(rows, fields)
    end

    return rows
end

-- String to number conversion helper
function num(str)
    return tonumber(str) or 0
end

-- Number to string conversion
function str(num)
    return tostring(num)
end

-- Check if string is numeric
function is_number(str)
    return tonumber(str) ~= nil
end

function isArray(t)
    if type(t) ~= "table" then return false end
    local max = 0
    local count = 0
    for k, _ in pairs(t) do
        if type(k) ~= "number" or k < 1 or math.floor(k) ~= k then
            return false
        end
        max = math.max(max, k)
        count = count + 1
    end
    return max == count
end

modified = false
`
    if err := L.DoString(helperScript); err != nil {
        initLuaHelpersLogger.Error("Failed to load Lua helper functions: %v", err)
        return fmt.Errorf("error loading helper functions: %v", err)
@@ -483,9 +311,9 @@ func fetch(L *lua.LState) int {
    fetchLogger.Debug("Fetching URL: %q", url)

    // Get options from second argument if provided
    var method string = "GET"
    var headers map[string]string = make(map[string]string)
    var body string = ""
    var method = "GET"
    var headers = make(map[string]string)
    var body = ""

    if L.GetTop() > 1 {
        options := L.ToTable(2)
@@ -629,7 +457,8 @@ STRING FUNCTIONS:
format(s, ...) - Formats string using Lua string.format
trim(s) - Removes leading/trailing whitespace
strsplit(inputstr, sep) - Splits string by separator (default: whitespace)
parseCSV(csv) - Parses CSV text into rows of fields
fromCSV(csv, delimiter, hasHeaders) - Parses CSV text into rows of fields (delimiter defaults to ",", hasHeaders defaults to false)
toCSV(rows, delimiter) - Converts table of rows to CSV text format (delimiter defaults to ",")
num(str) - Converts string to number (returns 0 if invalid)
str(num) - Converts number to string
is_number(str) - Returns true if string is numeric
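For orientation, a minimal sketch of how an embedded helper script like the one above can be loaded into a gopher-lua state and exercised from Go. The embed directive and the DoString call mirror the diff; the surrounding main function and the fromCSV call are illustrative only.

package main

import (
    _ "embed"
    "fmt"

    lua "github.com/yuin/gopher-lua"
)

//go:embed luahelper.lua
var helperScript string

func main() {
    L := lua.NewState()
    defer L.Close()

    // Load the helper functions into the Lua state, as InitLuaHelpers does.
    if err := L.DoString(helperScript); err != nil {
        panic(err)
    }

    // Call fromCSV("a,b\n1,2") and print the number of parsed rows.
    if err := L.CallByParam(lua.P{
        Fn:      L.GetGlobal("fromCSV"),
        NRet:    1,
        Protect: true,
    }, lua.LString("a,b\n1,2")); err != nil {
        panic(err)
    }
    rows := L.Get(-1).(*lua.LTable)
    L.Pop(1)
    fmt.Println(rows.Len()) // 2
}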
@@ -2,7 +2,6 @@ package utils

import (
    "os"
    "path/filepath"
    "strconv"
    "strings"

@@ -13,34 +12,13 @@ import (
var fileLogger = logger.Default.WithPrefix("utils/file")

func CleanPath(path string) string {
    cleanPathLogger := fileLogger.WithPrefix("CleanPath")
    cleanPathLogger.Debug("Cleaning path: %q", path)
    cleanPathLogger.Trace("Original path: %q", path)
    path = filepath.Clean(path)
    path = strings.ReplaceAll(path, "\\", "/")
    cleanPathLogger.Trace("Cleaned path result: %q", path)
    return path
    // Use the centralized ResolvePath function
    return ResolvePath(path)
}

func ToAbs(path string) string {
    toAbsLogger := fileLogger.WithPrefix("ToAbs")
    toAbsLogger.Debug("Converting path to absolute: %q", path)
    toAbsLogger.Trace("Input path: %q", path)
    if filepath.IsAbs(path) {
        toAbsLogger.Debug("Path is already absolute, cleaning it.")
        cleanedPath := CleanPath(path)
        toAbsLogger.Trace("Already absolute path after cleaning: %q", cleanedPath)
        return cleanedPath
    }
    cwd, err := os.Getwd()
    if err != nil {
        toAbsLogger.Error("Error getting current working directory: %v", err)
        return CleanPath(path)
    }
    toAbsLogger.Trace("Current working directory: %q", cwd)
    cleanedPath := CleanPath(filepath.Join(cwd, path))
    toAbsLogger.Trace("Converted absolute path result: %q", cleanedPath)
    return cleanedPath
    // Use the centralized ResolvePath function
    return ResolvePath(path)
}

// LimitString truncates a string to maxLen and adds "..." if truncated

@@ -85,25 +85,27 @@ func SplitPattern(pattern string) (string, string) {
    splitPatternLogger := modifyCommandLogger.WithPrefix("SplitPattern").WithField("pattern", pattern)
    splitPatternLogger.Debug("Splitting pattern")
    splitPatternLogger.Trace("Original pattern: %q", pattern)
    static, pattern := doublestar.SplitPattern(pattern)

    cwd, err := os.Getwd()
    if err != nil {
        splitPatternLogger.Error("Error getting current working directory: %v", err)
        return "", ""
    }
    splitPatternLogger.Trace("Current working directory: %q", cwd)
    // Resolve the pattern first to handle ~ expansion and make it absolute
    resolvedPattern := ResolvePath(pattern)
    splitPatternLogger.Trace("Resolved pattern: %q", resolvedPattern)

    static, pattern := doublestar.SplitPattern(resolvedPattern)

    // Ensure static part is properly resolved
    if static == "" {
        splitPatternLogger.Debug("Static part is empty, defaulting to current working directory")
        cwd, err := os.Getwd()
        if err != nil {
            splitPatternLogger.Error("Error getting current working directory: %v", err)
            return "", ""
        }
        static = cwd
        splitPatternLogger.Debug("Static part is empty, defaulting to current working directory: %q", static)
    } else {
        // Static part should already be resolved by ResolvePath
        static = strings.ReplaceAll(static, "\\", "/")
    }
    if !filepath.IsAbs(static) {
        splitPatternLogger.Debug("Static part is not absolute, joining with current working directory")
        static = filepath.Join(cwd, static)
        static = filepath.Clean(static)
        splitPatternLogger.Trace("Static path after joining and cleaning: %q", static)
    }
    static = strings.ReplaceAll(static, "\\", "/")

    splitPatternLogger.Trace("Final static path: %q, Remaining pattern: %q", static, pattern)
    return static, pattern
}
@@ -123,33 +125,23 @@ func AssociateFilesWithCommands(files []string, commands []ModifyCommand) (map[s
    fileCommands := make(map[string]FileCommandAssociation)

    for _, file := range files {
        file = strings.ReplaceAll(file, "\\", "/")
        associateFilesLogger.Debug("Processing file: %q", file)
        // Use centralized path resolution internally but keep original file as key
        resolvedFile := ResolvePath(file)
        associateFilesLogger.Debug("Processing file: %q (resolved: %q)", file, resolvedFile)
        fileCommands[file] = FileCommandAssociation{
            File: file,
            File: resolvedFile,
            IsolateCommands: []ModifyCommand{},
            Commands: []ModifyCommand{},
        }
        for _, command := range commands {
            associateFilesLogger.Debug("Checking command %q for file %q", command.Name, file)
            for _, glob := range command.Files {
                glob = strings.ReplaceAll(glob, "\\", "/")
                // SplitPattern now handles tilde expansion and path resolution
                static, pattern := SplitPattern(glob)
                associateFilesLogger.Trace("Glob parts for %q → static=%q pattern=%q", glob, static, pattern)

                // Build absolute path for the current file to compare with static
                cwd, err := os.Getwd()
                if err != nil {
                    associateFilesLogger.Warning("Failed to get CWD when matching %q for file %q: %v", glob, file, err)
                    continue
                }
                var absFile string
                if filepath.IsAbs(file) {
                    absFile = filepath.Clean(file)
                } else {
                    absFile = filepath.Clean(filepath.Join(cwd, file))
                }
                absFile = strings.ReplaceAll(absFile, "\\", "/")
                // Use resolved file for matching
                absFile := resolvedFile
                associateFilesLogger.Trace("Absolute file path resolved for matching: %q", absFile)

                // Only match if the file is under the static root

@@ -200,9 +192,14 @@ func AggregateGlobs(commands []ModifyCommand) map[string]struct{} {
    for _, command := range commands {
        aggregateGlobsLogger.Debug("Processing command %q for glob patterns", command.Name)
        for _, glob := range command.Files {
            resolvedGlob := strings.Replace(glob, "~", os.Getenv("HOME"), 1)
            resolvedGlob = strings.ReplaceAll(resolvedGlob, "\\", "/")
            aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q)", glob, resolvedGlob)
            // Split the glob into static and pattern parts, then resolve ONLY the static part
            static, pattern := SplitPattern(glob)
            // Reconstruct the glob with resolved static part
            resolvedGlob := static
            if pattern != "" {
                resolvedGlob += "/" + pattern
            }
            aggregateGlobsLogger.Trace("Adding glob: %q (resolved to %q) [static=%s, pattern=%s]", glob, resolvedGlob, static, pattern)
            globs[resolvedGlob] = struct{}{}
        }
    }
@@ -236,15 +233,16 @@ func ExpandGLobs(patterns map[string]struct{}) ([]string, error) {
    expandGlobsLogger.Debug("Found %d matches for pattern %q", len(matches), pattern)
    expandGlobsLogger.Trace("Raw matches for pattern %q: %v", pattern, matches)
    for _, m := range matches {
        m = filepath.Join(static, m)
        info, err := os.Stat(m)
        // Resolve the full path
        fullPath := ResolvePath(filepath.Join(static, m))
        info, err := os.Stat(fullPath)
        if err != nil {
            expandGlobsLogger.Warning("Error getting file info for %q: %v", m, err)
            expandGlobsLogger.Warning("Error getting file info for %q: %v", fullPath, err)
            continue
        }
        if !info.IsDir() && !filesMap[m] {
            expandGlobsLogger.Trace("Adding unique file to list: %q", m)
            filesMap[m], files = true, append(files, m)
        if !info.IsDir() && !filesMap[fullPath] {
            expandGlobsLogger.Trace("Adding unique file to list: %q", fullPath)
            filesMap[fullPath], files = true, append(files, fullPath)
        }
    }
}
@@ -317,9 +315,8 @@ func LoadCommandsFromCookFiles(pattern string) ([]ModifyCommand, error) {
    loadCookFilesLogger.Trace("Cook files found: %v", cookFiles)

    for _, cookFile := range cookFiles {
        cookFile = filepath.Join(static, cookFile)
        cookFile = filepath.Clean(cookFile)
        cookFile = strings.ReplaceAll(cookFile, "\\", "/")
        // Use centralized path resolution
        cookFile = ResolvePath(filepath.Join(static, cookFile))
        loadCookFilesLogger.Debug("Loading commands from individual cook file: %q", cookFile)

        cookFileData, err := os.ReadFile(cookFile)
@@ -406,9 +403,8 @@ func LoadCommandsFromTomlFiles(pattern string) ([]ModifyCommand, error) {
    loadTomlFilesLogger.Trace("TOML files found: %v", tomlFiles)

    for _, tomlFile := range tomlFiles {
        tomlFile = filepath.Join(static, tomlFile)
        tomlFile = filepath.Clean(tomlFile)
        tomlFile = strings.ReplaceAll(tomlFile, "\\", "/")
        // Use centralized path resolution
        tomlFile = ResolvePath(filepath.Join(static, tomlFile))
        loadTomlFilesLogger.Debug("Loading commands from individual TOML file: %q", tomlFile)

        tomlFileData, err := os.ReadFile(tomlFile)
@@ -504,9 +500,8 @@ func ConvertYAMLToTOML(yamlPattern string) error {
    skippedCount := 0

    for _, yamlFile := range yamlFiles {
        yamlFilePath := filepath.Join(static, yamlFile)
        yamlFilePath = filepath.Clean(yamlFilePath)
        yamlFilePath = strings.ReplaceAll(yamlFilePath, "\\", "/")
        // Use centralized path resolution
        yamlFilePath := ResolvePath(filepath.Join(static, yamlFile))

        // Generate corresponding TOML file path
        tomlFilePath := strings.TrimSuffix(yamlFilePath, filepath.Ext(yamlFilePath)) + ".toml"

@@ -251,11 +251,19 @@ func TestAggregateGlobs(t *testing.T) {

    globs := AggregateGlobs(commands)

    // Now we properly resolve only the static part of globs
    // *.xml has no static part (current dir), so it becomes resolved_dir/*.xml
    // *.txt has no static part (current dir), so it becomes resolved_dir/*.txt
    // *.json has no static part (current dir), so it becomes resolved_dir/*.json
    // subdir/*.xml has static "subdir", so it becomes resolved_dir/subdir/*.xml
    cwd, _ := os.Getwd()
    resolvedCwd := ResolvePath(cwd)

    expected := map[string]struct{}{
        "*.xml": {},
        "*.txt": {},
        "*.json": {},
        "subdir/*.xml": {},
        resolvedCwd + "/*.xml": {},
        resolvedCwd + "/*.txt": {},
        resolvedCwd + "/*.json": {},
        resolvedCwd + "/subdir/*.xml": {},
    }

    if len(globs) != len(expected) {
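To make the intent of the updated assertions concrete, a rough illustrative sketch in the same package as the test, assuming ModifyCommand exposes the Name and Files fields used elsewhere in this diff:

package utils

import (
    "fmt"
    "os"
    "strings"
)

// sketchAggregateGlobs illustrates the behaviour asserted above: only the
// static part of each glob is resolved, so bare patterns like "*.xml" gain
// the resolved working directory as a prefix.
func sketchAggregateGlobs() {
    commands := []ModifyCommand{{Name: "example", Files: []string{"*.xml", "subdir/*.xml"}}}
    globs := AggregateGlobs(commands)

    cwd, _ := os.Getwd()
    resolvedCwd := ResolvePath(cwd)
    for g := range globs {
        fmt.Println(g, strings.HasPrefix(g, resolvedCwd)) // both keys should print "true"
    }
}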
104
utils/path.go
Normal file
@@ -0,0 +1,104 @@
package utils

import (
    "os"
    "path/filepath"
    "runtime"
    "strings"

    logger "git.site.quack-lab.dev/dave/cylogger"
)

// pathLogger is a scoped logger for the utils/path package.
var pathLogger = logger.Default.WithPrefix("utils/path")

// ResolvePath resolves a file path by:
// 1. Expanding ~ to the user's home directory
// 2. Making the path absolute if it's relative
// 3. Normalizing path separators to forward slashes
// 4. Cleaning the path
func ResolvePath(path string) string {
    resolvePathLogger := pathLogger.WithPrefix("ResolvePath").WithField("inputPath", path)
    resolvePathLogger.Debug("Resolving path")

    if path == "" {
        resolvePathLogger.Warning("Empty path provided")
        return ""
    }

    // Step 1: Expand ~ to home directory
    originalPath := path
    if strings.HasPrefix(path, "~") {
        home := os.Getenv("HOME")
        if home == "" {
            // Fallback for Windows
            if runtime.GOOS == "windows" {
                home = os.Getenv("USERPROFILE")
            }
        }
        if home != "" {
            if path == "~" {
                path = home
            } else if strings.HasPrefix(path, "~/") {
                path = filepath.Join(home, path[2:])
            } else {
                // Handle cases like ~username
                // For now, just replace ~ with home directory
                path = strings.Replace(path, "~", home, 1)
            }
            resolvePathLogger.Debug("Expanded tilde to home directory: home=%s, result=%s", home, path)
        } else {
            resolvePathLogger.Warning("Could not determine home directory for tilde expansion")
        }
    }

    // Step 2: Make path absolute if it's not already
    if !filepath.IsAbs(path) {
        cwd, err := os.Getwd()
        if err != nil {
            resolvePathLogger.Error("Failed to get current working directory: %v", err)
            return path // Return as-is if we can't get CWD
        }
        path = filepath.Join(cwd, path)
        resolvePathLogger.Debug("Made relative path absolute: cwd=%s, result=%s", cwd, path)
    }

    // Step 3: Clean the path
    path = filepath.Clean(path)
    resolvePathLogger.Debug("Cleaned path: result=%s", path)

    // Step 4: Normalize path separators to forward slashes for consistency
    path = strings.ReplaceAll(path, "\\", "/")

    resolvePathLogger.Debug("Final resolved path: original=%s, final=%s", originalPath, path)
    return path
}

// ResolvePathForLogging is the same as ResolvePath but includes more detailed logging
// for debugging purposes
func ResolvePathForLogging(path string) string {
    return ResolvePath(path)
}

// IsAbsolutePath checks if a path is absolute (including tilde expansion)
func IsAbsolutePath(path string) bool {
    // Check for tilde expansion first
    if strings.HasPrefix(path, "~") {
        return true // Tilde paths become absolute after expansion
    }
    return filepath.IsAbs(path)
}

// GetRelativePath returns the relative path from base to target
func GetRelativePath(base, target string) (string, error) {
    resolvedBase := ResolvePath(base)
    resolvedTarget := ResolvePath(target)

    relPath, err := filepath.Rel(resolvedBase, resolvedTarget)
    if err != nil {
        return "", err
    }

    // Normalize to forward slashes
    return strings.ReplaceAll(relPath, "\\", "/"), nil
}
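A minimal usage sketch of the new helpers, assuming they are imported from this utils package (the module path below is a placeholder, since the real module path is not shown in this diff):

package main

import (
    "fmt"

    "example.local/cook/utils" // placeholder import path
)

func main() {
    // Tilde expansion, absolutisation, and separator normalisation in one call.
    cfg := utils.ResolvePath("~/Seafile/activitywatch/sync.yml")
    fmt.Println(cfg)

    // Relative path between two resolved locations.
    rel, err := utils.GetRelativePath("~", cfg)
    if err != nil {
        panic(err)
    }
    fmt.Println(rel) // Seafile/activitywatch/sync.yml
}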
432
utils/path_test.go
Normal file
@@ -0,0 +1,432 @@
package utils

import (
    "os"
    "path/filepath"
    "runtime"
    "strings"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestResolvePath(t *testing.T) {
    // Save original working directory
    origDir, _ := os.Getwd()
    defer os.Chdir(origDir)

    // Create a temporary directory for testing
    tmpDir, err := os.MkdirTemp("", "path_test")
    assert.NoError(t, err)
    defer os.RemoveAll(tmpDir)

    tests := []struct {
        name     string
        input    string
        expected string
        setup    func() // Optional setup function
    }{
        {
            name:     "Empty path",
            input:    "",
            expected: "",
        },
        {
            name: "Already absolute path",
            input: func() string {
                if runtime.GOOS == "windows" {
                    return "C:/absolute/path/file.txt"
                }
                return "/absolute/path/file.txt"
            }(),
            expected: func() string {
                if runtime.GOOS == "windows" {
                    return "C:/absolute/path/file.txt"
                }
                return "/absolute/path/file.txt"
            }(),
        },
        {
            name:  "Relative path",
            input: "relative/file.txt",
            expected: func() string {
                abs, _ := filepath.Abs("relative/file.txt")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
        {
            name:  "Tilde expansion - home only",
            input: "~",
            expected: func() string {
                home := os.Getenv("HOME")
                if home == "" && runtime.GOOS == "windows" {
                    home = os.Getenv("USERPROFILE")
                }
                return strings.ReplaceAll(filepath.Clean(home), "\\", "/")
            }(),
        },
        {
            name:  "Tilde expansion - with subpath",
            input: "~/Documents/file.txt",
            expected: func() string {
                home := os.Getenv("HOME")
                if home == "" && runtime.GOOS == "windows" {
                    home = os.Getenv("USERPROFILE")
                }
                expected := filepath.Join(home, "Documents", "file.txt")
                return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
            }(),
        },
        {
            name:  "Path normalization - double slashes",
            input: "path//to//file.txt",
            expected: func() string {
                abs, _ := filepath.Abs("path/to/file.txt")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
        {
            name:  "Path normalization - . and ..",
            input: "path/./to/../file.txt",
            expected: func() string {
                abs, _ := filepath.Abs("path/file.txt")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
        {
            name:  "Windows backslash normalization",
            input: "path\\to\\file.txt",
            expected: func() string {
                abs, _ := filepath.Abs("path/to/file.txt")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
        {
            name:  "Mixed separators with tilde",
            input: "~/Documents\\file.txt",
            expected: func() string {
                home := os.Getenv("HOME")
                if home == "" && runtime.GOOS == "windows" {
                    home = os.Getenv("USERPROFILE")
                }
                expected := filepath.Join(home, "Documents", "file.txt")
                return strings.ReplaceAll(filepath.Clean(expected), "\\", "/")
            }(),
        },
        {
            name:  "Relative path from current directory",
            input: "./file.txt",
            expected: func() string {
                abs, _ := filepath.Abs("file.txt")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            if tt.setup != nil {
                tt.setup()
            }

            result := ResolvePath(tt.input)
            assert.Equal(t, tt.expected, result, "ResolvePath(%q) = %q, want %q", tt.input, result, tt.expected)
        })
    }
}

func TestResolvePathWithWorkingDirectoryChange(t *testing.T) {
    // Save original working directory
    origDir, _ := os.Getwd()
    defer os.Chdir(origDir)

    // Create temporary directories
    tmpDir, err := os.MkdirTemp("", "path_test")
    assert.NoError(t, err)
    defer os.RemoveAll(tmpDir)

    subDir := filepath.Join(tmpDir, "subdir")
    err = os.MkdirAll(subDir, 0755)
    assert.NoError(t, err)

    // Change to subdirectory
    err = os.Chdir(subDir)
    assert.NoError(t, err)

    // Test relative path resolution from new working directory
    result := ResolvePath("../test.txt")
    expected := filepath.Join(tmpDir, "test.txt")
    expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")

    assert.Equal(t, expected, result)
}

func TestResolvePathComplexTilde(t *testing.T) {
    // Test complex tilde patterns
    home := os.Getenv("HOME")
    if home == "" && runtime.GOOS == "windows" {
        home = os.Getenv("USERPROFILE")
    }

    if home == "" {
        t.Skip("Cannot determine home directory for tilde expansion tests")
    }

    tests := []struct {
        input    string
        expected string
    }{
        {
            input:    "~",
            expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
        },
        {
            input:    "~/",
            expected: strings.ReplaceAll(filepath.Clean(home), "\\", "/"),
        },
        {
            input: "~~",
            expected: func() string {
                // ~~ should be treated as ~ followed by ~ (tilde expansion)
                home := os.Getenv("HOME")
                if home == "" && runtime.GOOS == "windows" {
                    home = os.Getenv("USERPROFILE")
                }
                if home != "" {
                    // First ~ gets expanded, second ~ remains
                    return strings.ReplaceAll(filepath.Clean(home+"~"), "\\", "/")
                }
                abs, _ := filepath.Abs("~~")
                return strings.ReplaceAll(abs, "\\", "/")
            }(),
        },
        {
            input: func() string {
                if runtime.GOOS == "windows" {
                    return "C:/not/tilde/path"
                }
                return "/not/tilde/path"
            }(),
            expected: func() string {
                if runtime.GOOS == "windows" {
                    return "C:/not/tilde/path"
                }
                return "/not/tilde/path"
            }(),
        },
    }

    for _, tt := range tests {
        t.Run("Complex tilde: "+tt.input, func(t *testing.T) {
            result := ResolvePath(tt.input)
            assert.Equal(t, tt.expected, result)
        })
    }
}

func TestIsAbsolutePath(t *testing.T) {
    tests := []struct {
        name     string
        input    string
        expected bool
    }{
        {
            name:     "Empty path",
            input:    "",
            expected: false,
        },
        {
            name:  "Absolute Unix path",
            input: "/absolute/path",
            expected: func() bool {
                if runtime.GOOS == "windows" {
                    // On Windows, paths starting with / are not considered absolute
                    return false
                }
                return true
            }(),
        },
        {
            name:     "Relative path",
            input:    "relative/path",
            expected: false,
        },
        {
            name:     "Tilde expansion (becomes absolute)",
            input:    "~/path",
            expected: true,
        },
        {
            name:     "Windows absolute path",
            input:    "C:\\Windows\\System32",
            expected: true,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result := IsAbsolutePath(tt.input)
            assert.Equal(t, tt.expected, result)
        })
    }
}

func TestGetRelativePath(t *testing.T) {
    // Create temporary directories for testing
    tmpDir, err := os.MkdirTemp("", "relative_path_test")
    assert.NoError(t, err)
    defer os.RemoveAll(tmpDir)

    baseDir := filepath.Join(tmpDir, "base")
    targetDir := filepath.Join(tmpDir, "target")
    subDir := filepath.Join(targetDir, "subdir")

    err = os.MkdirAll(baseDir, 0755)
    assert.NoError(t, err)
    err = os.MkdirAll(subDir, 0755)
    assert.NoError(t, err)

    tests := []struct {
        name     string
        base     string
        target   string
        expected string
        wantErr  bool
    }{
        {
            name:     "Target is subdirectory of base",
            base:     baseDir,
            target:   filepath.Join(baseDir, "subdir"),
            expected: "subdir",
            wantErr:  false,
        },
        {
            name:     "Target is parent of base",
            base:     filepath.Join(baseDir, "subdir"),
            target:   baseDir,
            expected: "..",
            wantErr:  false,
        },
        {
            name:     "Target is sibling directory",
            base:     baseDir,
            target:   targetDir,
            expected: "../target",
            wantErr:  false,
        },
        {
            name:     "Same directory",
            base:     baseDir,
            target:   baseDir,
            expected: ".",
            wantErr:  false,
        },
        {
            name:     "With tilde expansion",
            base:     baseDir,
            target:   filepath.Join(baseDir, "file.txt"),
            expected: "file.txt",
            wantErr:  false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            result, err := GetRelativePath(tt.base, tt.target)
            if tt.wantErr {
                assert.Error(t, err)
            } else {
                assert.NoError(t, err)
                assert.Equal(t, tt.expected, result)
            }
        })
    }
}

func TestResolvePathRegression(t *testing.T) {
    // This test specifically addresses the original bug:
    // "~ is NOT BEING FUCKING RESOLVED"

    home := os.Getenv("HOME")
    if home == "" && runtime.GOOS == "windows" {
        home = os.Getenv("USERPROFILE")
    }

    if home == "" {
        t.Skip("Cannot determine home directory for regression test")
    }

    // Test the exact pattern from the bug report
    testPath := "~/Seafile/activitywatch/sync.yml"
    result := ResolvePath(testPath)
    expected := filepath.Join(home, "Seafile", "activitywatch", "sync.yml")
    expected = strings.ReplaceAll(filepath.Clean(expected), "\\", "/")

    assert.Equal(t, expected, result, "Tilde expansion bug not fixed!")
    assert.NotContains(t, result, "~", "Tilde still present in resolved path!")
    // Convert both to forward slashes for comparison
    homeForwardSlash := strings.ReplaceAll(home, "\\", "/")
    assert.Contains(t, result, homeForwardSlash, "Home directory not found in resolved path!")
}

func TestResolvePathEdgeCases(t *testing.T) {
    // Save original working directory
    origDir, _ := os.Getwd()
    defer os.Chdir(origDir)

    tests := []struct {
        name        string
        input       string
        setup       func()
        shouldPanic bool
    }{
        {
            name:  "Just dot",
            input: ".",
        },
        {
            name:  "Just double dot",
            input: "..",
        },
        {
            name:  "Triple dot",
            input: "...",
        },
        {
            name:  "Multiple leading dots",
            input: "./.././../file.txt",
        },
        {
            name:  "Path with spaces",
            input: "path with spaces/file.txt",
        },
        {
            name:  "Very long relative path",
            input: strings.Repeat("../", 10) + "file.txt",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            if tt.setup != nil {
                tt.setup()
            }

            if tt.shouldPanic {
                assert.Panics(t, func() {
                    ResolvePath(tt.input)
                })
            } else {
                // Should not panic
                assert.NotPanics(t, func() {
                    ResolvePath(tt.input)
                })
                // Result should be a valid absolute path
                result := ResolvePath(tt.input)
                if tt.input != "" {
                    assert.True(t, filepath.IsAbs(result) || result == "", "Result should be absolute or empty")
                }
            }
        })
    }
}