Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
142 changes: 76 additions & 66 deletions build/tested.lua
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,74 @@ local function fisher_yates_shuffle(t)
end
end

--- Decide whether a test should be skipped before it is executed.
-- @param test table with `name`, `kind` ("only"/"skip"/normal) and `options`
--        (which may hold `run_when`, the result of `tested.conditional_skip`)
-- @param run_only truthy when at least one 'tested.only' test was registered
-- @param options table|nil run options; may carry a `filter` Lua pattern
--        matched against `test.name` via `string.find`
-- @return result, message: "SKIP"/"CONDITIONAL_SKIP" plus a human-readable
--         reason, or nil, nil when the test should run
local function should_skip_test(test, run_only, options)
	if run_only and test.kind ~= "only" then
		return "SKIP", "Only running 'tested.only' tests"
	elseif test.kind == "skip" then
		return "SKIP", "Test marked with 'tested.skip'"
	elseif options and options.filter ~= nil and not string.find(test.name, options.filter) then
		return "CONDITIONAL_SKIP", "Test name does not match filter pattern '" .. options.filter .. "'"
	elseif test.options.run_when == false then
		-- fix: `== false` alone suffices — in Lua `nil == false` is never true,
		-- so the previous `~= nil and == false` guard was redundant
		return "CONDITIONAL_SKIP", "Condition in `tested.conditional_skip` returned false. Skipping test."
	end
	return nil, nil
end

--- Classify a finished test run into `result`/`message` fields on `test_output`.
-- `ok`/`err` come from the pcall around the test body; the assertion counts
-- come from the assert wrapper installed while the test ran.
local function set_result(ok, err, total_assertions, assert_failed_count, test_output)
	local result, message
	if ok == false then
		-- the test body itself raised an error
		result, message = "EXCEPTION", err .. "\n" .. debug.traceback()
	elseif total_assertions == 0 then
		-- ran cleanly but asserted nothing, so nothing was actually verified
		result, message = "UNKNOWN", "No assertions run during test"
	elseif assert_failed_count == 0 then
		result, message = "PASS", "All assertions have passed"
	else
		result, message = "FAIL", assert_failed_count .. " assertions have failed"
	end
	test_output.result = result
	test_output.message = message
end

-- Result values that get promoted to their EXPECTED_* variant when the test
-- declared them up front (other values, e.g. "PASS", are left untouched).
local expected_rename = {
	EXCEPTION = "EXPECTED_EXCEPTION",
	UNKNOWN = "EXPECTED_UNKNOWN",
	FAIL = "EXPECTED_FAIL",
}

--- Reconcile a test's actual result with its declared expected result.
-- A matching result is renamed to its EXPECTED_* form; a mismatch becomes
-- "UNEXPECTED" with an explanatory line prepended to the message.
-- Does nothing when `expected` is nil.
local function adjust_for_expected(expected, test_output)
	if expected == nil then
		return
	end
	if test_output.result ~= expected then
		test_output.message = "Expected test result to be " .. expected ..
			", but came back as " .. test_output.result .. "\n" .. test_output.message
		test_output.result = "UNEXPECTED"
		return
	end
	test_output.result = expected_rename[expected] or test_output.result
end

-- Which counter bucket each result value feeds into.
local result_bucket = {
	PASS = "passed",
	FAIL = "failed",
	EXPECTED_FAIL = "expected",
	EXPECTED_EXCEPTION = "expected",
	EXPECTED_UNKNOWN = "expected",
	EXCEPTION = "invalid",
	UNKNOWN = "invalid",
	UNEXPECTED = "invalid",
	SKIP = "skipped",
	CONDITIONAL_SKIP = "skipped",
}

--- Increment the counter in `test_counts` matching `test_output.result`.
-- Unrecognized result values leave the counts unchanged.
local function add_up_test_results(test_output, test_counts)
	local bucket = result_bucket[test_output.result]
	if bucket then
		test_counts[bucket] = test_counts[bucket] + 1
	end
end


function tested:run(filename, options)
if options and options.random then
Expand All @@ -154,29 +222,11 @@ function tested:run(filename, options)

test_results.tests[i] = { assertion_results = {}, name = test.name }

if self.run_only_tests and test.kind ~= "only" then
test_results.tests[i].result = "SKIP"
test_results.tests[i].message = "Only running 'tested.only' tests"
test_results.tests[i].time = 0
test_results.counts.skipped = test_results.counts.skipped + 1

elseif test.kind == "skip" then
test_results.tests[i].result = "SKIP"
test_results.tests[i].message = "Test marked with 'tested.skip'"
local skip_result, skip_message = should_skip_test(test, self.run_only_tests, options)
if skip_result then
test_results.tests[i].result = skip_result
test_results.tests[i].message = skip_message
test_results.tests[i].time = 0
test_results.counts.skipped = test_results.counts.skipped + 1

elseif options and options.filter ~= nil and not string.find(test.name, options.filter) then
test_results.tests[i].result = "CONDITIONAL_SKIP"
test_results.tests[i].message = "Test name does not match filter pattern '" .. options.filter .. "'"
test_results.tests[i].time = 0
test_results.counts.skipped = test_results.counts.skipped + 1

elseif test.options.run_when ~= nil and test.options.run_when == false then
test_results.tests[i].result = "CONDITIONAL_SKIP"
test_results.tests[i].message = "Condition in `tested.conditional_skip` returned false. Skipping test."
test_results.tests[i].time = 0
test_results.counts.skipped = test_results.counts.skipped + 1

else
local assert_failed_count = 0
Expand Down Expand Up @@ -215,54 +265,14 @@ function tested:run(filename, options)
test_results.total_time = test_results.total_time + test_results.tests[i].time
self.assert = original_assert

if ok == false then
test_results.tests[i].result = "EXCEPTION"
test_results.tests[i].message = err .. "\n" .. debug.traceback()

elseif total_assertions == 0 then
test_results.tests[i].result = "UNKNOWN"
test_results.tests[i].message = "No assertions run during test"

elseif assert_failed_count == 0 then
test_results.tests[i].result = "PASS"
test_results.tests[i].message = "All assertions have passed"

else
test_results.tests[i].result = "FAIL"
test_results.tests[i].message = assert_failed_count .. " assertions have failed"
end


if test.options.expected ~= nil then
if test_results.tests[i].result == test.options.expected then
if test.options.expected == "EXCEPTION" then
test_results.tests[i].result = "EXPECTED_EXCEPTION"
elseif test.options.expected == "UNKNOWN" then
test_results.tests[i].result = "EXPECTED_UNKNOWN"
elseif test.options.expected == "FAIL" then
test_results.tests[i].result = "EXPECTED_FAIL"
end
else
test_results.tests[i].message = "Expected test result to be " .. test.options.expected .. ", but came back as " .. test_results.tests[i].result .. "\n" .. test_results.tests[i].message
test_results.tests[i].result = "UNEXPECTED"
end
end
set_result(ok, err, total_assertions, assert_failed_count, test_results.tests[i])


if test_results.tests[i].result == "PASS" then
test_results.counts.passed = test_results.counts.passed + 1

elseif test_results.tests[i].result == "FAIL" then
test_results.counts.failed = test_results.counts.failed + 1

elseif test_results.tests[i].result == "EXPECTED_FAIL" or test_results.tests[i].result == "EXPECTED_EXCEPTION" or test_results.tests[i].result == "EXPECTED_UNKNOWN" then
test_results.counts.expected = test_results.counts.expected + 1
adjust_for_expected(test.options.expected, test_results.tests[i])
end

elseif test_results.tests[i].result == "EXCEPTION" or test_results.tests[i].result == "UNKNOWN" or test_results.tests[i].result == "UNEXPECTED" then
test_results.counts.invalid = test_results.counts.invalid + 1

end
end
add_up_test_results(test_results.tests[i], test_results.counts)
end
if test_results.counts.failed == 0 and test_results.counts.invalid == 0 then
test_results.fully_tested = true
Expand Down
174 changes: 174 additions & 0 deletions build/tested/cli.lua
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
local argparse = require("argparse")
local lfs = require("lfs")

local logging = require("tested.libs.logging")
local logger = logging.get_logger("tested.cli")



local cli = { CLIOptions = {} }










































-- Direct 1:1 mapping from a `--show` CLI choice to the result name it displays.
-- Aggregate choices ("valid", "invalid", "expected") have no entry here and are
-- expanded inside cli.display_types; "all" is expanded earlier, in
-- cli.set_defaults. NOTE(review): "skip" additionally implies CONDITIONAL_SKIP,
-- which cli.display_types adds as a special case.
local cli_to_display = {
	["skip"] = "SKIP",
	["pass"] = "PASS",
	["fail"] = "FAIL",
	["exception"] = "EXCEPTION",
	["unknown"] = "UNKNOWN",
	["unexpected"] = "UNEXPECTED",

}

--- Build the argparse command-line parser for tested and parse the arguments.
-- @param version string printed by the --version flag (which then exits 0)
-- @return table of parsed arguments, as produced by argparse's parser:parse()
function cli.parse_args(version)
	local parser = argparse("tested", "A Lua/Teal Unit Testing Framework", "For more info see https://fouriertransformer.github.io/tested")
	parser:flag("-c --coverage"):
		description("Enable code coverage - will generate luacov.stats.out (default: not-set)"):
		default(false)
	parser:flag("-r --random"):
		description("Randomize the order of the tests (default: not-set)"):
		default(false)
	parser:option("-F --filter"):
		description("Only run tests whose name matches this Lua pattern (default: not-set)")
	-- fix: the actual default applied in cli.set_defaults is
	-- { "fail", "exception", "unknown", "unexpected" }; the old help text
	-- omitted 'unexpected'
	parser:option("-s --show"):
		description("What test results to display (default: '-s fail -s exception -s unknown -s unexpected')"):
		choices({ "all", "valid", "invalid", "skip", "pass", "fail", "exception", "unknown", "expected", "unexpected" }):
		count("*")
	-- a custom formatter replaces the built-in display formats, so the two
	-- options are mutually exclusive
	parser:mutex(
		parser:option("-f --display-format"):
			description("What format to output the results in (default: 'terminal')"):
			choices({ "terminal", "plain", "tap" }):
			default("terminal"),
		parser:option("-z --custom-formatter"):
			description("File that loads a custom formatter to use for output"))

	parser:option("-n --threads"):
		description("Set the number of threads to run the tests with (default: 4). Set to 0 to disable."):
		default(4):
		convert(tonumber)
	parser:option("-x --format-handler"):
		description("File that loads custom formats that are Lua-compatible"):
		count("*")
	parser:option("-d --debug"):
		description("Set the log level - mostly for debugging issues with tested (default: 'WARNING')"):
		choices({ "DEBUG", "INFO", "WARNING" }):
		default("WARNING")
	parser:flag("--version"):
		description("Show version information"):
		action(function() print(version); os.exit(0) end)
	parser:argument("paths", "Path(s) to directories or files with tests to run (default: 'tests')"):
		args("*")

	logger:info("Parsing Arguments...")

	local args = parser:parse()
	return args
end

--- Fill in defaults for options the user did not supply and prime the
-- path-collection tables that cli.validate_args populates later.
-- Sets `args.specified_show` so callers can tell an explicit --show apart
-- from the default, and expands the "all" choice into every concrete one.
function cli.set_defaults(args)
	logger:info("Setting Defaults...")

	args.specified_show = #args.show > 0
	if not args.specified_show then
		args.show = { "fail", "exception", "unknown", "unexpected" }
	end

	if #args.paths == 0 then
		args.paths = { "tests" }
	end

	args.test_files = {}
	args.test_directories = {}

	-- "all" anywhere in --show replaces the list with every concrete choice
	for _, display_option in ipairs(args.show) do
		if display_option == "all" then
			args.show = { "skip", "pass", "fail", "exception", "unknown", "expected", "unexpected" }
			break
		end
	end
end

--- Validate the parsed arguments, sorting each entry of `args.paths` into
-- `args.test_files` / `args.test_directories` and checking that --filter is a
-- valid Lua pattern. Raises (with level 0, i.e. no position prefix) when a
-- path does not exist, is neither a file nor a directory, or the filter
-- pattern is malformed.
function cli.validate_args(args)
	logger:info("Validating args...")
	for _, path in ipairs(args.paths) do
		local info, err = lfs.attributes(path)
		-- fix: also treat a nil `info` as missing, and use level 0 like the
		-- other errors here so the message is shown without a source position
		if err or not info then
			error("The file or directory '" .. path .. "' does not appear to exist. Unable to run tests", 0)
		end
		if not (info.mode == "directory" or info.mode == "file") then
			error("tested requires the paths passed in to be a directory or file", 0)
		end
		if info.mode == "directory" then table.insert(args.test_directories, path) end
		if info.mode == "file" then table.insert(args.test_files, path) end
	end
	if args.filter then
		-- probe the pattern against an empty string: string.find raises on a
		-- malformed pattern, which pcall turns into (false, err)
		local ok, err = pcall(string.find, "", args.filter)
		if not ok then
			error("Invalid --filter pattern '" .. args.filter .. "': " .. tostring(err), 0)
		end
	end
end

-- Expansions for the aggregate --show choices that have no entry in
-- cli_to_display.
local aggregate_display = {
	invalid = { "EXCEPTION", "UNKNOWN", "TIMEOUT", "UNEXPECTED" },
	valid = { "PASS", "SKIP", "CONDITIONAL_SKIP", "FAIL", "EXPECTED_FAIL", "EXPECTED_EXCEPTION", "EXPECTED_UNKNOWN" },
	expected = { "EXPECTED_FAIL", "EXPECTED_EXCEPTION", "EXPECTED_UNKNOWN" },
}

--- Translate a list of --show choices into a set of result names to display.
-- @param options array of CLI choice strings (e.g. {"skip", "invalid"})
-- @return table keyed by display-result name, each value `true`
function cli.display_types(options)
	local to_display = {}
	for _, cli_option in ipairs(options) do
		local direct = cli_to_display[cli_option]
		if direct then
			to_display[direct] = true
			-- showing skipped tests covers both flavours of skip
			if cli_option == "skip" then
				to_display["CONDITIONAL_SKIP"] = true
			end
		else
			-- aggregate choices expand to several result names; anything
			-- unrecognized expands to nothing, matching the old behaviour
			for _, result_name in ipairs(aggregate_display[cli_option] or {}) do
				to_display[result_name] = true
			end
		end
	end
	return to_display
end

return cli
Loading
Loading