diff --git a/CHANGELOG.md b/CHANGELOG.md index 05e3cb721..010e44b7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - #992: Implement automatic history purge logic - #973: Enables CORS and JWT configuration for WebApplications in module.xml - #1110: Add `iriscli` and `ipm` container utility scripts that are auto-installed to `~/.local/bin/` and `~/bin/` so they work both inside and outside of containers (Unix/Linux only) +- #971: Adds structured test output formats (JSON, YAML, Toon). Use `-f ` for a one-shot override or `config set TestReportFormat ` for a persistent default. Without either, legacy output is shown. Also adds `-output-file` for writing results to a file (including JUnit XML via `.xml` extension) and improves `-quiet` to suppress build noise. ### Fixed - #1001: The `unmap` and `enable` commands will now only activate CPF merge once after all namespaces have been configured instead after every namespace diff --git a/src/cls/IPM/Lifecycle/Base.cls b/src/cls/IPM/Lifecycle/Base.cls index 74bade7c7..5a3a6a528 100644 --- a/src/cls/IPM/Lifecycle/Base.cls +++ b/src/cls/IPM/Lifecycle/Base.cls @@ -1311,6 +1311,14 @@ Method %Verify(ByRef pParams) As %Status new $namespace set tInitNS = $select($namespace="%SYS": "USER", 1: $namespace) set tVerbose = $get(pParams("Verbose")) + set explicitQuiet = ($data(pParams("Verbose")) && (pParams("Verbose") = 0)) + + // Suppress module install/load noise in quiet mode; ended before resource processors + // run so each processor can manage its own output (Test.cls still shows summary + failures). + // Null device is process-scoped, so suppression survives the namespace switch below. 
+ if explicitQuiet { + set suppressor = ##class(%IPM.Utils.OutputSuppressor).%New() + } if '$get(pParams("Verify","InCurrentNamespace"),0) { set tMustCreate = 1 @@ -1394,6 +1402,9 @@ Method %Verify(ByRef pParams) As %Status // Load dependencies scoped to the "Verify" phase that are not yet installed. do ##class(%IPM.Utils.Module).LoadDependencies(..Module, ..PhaseList, .pParams) + // End suppression before resource processors run — each handles its own output. + set suppressor = "" + set orderedResourceList = ..Module.GetOrderedResourceList() set tKey = "" for { diff --git a/src/cls/IPM/Main.cls b/src/cls/IPM/Main.cls index e2eb416c2..3871ecdbf 100644 --- a/src/cls/IPM/Main.cls +++ b/src/cls/IPM/Main.cls @@ -73,8 +73,10 @@ Can also specify desired version to update to. - - + + + + diff --git a/src/cls/IPM/Repo/UniversalSettings.cls b/src/cls/IPM/Repo/UniversalSettings.cls index e1ff1f3ed..e5f79a85a 100644 --- a/src/cls/IPM/Repo/UniversalSettings.cls +++ b/src/cls/IPM/Repo/UniversalSettings.cls @@ -1,6 +1,6 @@ /// IPM settings are placed in ^IPM.settings global in %SYS namespace /// Use this class to set or get settings -/// +/// /// Available settings /// default_registry (string) - default registry url /// analytics_tracking_id @@ -45,7 +45,10 @@ Parameter SemVerPostRelease = "SemVerPostRelease"; /// to retain IPM history records before they are eligible for cleanup. Parameter HistoryRetain = "history_retain"; -Parameter CONFIGURABLE = "trackingId,analytics,ColorScheme,TerminalPrompt,PublishTimeout,PipCaller,UseStandalonePip,SemVerPostRelease,DefaultLogEntryLimit,HistoryRetain"; +/// Specifies the serialization format (JSON, TOON, YAML) for unit and integration test results in the shell. 
+Parameter TestReportFormat = "TestReportFormat"; + +Parameter CONFIGURABLE = "trackingId,analytics,ColorScheme,TerminalPrompt,PublishTimeout,PipCaller,UseStandalonePip,SemVerPostRelease,DefaultLogEntryLimit,HistoryRetain,TestReportFormat"; /// Returns configArray, that includes all configurable settings ClassMethod GetAll(Output configArray) As %Status { @@ -86,11 +89,13 @@ ClassMethod ResetToDefault(key As %String) As %Status write "Config key = """_key_""" not found",! quit } - set sc = ..SetValue($parameter(..%ClassName(1),key), ..GetDefaultValue($parameter(..%ClassName(1),key))) + // TestReportFormat has no factory default; empty means "use legacy output" + set defaultValue = $select(key = "TestReportFormat": "", 1: ..GetDefaultValue($parameter(..%ClassName(1),key))) + set sc = ..SetValue($parameter(..%ClassName(1),key), defaultValue) if $$$ISOK(sc) { - write "Value for """_key_""" succesfully reset to default",! + write !,"Value for """_key_""" successfully reset to default",! } else { - write "Error reseting value for """_key_"""",! + write !,"Error resetting value for """_key_"""",! } return sc } @@ -104,11 +109,16 @@ ClassMethod UpdateOne( write "Config key = """_key_""" not found",! quit } - set sc = ..SetValue($parameter(..%ClassName(1),key), value) + if key = "TestReportFormat" { + // Validate format value before saving it + set sc = ..SetTestReportFormat(value) + } else { + set sc = ..SetValue($parameter(..%ClassName(1),key), value) + } if $$$ISOK(sc) { - write "Key """_key_""" succesfully updated",! + write !,"Key """_key_""" successfully updated",! } else { - write "Error updating """_key_"""",! + write !,$system.Status.GetErrorText(sc),! 
} return sc } @@ -190,4 +200,19 @@ ClassMethod GetHistoryRetain() As %Integer return ..GetValue(..#HistoryRetain) } +ClassMethod SetTestReportFormat( + val As %String, + overwrite As %Boolean = 1) As %Status +{ + if val '= "" && ('$listfind($listfromstring(##class(%IPM.Test.Abstract).#VALIDFORMATS), $zconvert(val, "l"))) { + return $$$ERROR($$$GeneralError, "Unknown format '"_val_"'. Valid formats: "_##class(%IPM.Test.Abstract).#VALIDFORMATS) + } + return ..SetValue(..#TestReportFormat, val, overwrite) +} + +ClassMethod GetTestReportFormat() As %String +{ + return ..GetValue(..#TestReportFormat) +} + } diff --git a/src/cls/IPM/ResourceProcessor/PythonWheel.cls b/src/cls/IPM/ResourceProcessor/PythonWheel.cls index a1346cc9b..0cb67a5a4 100644 --- a/src/cls/IPM/ResourceProcessor/PythonWheel.cls +++ b/src/cls/IPM/ResourceProcessor/PythonWheel.cls @@ -52,7 +52,14 @@ Method OnPhase( if verbose { write !,"Running command: ",command } - $$$ThrowOnError(##class(%IPM.Utils.Module).RunCommand(, command)) + set explicitQuiet = ($data(pParams("Verbose")) && (pParams("Verbose") = 0)) + if explicitQuiet { + // Redirect pip stdout/stderr to a sink stream so subprocess output doesn't bypass device suppression + set pipOutput = ##class(%Stream.TmpCharacter).%New() + $$$ThrowOnError(##class(%IPM.Utils.Module).RunCommand(, command, .pipOutput, .pipOutput)) + } else { + $$$ThrowOnError(##class(%IPM.Utils.Module).RunCommand(, command)) + } } catch ex { set pResourceHandled = 0 // Special case: we want the installation of IPM to continue, even if the wheel package fails to install diff --git a/src/cls/IPM/ResourceProcessor/Test.cls b/src/cls/IPM/ResourceProcessor/Test.cls index 66af3b624..a146f4b5b 100644 --- a/src/cls/IPM/ResourceProcessor/Test.cls +++ b/src/cls/IPM/ResourceProcessor/Test.cls @@ -111,6 +111,10 @@ Method OnPhase( // In test/verify phase, run unit tests. 
set tVerbose = $get(pParams("Verbose"), 0) set tFlags = $select(tVerbose:"/display=all",1:"/display=none") + set explicitQuiet = ($data(pParams("Verbose")) && (pParams("Verbose") = 0)) + if explicitQuiet { + set suppressor = ##class(%IPM.Utils.OutputSuppressor).%New() + } // Ensure unit tests and related classes are loaded. set tUnitTestDir = ##class(%File).NormalizeDirectory(..ResourceReference.Module.Root_..ResourceReference.Name) @@ -199,21 +203,54 @@ Method OnPhase( zkill ^UnitTestRoot $$$ThrowOnError(tSC) - if $data(pParams("UnitTest","JUnitOutput"),tJUnitFile) { - set tPostfix = "-"_$zconvert(pPhase,"L")_"-" - if (..Package '= "") { - set tPostfix = tPostfix_$replace(..Package,".","-")_"-PKG" - } elseif (..Class '= "") { - set tPostfix = tPostfix_$replace(..Class,".","-")_"-CLS" + set testIndex = $get(^||%UnitTest.Manager.AllResults($get(^||%UnitTest.Manager.AllResultsCount))) + if testIndex = "" { + set testIndex = $order(^UnitTest.Result(""),-1) + } + if $data(pParams("outputfile"), outputFile) { + set fileExtension = $zconvert($piece(outputFile,".",*),"L") + set outputClass = $case(fileExtension, + "json":"%IPM.Test.JsonOutput", + "yaml":"%IPM.Test.YamlOutput", + "toon":"%IPM.Test.ToonOutput", + "xml":"%IPM.Test.JUnitOutput", + :"") + if outputClass = "" { + $$$ThrowOnError($$$ERROR($$$GeneralError,"Unsupported output-file extension '."_fileExtension_"'. 
Use .json, .yaml, .toon, or .xml.")) + } + set outputDir = ##class(%File).GetDirectory(outputFile) + if outputDir '= "" { + $$$ThrowOnError(##class(%File).CreateDirectoryChain(outputDir)) } - set tJUnitFile = $piece(tJUnitFile,".",1,*-1)_tPostfix_".xml" - set tSC = ##class(%IPM.Test.JUnitOutput).ToFile(tJUnitFile) + set tSC = $classmethod(outputClass,"ToFile",outputFile) $$$ThrowOnError(tSC) } + set suppressor = "" - // By default, detect and report unit test failures as an error from this phase + set outputFormat = $get(pParams("outputformat")) + if outputFormat = "" { + set outputFormat = ##class(%IPM.Repo.UniversalSettings).GetTestReportFormat() + } + + write !!,"Test Results:" + if outputFormat '= "" { + set outputClass = "%IPM.Test."_$zconvert(outputFormat,"w")_"Output" + if '$$$defClassDefined(outputClass) { + $$$ThrowOnError($$$ERROR($$$GeneralError,"Unknown output format: "_outputFormat)) + } + set tSC = $classmethod(outputClass,"OutputToDevice",testIndex,tVerbose,1) + $$$ThrowOnError(tSC) + } else { + set tSC = ##class(%IPM.Test.Abstract).OutputToDevice(testIndex,tVerbose,0) + $$$ThrowOnError(tSC) + } + write ! + // Detect and report unit test failures as an error from this phase. + // OutputFailures shows legacy red FAILED lines only when no format is active. if $get(pParams("UnitTest","FailuresAreFatal"),1) { - do ##class(%IPM.Test.Manager).OutputFailures(phaseStartIndex) + if outputFormat = "" { + do ##class(%IPM.Test.Manager).OutputFailures(phaseStartIndex) + } set tSC = ##class(%IPM.Test.Manager).GetAllTestsStatus(,phaseStartIndex) $$$ThrowOnError(tSC) } diff --git a/src/cls/IPM/Test/Abstract.cls b/src/cls/IPM/Test/Abstract.cls new file mode 100644 index 000000000..5621c0091 --- /dev/null +++ b/src/cls/IPM/Test/Abstract.cls @@ -0,0 +1,255 @@ +/// Base class for all unit test result formatters. +Class %IPM.Test.Abstract Extends %RegisteredObject +{ + +/// Comma-separated list of valid output format names (lowercase). 
+/// Must stay in sync with the valueList on the format modifier in %IPM.Main. +Parameter VALIDFORMATS = "json,yaml,toon"; + +ClassMethod ToFile( + fileName As %String, + testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}) As %Status +{ + set sc = $$$OK + try { + set fileStream = ##class(%Stream.FileCharacter).%New() + set fileStream.TranslateTable = "UTF8" + $$$ThrowOnError(fileStream.LinkToFile(fileName)) + do ..WriteToFileStream(fileStream, testIndex) + $$$ThrowOnError(fileStream.%Save()) + } catch ex { + set sc = ex.AsStatus() + } + return sc +} + +ClassMethod WriteToFileStream( + fileStream As %Stream.FileCharacter, + testIndex As %Integer) [ Abstract ] +{ +} + +/// verbose: show all methods (not just failures) in the detail section. +/// showDetail: show the results/failures section at all; when 0 only the summary is written. +ClassMethod OutputToDevice( + testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}, + verbose As %Boolean = 0, + showDetail As %Boolean = 1) As %Status +{ + set sc = $$$OK + try { + set tree = ..BuildResultTree(testIndex) + set summary = ..GetSummary(testIndex, tree) + write !!,"Test Run #"_summary.id_" ("_summary.namespace_") "_summary.duration_"s "_summary.testDateTime + write !,"Methods: "_summary.methods.total_" total, "_summary.methods.passed_" passed, "_summary.methods.failed_" failed" + write !,"Assertions: "_summary.assertions.total_" total, "_summary.assertions.passed_" passed, "_summary.assertions.failed_" failed" + + if showDetail { + if verbose { + write ! 
+ set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + write !,suiteObj.name_"/"_caseObj.name_" [failed] "_caseObj.error + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + write !,suiteObj.name_"/"_caseObj.name_"/"_methodObj.name_" ["_methodObj.status_"]" + if methodObj.error '= "" { write " "_methodObj.error } + } + } + } + } elseif summary.methods.failed > 0 { + write ! + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + write !,suiteObj.name_"/"_caseObj.name_" [failed] "_caseObj.error + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if methodObj.status = "failed" { + write !,suiteObj.name_"/"_caseObj.name_"/"_methodObj.name_" [failed]" + if methodObj.error '= "" { write " "_methodObj.error } + } + } + } + } + } + } + } catch ex { + set sc = ex.AsStatus() + } + return sc +} + +/// Walks ^UnitTest.Result and returns a fully-populated result tree. +/// Non-assertion failures (thrown errors, lifecycle hook failures) appear +/// as non-empty "error" fields on method and case nodes rather than as +/// assert entries. 
+/// +/// ^UnitTest.Result global structure: +/// (testIndex, suite) → $list(status, time) +/// (testIndex, suite, case) → $list(status, time, errCategory, errMsg) +/// (testIndex, suite, case, method) → $list(status, time, errCategory, errMsg) +/// (testIndex, suite, case, method, counter) → $list(assertPassed, action, description, location) +/// status/assertPassed: 1=pass, 0=fail +/// errCategory/errMsg: non-empty only when a lifecycle hook or thrown error caused the failure +/// Instance metadata (Namespace, Duration, DateTime) lives in %UnitTest_Result.TestInstance +ClassMethod BuildResultTree(testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}) As %DynamicObject +{ + set meta = ##class(%SQL.Statement).%ExecDirect(, + "SELECT Namespace,Duration,DateTime FROM %UnitTest_Result.TestInstance WHERE ID=?", + testIndex) + set ns = "" + set duration = "" + set testDateTime = "" + if meta.%Next() { + set ns = meta.%Get("Namespace") + set duration = meta.%Get("Duration") + set testDateTime = meta.%Get("DateTime") + } + + set tree = { + "id": (testIndex), + "namespace": (ns), + "duration": (duration), + "testDateTime": (testDateTime), + "suites": [] + } + + set suite = "" + for { + set suite = $order(^UnitTest.Result(testIndex, suite), 1, suiteData) + quit:suite="" + + set suiteObj = {"name": (suite), "status": "passed", "cases": []} + do tree.suites.%Push(suiteObj) + + set testCase = "" + for { + set testCase = $order(^UnitTest.Result(testIndex, suite, testCase), 1, caseData) + quit:testCase="" + + set caseStatus = $select($listget(caseData, 1) '= 0: "passed", 1: "failed") + set caseError = "" + if caseStatus = "failed" { + set errCat = $listget(caseData, 3) + // Only set when errCat is non-empty; errMsg alone is IRIS's own aggregation + // string ("There are failed TestMethods") and not a real error message. 
+ if errCat '= "" { + set caseError = errCat_": "_$listget(caseData, 4) + } + } + set caseObj = {"name": (testCase), "status": (caseStatus), "error": (caseError), "methods": []} + do suiteObj.cases.%Push(caseObj) + + if caseStatus = "failed" { + set suiteObj.status = "failed" + } + + set method = "" + for { + set method = $order(^UnitTest.Result(testIndex, suite, testCase, method), 1, methodData) + quit:method="" + + set methodStatus = $select($listget(methodData, 1) '= 0: "passed", 1: "failed") + set methodError = "" + if methodStatus = "failed" { + set errCat = $listget(methodData, 3) + if errCat '= "" { + set methodError = errCat_": "_$listget(methodData, 4) + } + } + set methodObj = { + "name": (method), + "status": (methodStatus), + "duration": ($listget(methodData, 2)), + "error": (methodError), + "asserts": [] + } + do caseObj.methods.%Push(methodObj) + + set assert = "" + for { + set assert = $order(^UnitTest.Result(testIndex, suite, testCase, method, assert), 1, assertData) + quit:assert="" + set assertObj = { + "counter": (assert), + "status": ($select($listget(assertData, 1) '= 0: "passed", 1: "failed")), + "action": ($listget(assertData, 2)), + "description": ($listget(assertData, 3)), + "location": ($listget(assertData, 4)) + } + do methodObj.asserts.%Push(assertObj) + } + } + } + } + return tree +} + +/// Returns a summary %DynamicObject with run metadata and method/assertion counts. +/// Counts methods as failed if any assert failed OR if a non-assertion error occurred. +/// Pass a pre-built tree to avoid a second ^UnitTest.Result traversal. 
+ClassMethod GetSummary(testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}, tree As %DynamicObject = "") As %DynamicObject +{ + if '$isobject(tree) { + set tree = ..BuildResultTree(testIndex) + } + set (assertTotal, assertPassed, assertFailed) = 0 + set (methodTotal, methodPassed, methodFailed) = 0 + + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + set methodTotal = methodTotal + 1 + if methodObj.status = "passed" { + set methodPassed = methodPassed + 1 + } else { + set methodFailed = methodFailed + 1 + } + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + set assertTotal = assertTotal + 1 + if assertObj.status = "passed" { + set assertPassed = assertPassed + 1 + } else { + set assertFailed = assertFailed + 1 + } + } + } + // Case-level error from a lifecycle hook (OnBeforeAllTests, OnAfterAllTests) — count it as one failed method + if caseObj.error '= "" { + set methodTotal = methodTotal + 1 + set methodFailed = methodFailed + 1 + } + } + } + + return { + "id": (testIndex), + "namespace": (tree.namespace), + "duration": (tree.duration), + "testDateTime": (tree.testDateTime), + "methods": { + "total": (methodTotal), + "passed": (methodPassed), + "failed": (methodFailed) + }, + "assertions": { + "total": (assertTotal), + "passed": (assertPassed), + "failed": (assertFailed) + } + } +} + +} diff --git a/src/cls/IPM/Test/JUnitOutput.cls b/src/cls/IPM/Test/JUnitOutput.cls index bcdd4ba1b..843e0f4ac 100644 --- a/src/cls/IPM/Test/JUnitOutput.cls +++ b/src/cls/IPM/Test/JUnitOutput.cls @@ -1,137 +1,98 @@ -Class %IPM.Test.JUnitOutput +Class %IPM.Test.JUnitOutput Extends %IPM.Test.Abstract { -ClassMethod ToFile( - pFileName As %String, - pTestIndex As %Integer = {$order(^UnitTest.Result(""),-1)}) As %Status 
+ClassMethod WriteToFileStream( + fileStream As %Stream.FileCharacter, + testIndex As %Integer) { - set tSC = $$$OK - try { - set tFile = ##class(%Stream.FileCharacter).%New() - set tFile.TranslateTable="UTF8" - do tFile.LinkToFile(pFileName) + set tree = ..BuildResultTree(testIndex) - kill ^||TMP // results global - set tSuite="" - for { - set tSuite=$order(^UnitTest.Result(pTestIndex,tSuite),1,tSuiteData) - quit:tSuite="" - set ^||TMP("S",tSuite,"time")=$listget(tSuiteData,2) + do fileStream.WriteLine("") + do fileStream.WriteLine("") - set tCase="" - for { - set tCase=$order(^UnitTest.Result(pTestIndex,tSuite,tCase),1,tCaseData) - quit:tCase="" - - do $increment(^||TMP("S",tSuite,"tests")) - set ^||TMP("S",tSuite,"C",tCase,"time")=$listget(tCaseData,2) - set tMethod="" - for { - set tMethod=$order(^UnitTest.Result(pTestIndex,tSuite,tCase,tMethod),1,tMethodData) - quit:tMethod="" - - set ^||TMP("S",tSuite,"C",tCase,"M",tMethod,"time")=$listget(tMethodData,2) - set tAssert="" - for { - set tAssert=$order(^UnitTest.Result(pTestIndex,tSuite,tCase,tMethod,tAssert),1,tAssertData) - quit:tAssert="" - - do $increment(^||TMP("S",tSuite,"assertions")) - do $increment(^||TMP("S",tSuite,"C",tCase,"assertions")) - do $increment(^||TMP("S",tSuite,"C",tCase,"M",tMethod,"assertions")) - if $listget(tAssertData)=0 { - do $increment(^||TMP("S",tSuite,"failures")) - do $increment(^||TMP("S",tSuite,"C",tCase,"failures")) - set tIndex = $increment(^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures")) - set ^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures",tIndex) = - $listget(tAssertData,2) _ ": " _ $listget(tAssertData,3) - } - } - if ($listget(tMethodData)=0) - && ('$data(^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures"))) { - do $increment(^||TMP("S",tSuite,"failures")) - do $increment(^||TMP("S",tSuite,"C",tCase,"failures")) - set tIndex = $increment(^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures")) - set ^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures",tIndex) = - 
$listget(tMethodData,3) _ ": " _ $listget(tMethodData,4) - } - } - - if $listget(tCaseData)=0 - && ('$data(^||TMP("S",tSuite,"C",tCase,"failures"))) { - do $increment(^||TMP("S",tSuite,"failures")) - do $increment(^||TMP("S",tSuite,"C",tCase,"failures")) - set tIndex = $increment(^||TMP("S",tSuite,"C",tCase,"M",tCase,"failures")) - set ^||TMP("S",tSuite,"C",tCase,"M",tCase,"failures",tIndex) = - $listget(tCaseData,3) _ ": " _ $listget(tCaseData,4) - } + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set suiteTests = 0 + set suiteFailures = 0 + set suiteAssertions = 0 + set suiteDuration = 0 + set caseIter2 = suiteObj.cases.%GetIterator() + while caseIter2.%GetNext(, .caseObj2) { + set methodIter2 = caseObj2.methods.%GetIterator() + while methodIter2.%GetNext(, .methodObj2) { + set suiteTests = suiteTests + 1 + if methodObj2.status = "failed" { set suiteFailures = suiteFailures + 1 } + set suiteDuration = suiteDuration + methodObj2.duration + set assertIter2 = methodObj2.asserts.%GetIterator() + while assertIter2.%GetNext(, .unused) { set suiteAssertions = suiteAssertions + 1 } + } + if caseObj2.error '= "" { + set suiteTests = suiteTests + 1 + set suiteFailures = suiteFailures + 1 } } - do tFile.WriteLine("") - do tFile.WriteLine("") - set tSuite="" - for { - set tSuite=$order(^||TMP("S",tSuite)) - quit:tSuite="" - - do tFile.Write("") + do fileStream.Write("") - set tCase="" - for { - set tCase=$order(^||TMP("S",tSuite,"C",tCase)) - quit:tCase="" + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + do fileStream.Write("") - do tFile.Write("") + if caseObj.error '= "" { + do fileStream.Write("") + set msg = ..EncodeXMLAttr(caseObj.error) + do fileStream.Write("") + do fileStream.WriteLine("") + } - set tMethod="" - for { - set tMethod=$order(^||TMP("S",tSuite,"C",tCase,"M",tMethod)) - quit:tMethod="" + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) 
{ + set methodAssertions = methodObj.asserts.%Size() + do fileStream.Write("") - do tFile.Write("") - set tFailureKey = "" - for { - set tFailureKey = $order(^||TMP("S",tSuite,"C",tCase,"M",tMethod,"failures",tFailureKey),1,tMessage) - if (tFailureKey = "") { - quit - } - set tMessage = $zstrip(tMessage,"*C") - set tMessage = $zconvert($zconvert(tMessage,"O","UTF8"),"O","XML") - // Also encode newlines - $zconvert doesn't do this. - set tMessage = $replace(tMessage,$char(10)," ") - set tMessage = $replace(tMessage,$char(13)," ") - do tFile.Write("") - do tFile.WriteLine("") + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + if assertObj.status = "failed" { + set msg = ..EncodeXMLAttr(assertObj.action_": "_assertObj.description) + do fileStream.Write("") + do fileStream.WriteLine("") } - do tFile.WriteLine("") } - do tFile.WriteLine("") + if (methodObj.status = "failed") && (methodObj.error '= "") { + set msg = ..EncodeXMLAttr(methodObj.error) + do fileStream.Write("") + do fileStream.WriteLine("") + } + do fileStream.WriteLine("") } - do tFile.WriteLine("") + do fileStream.WriteLine("") } - do tFile.WriteLine("") - kill ^||TMP - - $$$ThrowOnError(tFile.%Save()) - } catch e { - set tSC = e.AsStatus() + do fileStream.WriteLine("") } - quit $$$OK + do fileStream.WriteLine("") +} + +ClassMethod EncodeXMLAttr(msg As %String) As %String [ Private ] +{ + // Strip control chars but preserve LF/CR for entity encoding below + set msg = $translate(msg, $char(0,1,2,3,4,5,6,7,8,11,12,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31)) + set msg = $zconvert($zconvert(msg, "O", "UTF8"), "O", "XML") + set msg = $replace(msg, $char(13,10), " ") + set msg = $replace(msg, $char(10), " ") + set msg = $replace(msg, $char(13), " ") + return msg } } diff --git a/src/cls/IPM/Test/JsonOutput.cls b/src/cls/IPM/Test/JsonOutput.cls new file mode 100644 index 000000000..1f56a0da4 --- /dev/null +++ b/src/cls/IPM/Test/JsonOutput.cls @@ -0,0 
+1,77 @@ +Class %IPM.Test.JsonOutput Extends %IPM.Test.Abstract +{ + +ClassMethod WriteToFileStream( + fileStream As %Stream.FileCharacter, + testIndex As %Integer) +{ + do fileStream.Write(..BuildResultTree(testIndex).%ToJSON()) +} + +ClassMethod OutputToDevice( + testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}, + verbose As %Boolean = 0, + showDetail As %Boolean = 1) As %Status +{ + set sc = $$$OK + try { + set tree = ..BuildResultTree(testIndex) + set summary = ..GetSummary(testIndex, tree) + if showDetail { + if verbose { + set allMethods = [] + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + do allMethods.%Push({"suite": (suiteObj.name), "case": (caseObj.name), "status": "failed", "error": (caseObj.error)}) + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + do allMethods.%Push({"suite": (suiteObj.name), "case": (caseObj.name), "method": (methodObj.name), "status": (methodObj.status), "error": (methodObj.error)}) + } + } + } + set output = {"summary": (summary), "methods": (allMethods)} + } else { + set failedMethods = [] + if summary.methods.failed > 0 { + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + do failedMethods.%Push({"suite": (suiteObj.name), "case": (caseObj.name), "error": (caseObj.error)}) + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if methodObj.status = "failed" { + if methodObj.error '= "" { + do failedMethods.%Push({"suite": (suiteObj.name), "case": (caseObj.name), "method": (methodObj.name), "error": (methodObj.error)}) + } + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + if 
assertObj.status = "failed" { + do failedMethods.%Push({"suite": (suiteObj.name), "case": (caseObj.name), "method": (methodObj.name), "action": (assertObj.action), "description": (assertObj.description), "location": (assertObj.location)}) + } + } + } + } + } + } + } + set output = {"summary": (summary), "failures": (failedMethods)} + } + } else { + set output = {"summary": (summary)} + } + write ! + do output.%ToJSON() + } catch ex { + set sc = ex.AsStatus() + } + return sc +} + +} diff --git a/src/cls/IPM/Test/Manager.cls b/src/cls/IPM/Test/Manager.cls index d83ba01bb..5957bc949 100644 --- a/src/cls/IPM/Test/Manager.cls +++ b/src/cls/IPM/Test/Manager.cls @@ -68,43 +68,51 @@ ClassMethod GetAllTestsStatus( // This ensures nested phases only see their own results, not parent's for i=(startIndex+1):1:testCount { set logIndex = $get(^||%UnitTest.Manager.AllResults(i)) - if (logIndex '= "") { - // Query for assertion failures in this test run - set res = ##class(%SQL.Statement).%ExecDirect(,"select count(*) "_ - "from %UnitTest_Result.TestAssert where Status = 0 "_ - "and TestMethod->TestCase->TestSuite->TestInstance->InstanceIndex = ?",logIndex) - if (res.%SQLCODE < 0) { - throw ##class(%Exception.SQL).CreateFromSQLCODE(res.%SQLCODE,res.%Message) - } - do res.%Next(.sc) - $$$ThrowOnError(sc) - set failures = res.%GetData(1) - set failureCount = failureCount + failures - - // Also check for test suite failures (e.g., loading errors) - if (failures = 0) { - set res = ##class(%SQL.Statement).%ExecDirect(,"select count(*) "_ - "from %UnitTest_Result.TestSuite where Status = 0 "_ - "and TestInstance->InstanceIndex = ?",logIndex) - if (res.%SQLCODE < 0) { - throw ##class(%Exception.SQL).CreateFromSQLCODE(res.%SQLCODE,res.%Message) + if logIndex '= "" { + // Count failure rows to match what OutputFailuresForLogIndex displays: + // one row per case error, one per method error, one per failed assertion + set tree = ##class(%IPM.Test.Abstract).BuildResultTree(logIndex) + set 
suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + set failureCount = failureCount + 1 + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if methodObj.error '= "" { + set failureCount = failureCount + 1 + } + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + if assertObj.status = "failed" { + set failureCount = failureCount + 1 + } + } + } } - do res.%Next(.sc) - $$$ThrowOnError(sc) - set failures = res.%GetData(1) - set failureCount = failureCount + failures } } } - if (failureCount > 0) { - set sc = $$$ERROR($$$GeneralError, failureCount_" assertion(s) failed.") + if failureCount > 0 { + set sc = $$$ERROR($$$GeneralError, failureCount_" failure(s).") } - // Only clean up AllResults at top level (startIndex=0), not in nested phases - if (startIndex = 0) { + // Always consume the entries this phase owned. + // At top level (startIndex=0) kill everything; for nested phases, roll the count + // back so the outer phase does not re-count entries the inner phase already handled. + // Outer phases learn about inner failures via the Shell return status, not by + // re-inspecting these entries. 
+ if startIndex = 0 { kill ^||%UnitTest.Manager.AllResults kill ^||%UnitTest.Manager.AllResultsCount + } else { + for i=(startIndex+1):1:testCount { + kill ^||%UnitTest.Manager.AllResults(i) + } + set ^||%UnitTest.Manager.AllResultsCount = startIndex } } catch e { set sc = e.AsStatus() @@ -120,8 +128,6 @@ ClassMethod OutputFailures(startIndex As %Integer = 0) try { set testCount = $get(^||%UnitTest.Manager.AllResultsCount, 0) - // Output failures from all tracked test LogIndexes from startIndex onwards - // This ensures parent phase outputs failures from both parent and nested tests for i=(startIndex+1):1:testCount { set logIndex = $get(^||%UnitTest.Manager.AllResults(i)) if (logIndex '= "") { @@ -141,27 +147,24 @@ ClassMethod OutputFailuresForLogIndex(logIndex As %Integer) if 'logIndex { quit } - set logGN = $name(^UnitTest.Result(logIndex)) - set root = "" - for { - set root = $order(@logGN@(root)) - quit:root="" - set suite = "" - for { - set suite = $order(@logGN@(root, suite)) - quit:suite="" - set method = "" - for { - set method = $order(@logGN@(root, suite, method)) - quit:method="" - - set assert = "" - for { - set assert = $order(@logGN@(root, suite, method, assert), 1, assertInfo) - quit:assert="" - set $listbuild(status, type, text) = assertInfo - continue:status - write !,$$$FormattedLine($$$Red, "FAILED " _ suite _ ":" _ method), ": " _ type _ " - " _ text + set tree = ##class(%IPM.Test.Abstract).BuildResultTree(logIndex) + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + write !,$$$FormattedLine($$$Red, "FAILED " _ suiteObj.name _ ":" _ caseObj.name), ": " _ caseObj.error + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if methodObj.status '= "failed" { continue } + if methodObj.error '= "" { + write !,$$$FormattedLine($$$Red, "FAILED " _ 
suiteObj.name _ ":" _ methodObj.name), ": " _ methodObj.error + } + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + if assertObj.status '= "failed" { continue } + write !,$$$FormattedLine($$$Red, "FAILED " _ suiteObj.name _ ":" _ methodObj.name), ": " _ assertObj.action _ " - " _ assertObj.description } } } diff --git a/src/cls/IPM/Test/ToonOutput.cls b/src/cls/IPM/Test/ToonOutput.cls new file mode 100644 index 000000000..974505500 --- /dev/null +++ b/src/cls/IPM/Test/ToonOutput.cls @@ -0,0 +1,111 @@ +Class %IPM.Test.ToonOutput Extends %IPM.Test.Abstract +{ + +ClassMethod WriteToFileStream( + fileStream As %Stream.FileCharacter, + testIndex As %Integer) +{ + set tree = ..BuildResultTree(testIndex) + set rowStream = ..BuildToonRows(tree, 0, .rowCount) + + do fileStream.WriteLine("unitTest:") + do fileStream.WriteLine(" id: "_tree.id) + do fileStream.WriteLine(" namespace: "_tree.namespace) + do fileStream.WriteLine(" duration: "_tree.duration_"s") + do fileStream.WriteLine(" testDateTime: "_tree.testDateTime) + do fileStream.WriteLine() + do fileStream.WriteLine("results["_rowCount_"]{suiteName,testcaseName,methodName,status,assertAction,assertCounter,assertDescription,assertLocation}:") + do rowStream.Rewind() + do fileStream.CopyFrom(rowStream) +} + +ClassMethod OutputToDevice( + testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}, + verbose As %Boolean = 0, + showDetail As %Boolean = 1) As %Status +{ + set sc = $$$OK + try { + set tree = ..BuildResultTree(testIndex) + set summary = ..GetSummary(testIndex, tree) + write ! 
+ write !,"summary:" + write !," id: "_summary.id_" namespace: "_summary.namespace_" duration: "_summary.duration_"s testDateTime: "_summary.testDateTime + write !," methods["_summary.methods.total_"]: "_summary.methods.passed_" passed, "_summary.methods.failed_" failed" + write !," assertions["_summary.assertions.total_"]: "_summary.assertions.passed_" passed, "_summary.assertions.failed_" failed" + + if showDetail { + if verbose { + set rowStream = ..BuildToonRows(tree, 0, .rowCount) + write ! + write !,"results["_rowCount_"]{suiteName,testcaseName,methodName,status,assertAction,assertCounter,assertDescription,assertLocation}:" + do rowStream.Rewind() + while 'rowStream.AtEnd { + write !,rowStream.ReadLine() + } + } elseif summary.methods.failed > 0 { + set failStream = ..BuildToonRows(tree, 1, .failCount) + write ! + write !,"failures["_failCount_"]{suiteName,testcaseName,methodName,status,assertAction,assertCounter,assertDescription,assertLocation}:" + do failStream.Rewind() + while 'failStream.AtEnd { + write !,failStream.ReadLine() + } + } + } + } catch ex { + set sc = ex.AsStatus() + } + return sc +} + +/// RFC 4180 quoting: doubles internal quotes, wraps in quotes if value contains comma/quote/newline. +ClassMethod EscapeToonField(val As %String) As %String [ Private ] +{ + if val [ """" || (val [ ",") || (val [ $char(10)) { + return """"_$replace(val, """", """""")_"""" + } + return """"_val_"""" +} + +/// Returns a %Stream.TmpCharacter containing Toon result rows. +/// failuresOnly=0: all methods (for ToFile and verbose OutputToDevice). +/// failuresOnly=1: failed methods and assert failures only (for non-verbose OutputToDevice). 
+ClassMethod BuildToonRows(tree As %DynamicObject, failuresOnly As %Boolean, Output rowCount As %Integer) As %Stream.TmpCharacter [ Private ] +{ + set rowStream = ##class(%Stream.TmpCharacter).%New() + set rowCount = 0 + + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + set rowCount = rowCount + 1 + do rowStream.WriteLine(" "_suiteObj.name_","_caseObj.name_","_caseObj.name_",failed,OnBeforeAllTests,0,"_..EscapeToonField(caseObj.error)_",""""") + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if failuresOnly && (methodObj.status '= "failed") { continue } + if methodObj.error '= "" { + set rowCount = rowCount + 1 + do rowStream.WriteLine(" "_suiteObj.name_","_caseObj.name_","_methodObj.name_",failed,error,0,"_..EscapeToonField(methodObj.error)_",""""") + } + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, .assertObj) { + if failuresOnly && (assertObj.status '= "failed") { continue } + set rowCount = rowCount + 1 + set row = " "_suiteObj.name_","_caseObj.name_","_methodObj.name_","_assertObj.status_","_assertObj.action_","_assertObj.counter_","_..EscapeToonField(assertObj.description)_","_..EscapeToonField(assertObj.location) + do rowStream.WriteLine(row) + } + if 'failuresOnly && (methodObj.asserts.%Size() = 0) { + set rowCount = rowCount + 1 + do rowStream.WriteLine(" "_suiteObj.name_","_caseObj.name_","_methodObj.name_","_methodObj.status_",,,,""""") + } + } + } + } + return rowStream +} + +} diff --git a/src/cls/IPM/Test/Utils.cls b/src/cls/IPM/Test/Utils.cls index 5355dbef0..e332634ad 100644 --- a/src/cls/IPM/Test/Utils.cls +++ b/src/cls/IPM/Test/Utils.cls @@ -211,7 +211,9 @@ ClassMethod CloseConnectionsForNamespace(pNamespace As %String) As %Status } while tProcs.%Next(.tStatus) { set tProc = 
##class(SYS.Process).%OpenId(tProcs.%Get("PID")) - set tStatus = $$$ADDSC(tStatus,tProc.Terminate()) + if $isobject(tProc) { + set tStatus = $$$ADDSC(tStatus,tProc.Terminate()) + } } } catch e { set tStatus = e.AsStatus() diff --git a/src/cls/IPM/Test/YamlOutput.cls b/src/cls/IPM/Test/YamlOutput.cls new file mode 100644 index 000000000..5f5b773c6 --- /dev/null +++ b/src/cls/IPM/Test/YamlOutput.cls @@ -0,0 +1,146 @@ +Class %IPM.Test.YamlOutput Extends %IPM.Test.Abstract +{ + +ClassMethod WriteToFileStream( + fileStream As %Stream.FileCharacter, + testIndex As %Integer) +{ + do fileStream.CopyFrom(..BuildYaml(testIndex)) +} + +ClassMethod OutputToDevice( + testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}, + verbose As %Boolean = 0, + showDetail As %Boolean = 1) As %Status +{ + set sc = $$$OK + try { + set tree = ..BuildResultTree(testIndex) + set summary = ..GetSummary(testIndex, tree) + write ! + write !,"summary:" + write !," id: "_summary.id + write !," namespace: """_..EscapeYamlString(summary.namespace)_"""" + write !," duration: "_summary.duration_"s" + write !," testDateTime: """_..EscapeYamlString(summary.testDateTime)_"""" + write !," methods:" + write !," total: "_summary.methods.total + write !," passed: "_summary.methods.passed + write !," failed: "_summary.methods.failed + write !," assertions:" + write !," total: "_summary.assertions.total + write !," passed: "_summary.assertions.passed + write !," failed: "_summary.assertions.failed + + if showDetail { + if verbose { + write ! + write !,"results:" + set resultStream = ..BuildYamlRows(tree, 0) + do resultStream.Rewind() + while 'resultStream.AtEnd { + write !,resultStream.ReadLine() + } + } elseif summary.methods.failed > 0 { + write ! 
+ write !,"failures:" + set failStream = ..BuildYamlRows(tree, 1) + do failStream.Rewind() + while 'failStream.AtEnd { + write !,failStream.ReadLine() + } + } + } + } catch ex { + set sc = ex.AsStatus() + } + return sc +} + +/// Returns a stream with the full YAML document (header + all results). +ClassMethod BuildYaml(testIndex As %Integer = {$order(^UnitTest.Result(""),-1)}) As %Stream.TmpCharacter +{ + set tree = ..BuildResultTree(testIndex) + set yamlStream = ##class(%Stream.TmpCharacter).%New() + + do yamlStream.WriteLine("unitTest:") + do yamlStream.WriteLine(" id: "_tree.id) + do yamlStream.WriteLine(" namespace: """_..EscapeYamlString(tree.namespace)_"""") + do yamlStream.WriteLine(" duration: "_tree.duration_"s") + do yamlStream.WriteLine(" testDateTime: """_..EscapeYamlString(tree.testDateTime)_"""") + do yamlStream.WriteLine() + do yamlStream.WriteLine(" results:") + do yamlStream.CopyFrom(..BuildYamlRows(tree, 0)) + return yamlStream +} + +/// Escapes backslashes and double-quotes for use inside YAML double-quoted strings. +ClassMethod EscapeYamlString(val As %String) As %String [ Private ] +{ + set val = $replace(val, "\", "\\") + set val = $replace(val, """", "\""") + return val +} + +/// Returns a stream of YAML rows. failuresOnly=1 skips passing methods/assertions. +/// Each method's assertions are emitted under an `asserts:` list to produce valid YAML. 
+ClassMethod BuildYamlRows(tree As %DynamicObject, failuresOnly As %Boolean) As %Stream.TmpCharacter [ Private ] +{ + set indent = $select(failuresOnly: " ", 1: " ") + set stream = ##class(%Stream.TmpCharacter).%New() + set (currentSuite, currentCase) = "" + set suiteIter = tree.suites.%GetIterator() + while suiteIter.%GetNext(, .suiteObj) { + set caseIter = suiteObj.cases.%GetIterator() + while caseIter.%GetNext(, .caseObj) { + if caseObj.error '= "" { + if suiteObj.name '= currentSuite { + set currentSuite = suiteObj.name + set currentCase = "" + do stream.WriteLine(indent_"- suiteName: """_..EscapeYamlString(suiteObj.name)_"""") + do stream.WriteLine(indent_" testcases:") + } + do stream.WriteLine(indent_" - testcaseName: """_..EscapeYamlString(caseObj.name)_"""") + do stream.WriteLine(indent_" error: |") + do stream.WriteLine(indent_" "_$replace(caseObj.error, $char(10), $char(10)_indent_" ")) + } + set methodIter = caseObj.methods.%GetIterator() + while methodIter.%GetNext(, .methodObj) { + if failuresOnly && (methodObj.status '= "failed") { continue } + if suiteObj.name '= currentSuite { + set currentSuite = suiteObj.name + set currentCase = "" + do stream.WriteLine(indent_"- suiteName: """_..EscapeYamlString(suiteObj.name)_"""") + do stream.WriteLine(indent_" testcases:") + } + if caseObj.name '= currentCase { + set currentCase = caseObj.name + do stream.WriteLine(indent_" - testcaseName: """_..EscapeYamlString(caseObj.name)_"""") + do stream.WriteLine(indent_" methods:") + } + do stream.WriteLine(indent_" - methodName: """_..EscapeYamlString(methodObj.name)_"""") + do stream.WriteLine(indent_" status: """_methodObj.status_"""") + if methodObj.error '= "" { + do stream.WriteLine(indent_" error: |") + do stream.WriteLine(indent_" "_$replace(methodObj.error, $char(10), $char(10)_indent_" ")) + } + if methodObj.asserts.%Size() > 0 { + do stream.WriteLine(indent_" asserts:") + set assertIter = methodObj.asserts.%GetIterator() + while assertIter.%GetNext(, 
.assertObj) { + if failuresOnly && (assertObj.status '= "failed") { continue } + do stream.WriteLine(indent_" - action: """_..EscapeYamlString(assertObj.action)_"""") + do stream.WriteLine(indent_" counter: "_assertObj.counter) + do stream.WriteLine(indent_" status: """_assertObj.status_"""") + do stream.WriteLine(indent_" description: |") + do stream.WriteLine(indent_" "_$replace(assertObj.description, $char(10), $char(10)_indent_" ")) + do stream.WriteLine(indent_" location: """_..EscapeYamlString(assertObj.location)_"""") + } + } + } + } + } + return stream +} + +} diff --git a/src/cls/IPM/Utils/Module.cls b/src/cls/IPM/Utils/Module.cls index d41928965..d7cbd19f3 100644 --- a/src/cls/IPM/Utils/Module.cls +++ b/src/cls/IPM/Utils/Module.cls @@ -991,7 +991,7 @@ ClassMethod GetModuleNameFromXML( /// 1 /// /// ``` -/// +/// /// Returns results as multidimensional array ClassMethod GetModuleDefaultsFromXML( pDirectory As %String, diff --git a/src/cls/IPM/Utils/OutputSuppressor.cls b/src/cls/IPM/Utils/OutputSuppressor.cls new file mode 100644 index 000000000..fd21d0672 --- /dev/null +++ b/src/cls/IPM/Utils/OutputSuppressor.cls @@ -0,0 +1,32 @@ +/// Suppresses all output by switching to the null device. +/// Instantiate via %New() to begin suppression; output is restored automatically +/// when the instance goes out of scope (or on error via %OnClose). +/// Safe to nest inside or outside BeginCaptureOutput. 
+Class %IPM.Utils.OutputSuppressor Extends %RegisteredObject +{ + +Property PreviousDevice As %String [ Private ]; + +Property NullDevice As %String [ Private ]; + +Method %OnNew() As %Status +{ + set ..PreviousDevice = $io + set ..NullDevice = ##class(%Library.Device).GetNullDevice() + try { + open ..NullDevice + use ..NullDevice + } catch ex { + return ex.AsStatus() + } + return $$$OK +} + +Method %OnClose() As %Status +{ + close ..NullDevice + use ..PreviousDevice + return $$$OK +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/TestOutputFormat.cls b/tests/integration_tests/Test/PM/Integration/TestOutputFormat.cls new file mode 100644 index 000000000..d761dc316 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/TestOutputFormat.cls @@ -0,0 +1,340 @@ +Class Test.PM.Integration.TestOutputFormat Extends Test.PM.Integration.Base +{ + +/// The module is loaded once for the class rather than per-method because each `test` run +/// is fast and re-loading would dominate test time. This is safe because: +/// (1) each test method triggers its own `test` run, producing a new ^UnitTest.Result index, +/// (2) all tests that need the latest index use $order(^UnitTest.Result(""),-1), so they +/// always query their own run rather than a stale one, +/// (3) the test module has no side effects outside ^UnitTest.Result. +/// Adding new test methods: trigger a fresh `test` run or re-use the latest index — do not +/// rely on a specific index value from a previous test method. +Method OnBeforeAllTests() As %Status +{ + set sc = ##class(%IPM.Main).Shell("load " _ ..GetModuleDir("test-output-format")) + do $$$AssertStatusOK(sc, "Loaded test-output-format module") + return sc +} + +Method OnAfterAllTests() As %Status +{ + do ##class(%IPM.Main).Shell("config delete TestReportFormat") + do ##class(%IPM.Main).Shell("uninstall test-output-format") + return $$$OK +} + +/// Reset format config before each test so format-specific tests don't pollute each other. 
+Method OnBeforeOneTest(testName As %String) As %Status +{ + do ##class(%IPM.Main).Shell("config delete TestReportFormat") + return $$$OK +} + +/// Helper: join captured output array into a single string +ClassMethod JoinOutput(ByRef pOutput) As %String [ Private ] +{ + set result = "" + set i = "" + for { + set i = $order(pOutput(i)) + quit:i="" + set result = result _ pOutput(i) _ $char(10) + } + return result +} + + +/// Default (no -f, no config): summary + legacy FAILED lines, no formatted sections. +Method TestDefaultOutput() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "12 failed", "summary reports 12 failed methods") + do $$$AssertNotTrue(content [ "failures[", "no toon failures section without -f") + do $$$AssertNotTrue(content [ "results[", "no toon results section without -f") +} + +/// Verbose without -f: summary only (no results/failures section), plus legacy FAILED lines. +Method TestVerboseOutput() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -verbose") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "12 failed", "summary reports 12 failed methods") + do $$$AssertNotTrue(content [ "results[", "no results section without -f") + do $$$AssertNotTrue(content [ "failures[", "no failures section without -f") +} + +/// -f toon: summary + failures section with 13 rows. 
+Method TestOutputFormatToon() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -f toon") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "12 failed", "summary reports 12 failed methods") + do $$$AssertTrue(content [ "failures[13]", "failures section has 13 rows") + do $$$AssertNotTrue(content [ "results[", "no results section in default mode") +} + +/// -f toon -verbose: results section with all rows, no failures section. +Method TestVerboseOutputFormatToon() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -verbose -f toon") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "12 failed", "summary reports 12 failed methods") + do $$$AssertTrue(content [ "results[", "results section present with -verbose -f toon") + do $$$AssertNotTrue(content [ "failures[", "no failures section in verbose mode") +} + +/// -f json: summary + failures as JSON object. 
+Method TestOutputFormatJson() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -f json") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + + set raw = ..JoinOutput(.output) + set jsonStart = $find(raw, "{") - 1 + set jsonEnd = $length(raw) - $find($reverse(raw), "}") + 2 + set jsonStr = $extract(raw, jsonStart, jsonEnd) + + set parsed = ##class(%DynamicAbstractObject).%FromJSON(jsonStr) + do $$$AssertTrue($isobject(parsed), "JSON output is parseable") + do $$$AssertTrue($isobject(parsed.failures), "JSON output has failures array") + do $$$AssertEquals(parsed.failures.%Size(), 13, "JSON failures array has 13 entries") + do $$$AssertNotTrue($isobject(parsed.methods), "JSON output does not have methods array in default mode") +} + +/// -f json -verbose: methods array with all methods, no failures array. +Method TestVerboseOutputFormatJson() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -verbose -f json") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + + set raw = ..JoinOutput(.output) + set jsonStart = $find(raw, "{") - 1 + set jsonEnd = $length(raw) - $find($reverse(raw), "}") + 2 + set jsonStr = $extract(raw, jsonStart, jsonEnd) + + set parsed = ##class(%DynamicAbstractObject).%FromJSON(jsonStr) + do $$$AssertTrue($isobject(parsed), "Verbose JSON output is parseable") + do $$$AssertTrue($isobject(parsed.methods), "Verbose JSON output has methods array") + do $$$AssertTrue(parsed.methods.%Size() > 0, "Verbose JSON methods array is non-empty") + do $$$AssertNotTrue($isobject(parsed.failures), "Verbose JSON output does not have failures array") +} + +/// -f yaml: summary + failures block. 
+Method TestOutputFormatYaml() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -f yaml") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "failed: 12", "YAML summary reports 12 failed methods") + do $$$AssertTrue(content [ ($char(10)_"failures:"), "YAML output has failures block") + do $$$AssertNotTrue(content [ ($char(10)_"results:"), "YAML output has no results block in default mode") +} + +/// -f yaml -verbose: results block present, no failures block. +Method TestVerboseOutputFormatYaml() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -verbose -f yaml") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ ($char(10)_"results:"), "Verbose YAML output has results block") + do $$$AssertNotTrue(content [ ($char(10)_"failures:"), "Verbose YAML output has no failures block") +} + +/// Config default: setting TestReportFormat=json makes default output use json. 
+Method TestConfigDefault() +{ + do ##class(%IPM.Main).Shell("config set TestReportFormat json") + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set raw = ..JoinOutput(.output) + set jsonStart = $find(raw, "{") - 1 + set jsonEnd = $length(raw) - $find($reverse(raw), "}") + 2 + set jsonStr = $extract(raw, jsonStart, jsonEnd) + + set parsed = ##class(%DynamicAbstractObject).%FromJSON(jsonStr) + do $$$AssertTrue($isobject(parsed), "Config default JSON output is parseable") + do $$$AssertTrue($isobject(parsed.failures), "Config default shows failures") +} + +/// -f overrides config: config=yaml but -f json should produce JSON. +Method TestFormatOverridesConfig() +{ + do ##class(%IPM.Main).Shell("config set TestReportFormat yaml") + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -f json") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set raw = ..JoinOutput(.output) + set jsonStart = $find(raw, "{") - 1 + set jsonEnd = $length(raw) - $find($reverse(raw), "}") + 2 + set jsonStr = $extract(raw, jsonStart, jsonEnd) + + set parsed = ##class(%DynamicAbstractObject).%FromJSON(jsonStr) + do $$$AssertTrue($isobject(parsed), "-f json overrides config=yaml") +} + +/// Quiet output: suppresses test runner noise, shows summary only (no failures section without -f). +/// Known gap: two pre-phase lines ([USER|ZPM] Test START, Building dependency graph) still escape suppression. 
+Method TestQuietOutput() +{ + do ##class(%IPM.Utils.Module).BeginCaptureOutput(.cookie) + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -quiet") + do ##class(%IPM.Utils.Module).EndCaptureOutput(cookie, .output) + + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + set content = ..JoinOutput(.output) + do $$$AssertTrue(content [ "12 failed", "summary reports 12 failed methods in quiet mode") + do $$$AssertNotTrue(content [ "failures[", "no failures section without -f") +} + +/// -output-file .toon: ToFile always writes all rows under results[N], not failures-only. +Method TestOutputFileToon() +{ + set tmpFile = ##class(%Library.File).TempFilename() _ ".toon" + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -output-file """ _ tmpFile _ """") + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + do $$$AssertTrue(##class(%File).GetFileSize(tmpFile) > 0, "Toon output file is non-empty") + + set fileStream = ##class(%Stream.FileCharacter).%New() + $$$ThrowOnError(fileStream.LinkToFile(tmpFile)) + set content = "" + while 'fileStream.AtEnd { + set content = content _ fileStream.ReadLine() _ $char(10) + } + do $$$AssertTrue(content [ "unitTest:", "Toon file has unitTest header") + do $$$AssertTrue(content [ "results[", "Toon file has results header") + do $$$AssertTrue(content [ "deliberate failure", "Toon file contains known failure text") + do ##class(%File).Delete(tmpFile) +} + +/// -output-file .yaml: file contains unitTest header and results section. 
+Method TestOutputFileYaml() +{ + set tmpFile = ##class(%Library.File).TempFilename() _ ".yaml" + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -output-file """ _ tmpFile _ """") + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + do $$$AssertTrue(##class(%File).GetFileSize(tmpFile) > 0, "YAML output file is non-empty") + + set fileStream = ##class(%Stream.FileCharacter).%New() + $$$ThrowOnError(fileStream.LinkToFile(tmpFile)) + set content = "" + while 'fileStream.AtEnd { + set content = content _ fileStream.ReadLine() _ $char(10) + } + do $$$AssertTrue(content [ "unitTest:", "YAML file has unitTest header") + do $$$AssertTrue(content [ "id: ", "YAML file has id field") + do $$$AssertTrue(content [ "namespace:", "YAML file has namespace field") + do $$$AssertTrue(content [ "results:", "YAML file has results section") + do ##class(%File).Delete(tmpFile) +} + +/// -output-file .json: file is valid JSON with expected structure. +Method TestOutputFileJson() +{ + set tmpFile = ##class(%Library.File).TempFilename() _ ".json" + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -output-file """ _ tmpFile _ """") + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + do $$$AssertTrue(##class(%File).GetFileSize(tmpFile) > 0, "JSON output file is non-empty") + + set fileStream = ##class(%Stream.FileCharacter).%New() + $$$ThrowOnError(fileStream.LinkToFile(tmpFile)) + set parsed = ##class(%DynamicAbstractObject).%FromJSON(fileStream) + do $$$AssertTrue($isobject(parsed), "JSON file is parseable") + do $$$AssertTrue(parsed.id '= "", "JSON file has id field") + do $$$AssertTrue(parsed.namespace '= "", "JSON file has namespace field") + do $$$AssertTrue($isobject(parsed.suites), "JSON file has suites array") + do ##class(%File).Delete(tmpFile) +} + +/// -output-file .xml: file is valid JUnit XML. 
+Method TestOutputFileXml() +{ + set tmpFile = ##class(%Library.File).TempFilename() _ ".xml" + set sc = ##class(%IPM.Main).Shell("test test-output-format -only -output-file """ _ tmpFile _ """") + do $$$AssertStatusNotOK(sc, "test run returns error when failures exist") + do $$$AssertTrue(##class(%File).GetFileSize(tmpFile) > 0, "XML output file is non-empty") + + set fileStream = ##class(%Stream.FileCharacter).%New() + $$$ThrowOnError(fileStream.LinkToFile(tmpFile)) + set content = "" + while 'fileStream.AtEnd { + set content = content _ fileStream.ReadLine() _ $char(10) + } + do $$$AssertTrue(content [ "", "XML file has root element") + do $$$AssertTrue(content [ "", "XML file has closing ") + do ##class(%File).Delete(tmpFile) +} + +/// GetSummary returns correct counts for the test-output-format module run. +Method TestGetSummary() +{ + set sc = ##class(%IPM.Main).Shell("test test-output-format -only") + set testIndex = $order(^UnitTest.Result(""), -1) + + set summary = ##class(%IPM.Test.Abstract).GetSummary(testIndex) + do $$$AssertTrue($isobject(summary), "GetSummary returns an object") + do $$$AssertTrue($isobject(summary.methods), "Summary has a methods sub-object") + do $$$AssertTrue($isobject(summary.assertions), "Summary has an assertions sub-object") + do $$$AssertEquals(summary.methods.failed, 12, "Summary reports 12 failed methods") + do $$$AssertEquals(summary.methods.passed, 6, "Summary reports 6 passed methods") + do $$$AssertEquals(summary.methods.passed + summary.methods.failed, summary.methods.total, "Method counts add up") + do $$$AssertEquals(summary.assertions.passed + summary.assertions.failed, summary.assertions.total, "Assertion counts add up") +} + +/// BuildResultTree returns correct structure for the test-output-format module run. 
+Method TestBuildResultTree() +{ + set sc = ##class(%IPM.Main).Shell("test test-output-format -only") + set testIndex = $order(^UnitTest.Result(""), -1) + + set tree = ##class(%IPM.Test.Abstract).BuildResultTree(testIndex) + do $$$AssertTrue($isobject(tree), "BuildResultTree returns an object") + do $$$AssertTrue(tree.id '= "", "Tree has an id") + do $$$AssertTrue(tree.namespace '= "", "Tree has a namespace") + do $$$AssertTrue($isobject(tree.suites), "Tree has a suites array") + do $$$AssertTrue(tree.suites.%Size() > 0, "Tree has at least one suite") + + set suiteObj = tree.suites.%Get(0) + do $$$AssertTrue($isobject(suiteObj), "First suite is an object") + do $$$AssertTrue(suiteObj.name '= "", "Suite has a name") + do $$$AssertTrue($isobject(suiteObj.cases), "Suite has a cases array") + + set caseObj = suiteObj.cases.%Get(0) + do $$$AssertTrue($isobject(caseObj), "First case is an object") + do $$$AssertTrue($isobject(caseObj.methods), "Case has a methods array") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/module.xml b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/module.xml new file mode 100644 index 000000000..effe0d57a --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/module.xml @@ -0,0 +1,12 @@ + + + + + test-output-format + 0.0.1 + module + src + + + + diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllReturn.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllReturn.cls new file mode 100644 index 000000000..ef1a33ead --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllReturn.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnAfterAllTests returning an error status; all test methods run first. 
+Class Test.Output.Format.AfterAllReturn Extends %UnitTest.TestCase +{ + +Method OnAfterAllTests() As %Status +{ + return $$$ERROR($$$GeneralError, "deliberate OnAfterAllTests return-error") +} + +Method TestPassesNormally() +{ + do $$$AssertTrue(1, "this runs and passes; failure occurs only in OnAfterAllTests") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllThrow.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllThrow.cls new file mode 100644 index 000000000..5aa65734a --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterAllThrow.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnAfterAllTests throwing; all test methods run first. +Class Test.Output.Format.AfterAllThrow Extends %UnitTest.TestCase +{ + +Method OnAfterAllTests() As %Status +{ + $$$ThrowStatus($$$ERROR($$$GeneralError, "deliberate OnAfterAllTests throw")) +} + +Method TestPassesNormally() +{ + do $$$AssertTrue(1, "this runs and passes; failure occurs only in OnAfterAllTests") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneReturn.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneReturn.cls new file mode 100644 index 000000000..00322e7e2 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneReturn.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnAfterOneTest returning an error status; assertions are recorded before teardown fails. 
+Class Test.Output.Format.AfterOneReturn Extends %UnitTest.TestCase +{ + +Method OnAfterOneTest(testName As %String) As %Status +{ + return $$$ERROR($$$GeneralError, "deliberate OnAfterOneTest return-error for "_testName) +} + +Method TestMethodBodyPasses() +{ + do $$$AssertTrue(1, "the method body runs and this assertion is recorded before teardown fails") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneThrow.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneThrow.cls new file mode 100644 index 000000000..2cd9a4bdc --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/AfterOneThrow.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnAfterOneTest throwing; assertions are recorded before teardown throws. +Class Test.Output.Format.AfterOneThrow Extends %UnitTest.TestCase +{ + +Method OnAfterOneTest(testName As %String) As %Status +{ + $$$ThrowStatus($$$ERROR($$$GeneralError, "deliberate OnAfterOneTest throw for "_testName)) +} + +Method TestMethodBodyPasses() +{ + do $$$AssertTrue(1, "the method body runs and this assertion is recorded before teardown throws") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllReturn.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllReturn.cls new file mode 100644 index 000000000..07c4aebfe --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllReturn.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnBeforeAllTests returning an error status; no test methods run. 
+Class Test.Output.Format.BeforeAllReturn Extends %UnitTest.TestCase +{ + +Method OnBeforeAllTests() As %Status +{ + return $$$ERROR($$$GeneralError, "deliberate OnBeforeAllTests return-error") +} + +Method TestWouldPass() +{ + do $$$AssertTrue(1, "this would pass if OnBeforeAllTests had not failed") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllThrow.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllThrow.cls new file mode 100644 index 000000000..d6bb2d300 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeAllThrow.cls @@ -0,0 +1,15 @@ +/// Demonstrates OnBeforeAllTests throwing; no test methods run. +Class Test.Output.Format.BeforeAllThrow Extends %UnitTest.TestCase +{ + +Method OnBeforeAllTests() As %Status +{ + $$$ThrowStatus($$$ERROR($$$GeneralError, "deliberate OnBeforeAllTests throw")) +} + +Method TestWouldPass() +{ + do $$$AssertTrue(1, "this would pass if OnBeforeAllTests had not thrown") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneReturn.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneReturn.cls new file mode 100644 index 000000000..ea19c2313 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneReturn.cls @@ -0,0 +1,20 @@ +/// Demonstrates OnBeforeOneTest returning an error status; each method is individually aborted. 
+Class Test.Output.Format.BeforeOneReturn Extends %UnitTest.TestCase +{ + +Method OnBeforeOneTest(testName As %String) As %Status +{ + return $$$ERROR($$$GeneralError, "deliberate OnBeforeOneTest return-error for "_testName) +} + +Method TestFirstMethod() +{ + do $$$AssertTrue(1, "this would pass if OnBeforeOneTest had not failed") +} + +Method TestSecondMethod() +{ + do $$$AssertTrue(1, "this would also pass if OnBeforeOneTest had not failed") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneThrow.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneThrow.cls new file mode 100644 index 000000000..a6cabe7f2 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/BeforeOneThrow.cls @@ -0,0 +1,20 @@ +/// Demonstrates OnBeforeOneTest throwing; each method is individually aborted. +Class Test.Output.Format.BeforeOneThrow Extends %UnitTest.TestCase +{ + +Method OnBeforeOneTest(testName As %String) As %Status +{ + $$$ThrowStatus($$$ERROR($$$GeneralError, "deliberate OnBeforeOneTest throw for "_testName)) +} + +Method TestFirstMethod() +{ + do $$$AssertTrue(1, "this would pass if OnBeforeOneTest had not thrown") +} + +Method TestSecondMethod() +{ + do $$$AssertTrue(1, "this would also pass if OnBeforeOneTest had not thrown") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodAssertFailure.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodAssertFailure.cls new file mode 100644 index 000000000..4b78cd2f1 --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodAssertFailure.cls @@ -0,0 +1,23 @@ +/// Demonstrates assertion failures in test method bodies. 
+Class Test.Output.Format.MethodAssertFailure Extends %UnitTest.TestCase +{ + +Method TestPassingAssertions() +{ + do $$$AssertTrue(1, "one is true") + do $$$AssertEquals("hello", "hello", "strings match") +} + +Method TestFailingAssertions() +{ + do $$$AssertTrue(1, "this passes") + do $$$AssertTrue(0, "deliberate failure: zero is not true") + do $$$AssertEquals("expected", "actual", "deliberate mismatch") +} + +Method TestAnotherPass() +{ + do $$$AssertNotTrue(0, "zero is not true") +} + +} diff --git a/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodThrowFailure.cls b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodThrowFailure.cls new file mode 100644 index 000000000..a9bb2b42f --- /dev/null +++ b/tests/integration_tests/Test/PM/Integration/_data/test-output-format/tests/Test/Output/Format/MethodThrowFailure.cls @@ -0,0 +1,21 @@ +/// Demonstrates a thrown error in a test method body. +Class Test.Output.Format.MethodThrowFailure Extends %UnitTest.TestCase +{ + +Method TestPassesNormally() +{ + do $$$AssertTrue(1, "this passes before the throwing method runs") +} + +Method TestThrowsFromBody() +{ + do $$$AssertTrue(1, "this assertion runs before the throw") + $$$ThrowStatus($$$ERROR($$$GeneralError, "deliberate throw from test body")) +} + +Method TestAfterThrow() +{ + do $$$AssertTrue(1, "this method runs independently after the throwing method") +} + +} diff --git a/tests/unit_tests/Test/PM/Unit/CLI.cls b/tests/unit_tests/Test/PM/Unit/CLI.cls index 8e9f2329a..15b5e7254 100644 --- a/tests/unit_tests/Test/PM/Unit/CLI.cls +++ b/tests/unit_tests/Test/PM/Unit/CLI.cls @@ -352,4 +352,25 @@ Method TestUninstallWithoutModuleName() do $$$AssertNotTrue(exists, "Module removed successfully.") } +/// Specifies the serialization format (json, toon, yaml) for unit and integration test results in the shell. 
+Method TestReportFormatConfiguration() +{ + set originalFormat = ##class(%IPM.Repo.UniversalSettings).GetTestReportFormat() + + do ..RunCommand("config set TestReportFormat json") + set format = ##class(%IPM.Repo.UniversalSettings).GetTestReportFormat() + do $$$AssertEquals(format, "json", "Verify TestReportFormat is set to JSON") + + do ..RunCommand("config set TestReportFormat yaml") + set format = ##class(%IPM.Repo.UniversalSettings).GetTestReportFormat() + do $$$AssertEquals(format, "yaml", "Verify TestReportFormat is set to YAML") + + // Restore original value so this test doesn't pollute subsequent tests + if originalFormat'="" { + do ..RunCommand("config set TestReportFormat "_originalFormat) + } else { + do ..RunCommand("config delete TestReportFormat") + } +} + }