
I'm wondering whether anyone has successfully migrated their test runner from SpecRun (SpecFlow + Runner) to NUnit, keeping SpecFlow in both cases. We are currently in the process, but we're finding it difficult because of how our infrastructure is set up.

Our main concern is that we insert our test results into MongoDB. The current test result format, shown below, is the SpecRun report rendered as JSON (a simplified sketch of the ingestion step follows the sample).

{
  "TestResult": {
    "Project": "TestAutomation.Project.Test",
    "Build": "$buildId",
    "Configuration": "TestAutomation.Project.Test",
    "TestAssemblies": "TestAutomation.Project.Test.dll",
    "StartTime": 1677577330160,
    "Duration": "00:00:31.6273096",
    "TestThreads": "3",
    "BuildRequestedBy": "$buildRequestedBy",
    "DevBranch": "$devBranch",
    "TestUrl": "$testUrl",
    "TestAssemblyVersion": "1.0.0.0",
    "TestBuildRequestedBy": "$testBuildRequestor",
    "TestBranch": "$testBranch",
    "TestSummary": {
      "TotalTests": 1,
      "Succeeded": 1,
      "Failed": 0,
      "Pending": 0,
      "Ignored": 0,
      "Skipped": 0,
      "Inconclusive": 0
    },
    "ScenarioSummary": [
      {
        "Feature": "Sample",
        "Description": "In order to avoid silly mistakes  As a math idiot  I want to be told the sum of two numbers",
        "Scenario": "Add two numbers",
        "Retry": 0,
        "ScenarioTarget": "UnitTest",
        "ScenarioTags": "Sample, Author:testuser, SEV:Low, testuser-18, UnitTest, testuser-17",
        "TotalTests": 1,
        "Succeeded": 1,
        "Failed": 0,
        "RandomlyFailed": 0,
        "Pending": 0,
        "Ignored": 0,
        "Skipped": 0
      }
    ],
    "ExecutionDetails": [
      {
        "Scenario": "Add two numbers",
        "Feature": "Sample",
        "ScenarioTags": "Sample, Author:testuser, SEV:Low, SampleTest-18, UnitTest, SampleTest-17",
        "ScenarioTarget": "UnitTest",
        "Sequence": 1,
        "Status": "Succeeded",
        "ExecutionTime": "18.0126312",
        "Thread": 0,
        "Error": "",
        "TestOutcome": [
          {
            "Steps": "Given I have entered 50 into the calculator",
            "Trace": "done: SampleStepDefinition.GivenIHaveEnteredIntoTheCalculator(50) (0.0s)",
            "Result": "Succeeded in 14.892s"
          },
          {
            "Steps": "And I have entered 70 into the calculator",
            "Trace": "done: SampleStepDefinition.GivenIHaveEnteredIntoTheCalculator(70) (0.0s)",
            "Result": "Succeeded in 1.277s"
          },
          {
            "Steps": "When I press add",
            "Trace": "done: SampleStepDefinition.WhenIPressAdd() (0.0s)",
            "Result": "Succeeded in 1.402s"
          },
          {
            "Steps": "Then the result should be 120 on the screen",
            "Trace": "28/02/2023 09:42:38 - Info: Addition performed correctly. done: SampleStepDefinition.ThenTheResultShouldBeOnTheScreen(120) (0.0s)",
            "Result": "Succeeded in 0.173s"
          }
        ]
      }
    ]
  }
}
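
For context, the ingestion step itself is simple: we read the rendered report and insert it as one document per run. A simplified sketch with the MongoDB .NET driver (the connection string, database and collection names below are placeholders; the real values come from build variables):

using System.IO;
using MongoDB.Bson;
using MongoDB.Driver;

// Simplified sketch of the ingestion step; names are placeholders.
var client = new MongoClient("mongodb://localhost:27017");
var runs = client.GetDatabase("TestResults").GetCollection<BsonDocument>("Runs");

// The SpecRun report template (shown further down) renders the JSON above;
// we load the rendered file and insert it as one document per test run.
string reportJson = File.ReadAllText("TestResult.json");
runs.InsertOne(BsonDocument.Parse(reportJson));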
    

Now that we are running with NUnit, the sample output is XML, as shown below.

<?xml version="1.0" encoding="utf-8" standalone="no"?>
<test-run id="0" name="TestAutomation.Project.Test.dll" fullname="D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1\TestAutomation.Project.Test.dll" runstate="Runnable" testcasecount="2" result="Passed" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0" engine-version="3.13.2.0" clr-version="3.1.32" start-time="2023-05-16 17:41:27Z" end-time="2023-05-16 17:41:27Z" duration="0.632169">
  <command-line><![CDATA[D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1\testhost.dll --port 63401 --endpoint 127.0.0.1:063401 --role client --parentprocessid 18980 --telemetryoptedin true]]></command-line>
  <filter>
    <test>TestAutomation.Project.Test.FeatureFiles.SampleFeature.SampleAddTwoNumbersForScenario </test>
  </filter>
  <test-suite type="Assembly" id="0-1006" name="TestAutomation.Project.Test.dll" fullname="D:/TestRepos/test-automation-test-runner/TestAutomation.Project.Test/bin/Debug/netcoreapp3.1/TestAutomation.Project.Test.dll" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.1652391Z" end-time="2023-05-16T17:41:27.7465614Z" duration="0.581235" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
    <environment framework-version="3.13.2.0" clr-version="3.1.32" os-version="Microsoft Windows 10.0.19044" platform="Win32NT" cwd="D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1" machine-name="testvdi" user="testuser" user-domain="testdomain" culture="en-IE" uiculture="en-US" os-architecture="x64" />
    <settings>
      <setting name="SynchronousEvents" value="False" />
      <setting name="InternalTraceLevel" value="Debug" />
      <setting name="RandomSeed" value="1170503708" />
      <setting name="SkipNonTestAssemblies" value="True" />
      <setting name="ProcessModel" value="InProcess" />
      <setting name="DomainUsage" value="Single" />
      <setting name="DefaultTestNamePattern" value="{m} {a}" />
      <setting name="WorkDirectory" value="D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1" />
      <setting name="NumberOfTestWorkers" value="4" />
    </settings>
    <properties>
      <property name="ParallelScope" value="Fixtures" />
      <property name="LevelOfParallelism" value="1" />
      <property name="_PID" value="16960" />
      <property name="_APPDOMAIN" value="testhost" />
    </properties>
    <test-suite type="SetUpFixture" id="0-1000" name="[default namespace]" fullname="SampleTest_TestAutomation_Shared_Test_NUnitAssemblyHooks" classname="SampleTest_TestAutomation_Shared_Test_NUnitAssemblyHooks" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.1781461Z" end-time="2023-05-16T17:41:27.7464325Z" duration="0.568280" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
      <output><![CDATA[-> Loading plugin D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1\TechTalk.SpecFlow.NUnit.SpecFlowPlugin.dll
-> Loading plugin D:\TestRepos\test-automation-test-runner\TestAutomation.Project.Test\bin\Debug\netcoreapp3.1\TestAutomation.Project.Test.dll
-> Using default config
]]></output>
      <test-suite type="TestSuite" id="0-1007" name="SampleTest" fullname="SampleTest" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.6577402Z" end-time="2023-05-16T17:41:27.7431816Z" duration="0.085439" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
        <test-suite type="TestSuite" id="0-1008" name="TestAutomation" fullname="SampleTest.TestAutomation" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.6578336Z" end-time="2023-05-16T17:41:27.7431210Z" duration="0.085287" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
          <test-suite type="TestSuite" id="0-1009" name="Shared" fullname="TestAutomation.Project" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.6578735Z" end-time="2023-05-16T17:41:27.7429799Z" duration="0.085106" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
            <test-suite type="TestSuite" id="0-1010" name="Test" fullname="TestAutomation.Project.Test" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.6579081Z" end-time="2023-05-16T17:41:27.7429414Z" duration="0.085033" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
              <test-suite type="TestSuite" id="0-1011" name="FeatureFiles" fullname="TestAutomation.Project.Test.FeatureFiles" runstate="Runnable" testcasecount="2" result="Passed" start-time="2023-05-16T17:41:27.6579410Z" end-time="2023-05-16T17:41:27.7428775Z" duration="0.084938" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
                <test-suite type="TestFixture" id="0-1004" name="SampleFeature" fullname="TestAutomation.Project.Test.FeatureFiles.SampleFeature" classname="TestAutomation.Project.Test.FeatureFiles.SampleFeature" runstate="Runnable" testcasecount="1" result="Passed" start-time="2023-05-16T17:41:27.6585409Z" end-time="2023-05-16T17:41:27.7422072Z" duration="0.083671" total="1" passed="1" failed="0" warnings="0" inconclusive="0" skipped="0" asserts="0">
                  <properties>
                    <property name="Description" value="Sample" />
                    <property name="Category" value="SampleTest-18" />
                  </properties>
                  <test-case id="0-1005" name="SampleAddTwoNumbersForScenario " fullname="TestAutomation.Project.Test.FeatureFiles.SampleFeature.SampleAddTwoNumbersForScenario " methodname="SampleAddTwoNumbersForScenario" classname="TestAutomation.Project.Test.FeatureFiles.SampleFeature" runstate="Runnable" seed="1104564193" result="Passed" start-time="2023-05-16T17:41:27.6633163Z" end-time="2023-05-16T17:41:27.7356565Z" duration="0.072463" asserts="0">
                    <properties>
                      <property name="Description" value="Sample Add two numbers for Scenario" />
                      <property name="Category" value="Sample" />
                      <property name="Category" value="Author:testuser" />
                      <property name="Category" value="SEV:Low" />
                      <property name="Category" value="SampleTest-17" />
                      <property name="Category" value="UnitTest" />
                    </properties>
                    <output><![CDATA[Given I have entered 50 into the calculator
-> done: SampleStepDefinition.GivenIHaveEnteredIntoTheCalculator(50) (0.0s)
And I have entered 70 into the calculator
-> done: SampleStepDefinition.GivenIHaveEnteredIntoTheCalculator(70) (0.0s)
When I press add
-> done: SampleStepDefinition.WhenIPressAdd() (0.0s)
Then the result should be 120 on the screen
-> 16/05/2023 17:41:27 - Info: Addition performed correctly.
-> done: SampleStepDefinition.ThenTheResultShouldBeOnTheScreen(120) (0.0s)
]]></output>
                  </test-case>
                </test-suite>
              </test-suite>
            </test-suite>
          </test-suite>
        </test-suite>
      </test-suite>
    </test-suite>
  </test-suite>
</test-run>

Has anyone been able to convert NUnit test results into the format we had before with SpecRun?
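
To make the gap concrete, the direction we have started experimenting with is post-processing NUnit's TestResult.xml with LINQ to XML and reshaping it into the old document: summary counts from the <test-run> attributes, one ExecutionDetails entry per <test-case>, and the step text taken verbatim from the CDATA output. This is only a rough sketch under those assumptions; the anonymous object shape is ours, and fields like RandomlyFailed, Pending, Ignored and Retry have no direct NUnit equivalent, which is part of the problem.

using System;
using System.Linq;
using System.Text.Json;
using System.Xml.Linq;

// Rough sketch only: reshape an NUnit 3 TestResult.xml into the old SpecRun-style
// document. The object shape is ours; the per-step Steps/Trace/Result split and the
// Pending/Ignored/RandomlyFailed counts have no direct NUnit equivalent here.
var run = XDocument.Load("TestResult.xml").Element("test-run");
var testCases = run.Descendants("test-case").ToList();

var document = new
{
    TestResult = new
    {
        TestAssemblies = (string)run.Attribute("name"),
        StartTime = DateTimeOffset.Parse((string)run.Attribute("start-time")).ToUnixTimeMilliseconds(),
        Duration = (string)run.Attribute("duration"),
        TestSummary = new
        {
            TotalTests = (int)run.Attribute("total"),
            Succeeded = (int)run.Attribute("passed"),
            Failed = (int)run.Attribute("failed"),
            Skipped = (int)run.Attribute("skipped"),
            Inconclusive = (int)run.Attribute("inconclusive")
        },
        ExecutionDetails = testCases.Select((tc, i) => new
        {
            Scenario = ((string)tc.Attribute("name")).Trim(),
            Feature = (string)tc.Parent.Attribute("name"),   // the enclosing TestFixture suite
            ScenarioTags = string.Join(", ",
                tc.Element("properties")?.Elements("property")
                  .Where(p => (string)p.Attribute("name") == "Category")
                  .Select(p => (string)p.Attribute("value")) ?? Enumerable.Empty<string>()),
            Sequence = i + 1,
            Status = (string)tc.Attribute("result"),
            ExecutionTime = (string)tc.Attribute("duration"),
            Error = (string)tc.Element("failure")?.Element("message") ?? "",
            // Step-level detail only survives as the CDATA <output> block, so we keep
            // it as raw lines instead of the old Steps/Trace/Result objects.
            TestOutcome = (tc.Element("output")?.Value ?? "")
                .Split('\n', StringSplitOptions.RemoveEmptyEntries)
                .Select(line => line.Trim())
                .ToArray()
        }).ToArray()
    }
};

Console.WriteLine(JsonSerializer.Serialize(document, new JsonSerializerOptions { WriteIndented = true }));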

For reference, this is the custom report template we used with SpecRun:

@inherits SpecFlow.Plus.Runner.Reporting.CustomTemplateBase<TestRunResult>
@using System
@using System.Linq
@using System.Globalization
@using System.Collections.Generic
@using System.Diagnostics
@using System.IO
@using System.Reflection
@using System.Text.RegularExpressions
@using TechTalk.SpecRun.Framework
@using TechTalk.SpecRun.Framework.Results
@using TechTalk.SpecRun.Framework.TestSuiteStructure


@helper GetSummaryRowTail(TestCollectionResultSummary summary)
{
    int randomlyFailed = 0;
    MatchCollection matchCollection = Regex.Matches(summary.ConcludedResultMessage, @"([0-9]+) randomly failed");
    if (matchCollection.Count > 0)
    {
    matchCollection = Regex.Matches(matchCollection[0].Value, @"([0-9]+)");
    }

    if (matchCollection.Count > 0)
    {
    randomlyFailed = Convert.ToInt32(matchCollection[0].Value);
    }

    var resultCounts = summary.ResultCounts;
    int succeeded = resultCounts.Where(rc => rc.Key.IsInGroup(TestNodeResultTypeGroup.Success)).Sum(rc => rc.Value) + randomlyFailed;
    int failed = resultCounts.Where(rc => rc.Key.IsInGroup(TestNodeResultTypeGroup.Failure)).Sum(rc => rc.Value) - randomlyFailed;
    int pending = resultCounts.Where(rc => rc.Key.IsInGroup(TestNodeResultTypeGroup.Pending)).Sum(rc => rc.Value);
    int ignored = resultCounts.Where(rc => rc.Key.IsInGroup(TestNodeResultTypeGroup.Ignored)).Sum(rc => rc.Value);
    int skipped = resultCounts[TestNodeResultType.Skipped];
    int inconclusive = summary.Total - (succeeded + failed + pending + ignored + skipped);

    @:{"TotalTests": @summary.Total,
    @:"Succeeded":@succeeded,
    @:"Failed":@failed,
    @:"Pending":@pending,
    @:"Ignored":@ignored,
    @:"Skipped":@skipped,
    @:"Inconclusive":@inconclusive}
    }

    @helper GetSummaryRow(TestCollectionResultSummary summary, TestNode testNode, int level, TestNode fixtureNode)
    {
    string[] splitter = { "target:", "," };
    string featureTitle = GetFixtureTitle(fixtureNode);

    @:{
    @:"Feature": "@featureTitle.Split(splitter, StringSplitOptions.RemoveEmptyEntries).Last().Trim().Replace("(", "").Trim()",

    if (!string.IsNullOrEmpty(fixtureNode.Description))
    {
    @:"Description": "@string.Join(" ", Regex.Split(fixtureNode.Description, @"(?:\r\n|\n|\r|\t)")).Trim()",
    }

    @TestNodeLinks(testNode, 0)

    int totalPass = summary.Succeeded;
    int totalFailed = summary.TotalFailure;

    if (summary.RandomlyFailed > 0)
    {
    totalPass = summary.Succeeded + summary.RandomlyFailed;
    totalFailed = summary.TotalFailure - summary.RandomlyFailed;
    }
    @:"TotalTests": @summary.Total,
    @:"Succeeded":@totalPass,
    @:"Failed":@totalFailed,
    @:"RandomlyFailed":@summary.RandomlyFailed,
    @:"Pending":@summary.Pending,
    @:"Ignored":@summary.Ignored,
    @:"Skipped":@summary.Skipped}
    }

    @helper TestNodeLinks(TestNode testNode, int level)
    {
    if (testNode is TestItem)
    {
    @TestItemLinks((TestItem)testNode, level)
    }
    if (testNode is TestCollection)
    {
    @:"@testNode.Type": "@Regex.Replace(testNode.Title, @",\sExamples.\d,", ",")",
    foreach (TestNode subTestNode in ((TestCollection)testNode).SubNodes)
    {
    @TestNodeLinks(subTestNode, level + 1)
    }
    }
    if (!string.IsNullOrEmpty(testNode.TestTarget) && !testNode.TestTarget.Equals("Default"))
    {
    @:"ScenarioTarget": "@testNode.TestTarget",
    }
    if (testNode.Tags.Any())
    {
    @:"ScenarioTags": "@string.Join(", ", testNode.Tags)",
    }
    }

    @helper TestItemLinks(TestItem testItem, int level)
    {
    if (level == 0)
    {
    @:"@testItem.Type":"@Regex.Replace(testItem.Title, @",\sExamples.\d,", ",")",
    }
    else
    {
    @:"@Regex.Replace(testItem.Title, @",\sExamples.\d,", ",")",
    }

    @RetryCount(testItem)
    }

    @helper RetryCount(TestItem testItem)
    {
    var testItemResult = GetTestItemResult(testItem);
    if (testItemResult != null)
    {
    int retryCounter = 0;
    foreach (var retry in testItemResult.Executions.Skip(1))
    {
    retryCounter++;
    }
    @:"Retry": @retryCounter,
    }
    }

    @functions
        {
        string GetFixtureTitle(TestNode fixtureNode)
        {
            return fixtureNode.IsDefaultTestTarget ? fixtureNode.Title : string.Format("{0}", fixtureNode.Title, fixtureNode.TestTarget);
        }
    }

    {
    "TestResult": {
    "Project": "@Model.Configuration.ProjectName",
    "Build": "$buildId",
    "Configuration": "@Model.Configuration.Name",
    "TestAssemblies": "@string.Join(", ", Model.Configuration.TestAssemblyPaths).Replace("\\", "\\\\")",
    "StartTime": @Math.Round(Model.ExecutionTime.StartTime.ToUniversalTime().Subtract(new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).TotalMilliseconds),
    "Duration": "@Model.ExecutionTime.Duration",
    "TestThreads": "@Model.TestThreads.Count",
    "BuildRequestedBy": "$buildRequestedBy",
    "DevBranch": "$devBranch",
    "TestUrl": "$testUrl",
    @{
        string dll = string.Join(", ", Model.Configuration.TestAssemblyPaths).Replace("\\", "\\\\");
        string assemblyVersion = System.Reflection.Assembly.LoadFile(System.IO.Path.Combine(Model.Configuration.TestProfileSettings.BaseFolder, dll)).GetName().Version.ToString();
        @:"TestAssemblyVersion": "@assemblyVersion",
    }
    "TestBuildRequestedBy": "$testBuildRequestor",
    "TestBranch": "$testBranch",
    @if (Model.FrameworkError != null)
    {
        @:"Execution framework error": "@(Model.FrameworkError.ToString())",
    }

    "TestSummary": @GetSummaryRowTail(Model.Summary),
    "ScenarioSummary": [
    @{
        int count = GetTextFixtures().ToList().Count();
        int i = 0;
    }
    @foreach (var fixtureNode in GetTextFixtures())
    {
        i++;
        bool eventsCounterFlags = false;
        foreach (var testNode in fixtureNode.SubNodes)
        {
            if (eventsCounterFlags)
            {
                @:,
            }
            else
            {
                eventsCounterFlags = true;
            }
            var testSummary = GetSummary(testNode);
            @GetSummaryRow(testSummary, testNode, 0, fixtureNode)
        }
        if (count != i)
        {
            @:,
        }
    }],
    "ExecutionDetails": [
    @{
        int countResult = Model.TestExecutionResults.Count();
        int counter = 0;
    }

    @foreach (var test in Model.TestExecutionResults.OrderBy(testRow => testRow.ExecutionOrder))
    {
        counter++;
        @:{
        var testItem = test.TestItemResult.TestNode;
        var fullTestName = GetTestTitle(test);
        string scenario = Regex.Replace(fullTestName.Remove(fullTestName.LastIndexOf('(')).TrimEnd(), @",\sExamples.\d,", ",");
        @:"@testItem.Type": "@scenario",
        string[] splitter = { "target:", "," };
        string featureTitle = GetFixtureTitle(testItem.Parent);
        if (!string.IsNullOrEmpty(featureTitle))
        {
            @:"Feature": "@featureTitle.Split(splitter, StringSplitOptions.RemoveEmptyEntries).Last().Trim().Replace("(", "").Trim()",
        }
        if (!string.IsNullOrEmpty(testItem.Description))
        {
            @:"Scenario": "@Regex.Replace(string.Join(" ", Regex.Split(testItem.Description, @"(?:\r\n|\n|\r|\t)")).Replace("\\", "\\\\").Replace(@"\""", @""""), @",\sExamples.\d,", ",")",
        }
        if (testItem.Tags.Any())
        {
            @:"ScenarioTags": "@string.Join(", ", testItem.Tags)",
        }
        if (!string.IsNullOrEmpty(testItem.TestTarget) && !testItem.TestTarget.Equals("Default"))
        {
            @:"ScenarioTarget": "@testItem.TestTarget",
        }
        @:"Sequence": @counter,
        @:"Status": "@test.ResultType",
        @:"ExecutionTime": "@test.ExecutionTime.DurationSeconds",
        @:"Thread": @test.ThreadId,
        if (!string.IsNullOrEmpty(test.Result.Error))
        {
            @:"Error": "@string.Join(" ", Regex.Split(test.Result.Error, @"(?:\r\n|\n|\r|\t)")).Replace("\\", "\\\\")",
        }
        else
        {
            @:"Error":"",
        }
        @:"TestOutcome":[
        bool traceEventsCounterFlags = false;
        foreach (var traceEvent in test.Result.TraceEvents)
        {
            if (!IsRelevant(traceEvent))
            {
                continue;
            }
            if (traceEventsCounterFlags)
            {
                @:,
            }
            else
            {
                traceEventsCounterFlags = true;
            }

            var relatedNode = GetTestNode(traceEvent);

            @:{
            @:"Steps": "@Regex.Replace(traceEvent.BusinessMessages.Trim(), @"(?:\r\n|\n|\r|\t)", " ").Replace("\\", "\\\\").Replace("\"", " ")",

            if (!string.IsNullOrEmpty(traceEvent.Error))
            {
                if (!string.IsNullOrEmpty(traceEvent.StackTrace.TrimEnd()))
                {
                    @:"Trace": "@Regex.Replace(traceEvent.TechMessages.TrimEnd() + traceEvent.StackTrace.TrimEnd(), @"(?:\r\n|\n|\r|\t)", " ").Replace("\\", "\\\\").Replace("\"", " ")",
                }
                else
                {
                    @:"Trace": "@Regex.Replace(traceEvent.TechMessages.TrimEnd() + traceEvent.Error, @"(?:\r\n|\n|\r|\t)", " ").Replace("\\", "\\\\")",
                }
            }
            else
            {
                @:"Trace": "@Regex.Replace(traceEvent.TechMessages.TrimEnd(), @"(?:\r\n|\n|\r|\t)", " ").Replace("\\", "\\\\")",
            }
            @:"Result": "@traceEvent.ResultType in @GetSeconds(Math.Round(traceEvent.Duration.TotalSeconds, 3))s"
            @:}
        }
        @:]
        @:}
        if (countResult != counter)
        {
            @:,
        }
    }]
    }
    }
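
The other option we are weighing is to stop post-processing XML altogether and build the document at run time from SpecFlow hooks, since NUnit has nothing equivalent to SpecRun's report templates. Again this is only a bare-bones sketch: the collector class below is hypothetical, it only covers the ExecutionDetails part, and the summary sections would still have to be derived from what it gathers.

using System.Collections.Concurrent;
using System.IO;
using System.Text.Json;
using TechTalk.SpecFlow;

[Binding]
public class ResultCollectionHooks
{
    // Hypothetical in-memory collector; one entry per executed scenario.
    private static readonly ConcurrentQueue<object> Executions = new ConcurrentQueue<object>();

    private readonly ScenarioContext _scenarioContext;
    private readonly FeatureContext _featureContext;

    public ResultCollectionHooks(ScenarioContext scenarioContext, FeatureContext featureContext)
    {
        _scenarioContext = scenarioContext;
        _featureContext = featureContext;
    }

    [AfterScenario]
    public void RecordScenario()
    {
        Executions.Enqueue(new
        {
            Scenario = _scenarioContext.ScenarioInfo.Title,
            Feature = _featureContext.FeatureInfo.Title,
            ScenarioTags = string.Join(", ", _scenarioContext.ScenarioInfo.Tags),
            Status = _scenarioContext.ScenarioExecutionStatus.ToString(),
            Error = _scenarioContext.TestError?.Message ?? ""
        });
    }

    [AfterTestRun]
    public static void WriteReport()
    {
        // Serialize whatever was collected into (roughly) the old document shape;
        // the TestSummary and ScenarioSummary sections would be derived from Executions here.
        var document = new { TestResult = new { ExecutionDetails = Executions } };
        File.WriteAllText("TestResult.json",
            JsonSerializer.Serialize(document, new JsonSerializerOptions { WriteIndented = true }));
    }
}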