8.3 C
New York
Thursday, October 17, 2024

automated testing – How do I switch reporting from Allure reports to Extent reports in an existing automation framework?


You can simply implement the Extent base class and extend it in your test classes. Since your framework's environment configurations may differ, you will need to customize the OneTimeSetUp method accordingly. Moreover, to adhere to current automation standards for a robust test automation strategy, it is advisable to incorporate Extent reporting.

I have implemented it in the NUnit framework using C#. You can follow a similar approach if you are using any other programming language for automation.

Extent Base Class:

using APITestAutomation.APIObjects.REST;
using APITestAutomation.Utilities;
using AventStack.ExtentReports;
using AventStack.ExtentReports.Reporter;
using NUnit.Framework;
using NUnit.Framework.Interfaces;
using System;
using System.IO;

namespace APIAutomationTest.Utilities
{
    /// <summary>
    /// Base fixture that wires ExtentReports into the NUnit test lifecycle.
    /// Derive your test fixtures from this class; each test is created in
    /// <see cref="BeforeTest"/> and its outcome is logged in <see cref="AfterTest"/>.
    /// </summary>
    public class ExtentBase
    {
        // Shared across all fixtures so every test logs into one report.
        public static ExtentReports extent;
        public static ExtentTest test;

        [OneTimeSetUp]
        public void Setup()
        {
            // Timestamp makes each run's report folder unique.
            string timestamp = DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss");

            // Environment name (e.g. QA, Staging) from the project's ConfigurationManager.
            string environment = ConfigurationManager.Environment;

            // Walk up from bin/<config>/<tfm> to the project root.
            string workingDirectory = Environment.CurrentDirectory;
            string projectDirectory = Directory.GetParent(workingDirectory).Parent.Parent.FullName;

            // Path for the Extent report inside the run-specific "target" folder.
            var reportPath = Path.Combine(projectDirectory, $"{timestamp}_Html_Report_{environment}", "target", "automation-report.html");

            // Ensure the report directory exists before the reporter writes to it.
            Directory.CreateDirectory(Path.GetDirectoryName(reportPath));

            var htmlReporter = new ExtentSparkReporter(reportPath);
            extent = new ExtentReports();
            extent.AttachReporter(htmlReporter);
            // Surface the environment name in the report's system-info panel.
            extent.AddSystemInfo("Environment", $"{environment}");
        }

        [SetUp]
        public void BeforeTest()
        {
            // Current test name, suffixed with the environment so runs against
            // different environments are distinguishable in the report.
            var testName = TestContext.CurrentContext.Test.Name;
            string environment = ConfigurationManager.Environment;
            testName = $"{testName}_{environment}";

            // Create the report node for this test.
            test = extent.CreateTest(testName);
        }

        [TearDown]
        public void AfterTest()
        {
            var status = TestContext.CurrentContext.Result.Outcome.Status;
            // NOTE(review): the original snippet wrapped the stack trace in <pre>
            // tags for HTML formatting; the scrape stripped them — restored here.
            var stacktrace = string.IsNullOrEmpty(TestContext.CurrentContext.Result.StackTrace)
                ? ""
                : string.Format("<pre>{0}</pre>", TestContext.CurrentContext.Result.StackTrace);

            Status logstatus;
            var testName = TestContext.CurrentContext.Test.Name;

            Logger.LogTestCaseName(testName); // Log test case name

            switch (status)
            {
                case TestStatus.Failed:
                    // Log the failure reason from the assertion.
                    logstatus = Status.Fail;
                    var failureReason = TestContext.CurrentContext.Result.Message;
                    test.Log(logstatus, $"Test ended with {logstatus}. Failure Reason: {failureReason}");

                    // Add the stacktrace to the report.
                    test.Log(logstatus, "Stack Trace: " + stacktrace);

                    Logger.LogError($"Test ended with {logstatus}. Failure Reason: {failureReason}");
                    Logger.LogError("Stack Trace: " + stacktrace);
                    break;

                case TestStatus.Inconclusive:
                    logstatus = Status.Warning;
                    test.Log(logstatus, "Test ended with " + logstatus + stacktrace);
                    Logger.LogInfo("Test ended with " + logstatus + stacktrace);
                    break;

                case TestStatus.Skipped:
                    logstatus = Status.Skip;
                    test.Log(logstatus, "Test ended with " + logstatus);
                    Logger.LogInfo("Test ended with " + logstatus);
                    break;

                default:
                    logstatus = Status.Pass;
                    test.Log(logstatus, "Test ended with " + logstatus);
                    Logger.LogInfo("Test ended with " + logstatus);
                    break;
            }
            // Flush after every test so a crashed run still has a usable report.
            extent.Flush();
        }

        [OneTimeTearDown]
        public void TearDown()
        {
            extent.Flush();
        }
    }
}

Here is an example of a test class where I have validated both the response code and ensured the response body is not null. Fluent Assertions offer an elegant approach to performing these validations.

Test Class:

using APIAutomationTest.APIObjects.REST.JsonBody;
using APITestAutomation.APIObjects.REST;
using APITestAutomation.Utilities;
using NUnit.Framework;
using FluentAssertions;
using System.Net;
using APIAutomationTest.Utilities;
using APIAutomationTest.APIObjects;

namespace APITestAutomation.Tests
{
    /// <summary>
    /// Smoke tests for the users API. Inherits the ExtentReports lifecycle
    /// hooks from <see cref="ExtentBase"/>.
    /// </summary>
    [TestFixture]
    public class ApiTests : ExtentBase
    {
        private ApiUtils apiUtils;

        // Named differently from ExtentBase.Setup: a derived [OneTimeSetUp]
        // method with the same name hides the base one, and NUnit would then
        // skip the report initialization entirely.
        [OneTimeSetUp]
        public void FixtureSetup()
        {
            apiUtils = new ApiUtils();
        }

        [Test]
        [Category("Smoke")]
        public void GetSingleUser()
        {
            // Use apiUtils to make API requests.
            var response = apiUtils.ExecuteGetRequest(ApiEndpoints.SingleUser);
            response.StatusCode.Should().Be(HttpStatusCode.OK);

            var responseBody = apiUtils.DeserializeResponse(response);
            // Fixed: the original asserted BeNull while the message (and the
            // article text) say the body must be present — NotBeNull is correct.
            responseBody.Data.Should().NotBeNull(because: "the 'Data' property should not be null");
        }
    }
}

Related Articles

LEAVE A REPLY

Please enter your comment!
Please enter your name here

Latest Articles