diff --git a/assets/images/hyperexecute/html-report.webp b/assets/images/hyperexecute/html-report.webp
deleted file mode 100644
index 74e2b7c87..000000000
Binary files a/assets/images/hyperexecute/html-report.webp and /dev/null differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/cypress-mochawesome-report.gif b/assets/images/hyperexecute/knowledge-base/reports/cypress-mochawesome-report.gif
new file mode 100644
index 000000000..6e48ec73e
Binary files /dev/null and b/assets/images/hyperexecute/knowledge-base/reports/cypress-mochawesome-report.gif differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/error-categorization-report.gif b/assets/images/hyperexecute/knowledge-base/reports/error-categorization-report.gif
new file mode 100644
index 000000000..6bba78500
Binary files /dev/null and b/assets/images/hyperexecute/knowledge-base/reports/error-categorization-report.gif differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-1.png b/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-1.png
deleted file mode 100644
index 66cab20b2..000000000
Binary files a/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-1.png and /dev/null differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-2.png b/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-2.png
deleted file mode 100644
index c2ce70e4b..000000000
Binary files a/assets/images/hyperexecute/knowledge-base/reports/error-categorized-report-2.png and /dev/null differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/error-dashbaord.png b/assets/images/hyperexecute/knowledge-base/reports/error-dashbaord.png
deleted file mode 100644
index 3565be816..000000000
Binary files a/assets/images/hyperexecute/knowledge-base/reports/error-dashbaord.png and /dev/null differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/mochawesome-report.png b/assets/images/hyperexecute/knowledge-base/reports/mochawesome-report.png
deleted file mode 100644
index 36cc4ece3..000000000
Binary files a/assets/images/hyperexecute/knowledge-base/reports/mochawesome-report.png and /dev/null differ
diff --git a/assets/images/hyperexecute/knowledge-base/reports/playwright-html-report.gif b/assets/images/hyperexecute/knowledge-base/reports/playwright-html-report.gif
new file mode 100644
index 000000000..c1894930b
Binary files /dev/null and b/assets/images/hyperexecute/knowledge-base/reports/playwright-html-report.gif differ
diff --git a/assets/images/zipboard-integration/1.webp b/assets/images/zipboard-integration/1.webp
deleted file mode 100644
index 30e10f5de..000000000
Binary files a/assets/images/zipboard-integration/1.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/10.webp b/assets/images/zipboard-integration/10.webp
deleted file mode 100644
index 671c31d5d..000000000
Binary files a/assets/images/zipboard-integration/10.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/11.webp b/assets/images/zipboard-integration/11.webp
deleted file mode 100644
index a385e086d..000000000
Binary files a/assets/images/zipboard-integration/11.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/12.webp b/assets/images/zipboard-integration/12.webp
deleted file mode 100644
index a153178ed..000000000
Binary files a/assets/images/zipboard-integration/12.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/13.webp b/assets/images/zipboard-integration/13.webp
deleted file mode 100644
index 01a9cd047..000000000
Binary files a/assets/images/zipboard-integration/13.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/14.webp b/assets/images/zipboard-integration/14.webp
deleted file mode 100644
index 7c9955f80..000000000
Binary files a/assets/images/zipboard-integration/14.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/15.webp b/assets/images/zipboard-integration/15.webp
deleted file mode 100644
index 2aa9e825e..000000000
Binary files a/assets/images/zipboard-integration/15.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/2.webp b/assets/images/zipboard-integration/2.webp
deleted file mode 100644
index 08ebd48a7..000000000
Binary files a/assets/images/zipboard-integration/2.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/3.webp b/assets/images/zipboard-integration/3.webp
deleted file mode 100644
index 862963cec..000000000
Binary files a/assets/images/zipboard-integration/3.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/33.webp b/assets/images/zipboard-integration/33.webp
deleted file mode 100644
index cc346eb39..000000000
Binary files a/assets/images/zipboard-integration/33.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/4.webp b/assets/images/zipboard-integration/4.webp
deleted file mode 100644
index 42c89d6fa..000000000
Binary files a/assets/images/zipboard-integration/4.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/5.webp b/assets/images/zipboard-integration/5.webp
deleted file mode 100644
index ba3a2a062..000000000
Binary files a/assets/images/zipboard-integration/5.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/6.webp b/assets/images/zipboard-integration/6.webp
deleted file mode 100644
index a78cb16a5..000000000
Binary files a/assets/images/zipboard-integration/6.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/7-1.webp b/assets/images/zipboard-integration/7-1.webp
deleted file mode 100644
index a84dbd206..000000000
Binary files a/assets/images/zipboard-integration/7-1.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/7.webp b/assets/images/zipboard-integration/7.webp
deleted file mode 100644
index a7e8e9944..000000000
Binary files a/assets/images/zipboard-integration/7.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/8.webp b/assets/images/zipboard-integration/8.webp
deleted file mode 100644
index e6764120f..000000000
Binary files a/assets/images/zipboard-integration/8.webp and /dev/null differ
diff --git a/assets/images/zipboard-integration/9.webp b/assets/images/zipboard-integration/9.webp
deleted file mode 100644
index d8fc5050e..000000000
Binary files a/assets/images/zipboard-integration/9.webp and /dev/null differ
diff --git a/assets/videos/bugherd-integration/api-token.mp4 b/assets/videos/integration/bug-tracking/bugherd/api-token.mp4
similarity index 100%
rename from assets/videos/bugherd-integration/api-token.mp4
rename to assets/videos/integration/bug-tracking/bugherd/api-token.mp4
diff --git a/assets/videos/bugherd-integration/bugherd-integrate.mp4 b/assets/videos/integration/bug-tracking/bugherd/bugherd-integrate.mp4
similarity index 100%
rename from assets/videos/bugherd-integration/bugherd-integrate.mp4
rename to assets/videos/integration/bug-tracking/bugherd/bugherd-integrate.mp4
diff --git a/assets/videos/bugherd-integration/lodge-bug.mp4 b/assets/videos/integration/bug-tracking/bugherd/lodge-bug.mp4
similarity index 100%
rename from assets/videos/bugherd-integration/lodge-bug.mp4
rename to assets/videos/integration/bug-tracking/bugherd/lodge-bug.mp4
diff --git a/assets/videos/bugherd-integration/uninstall-bugherd.mp4 b/assets/videos/integration/bug-tracking/bugherd/uninstall-bugherd.mp4
similarity index 100%
rename from assets/videos/bugherd-integration/uninstall-bugherd.mp4
rename to assets/videos/integration/bug-tracking/bugherd/uninstall-bugherd.mp4
diff --git a/assets/videos/integration/bug-tracking/zipboard/remove-zipboard.mp4 b/assets/videos/integration/bug-tracking/zipboard/remove-zipboard.mp4
new file mode 100644
index 000000000..723cb7d2d
Binary files /dev/null and b/assets/videos/integration/bug-tracking/zipboard/remove-zipboard.mp4 differ
diff --git a/assets/videos/integration/bug-tracking/zipboard/zipbaord-api-token.mp4 b/assets/videos/integration/bug-tracking/zipboard/zipbaord-api-token.mp4
new file mode 100644
index 000000000..4ffb8283b
Binary files /dev/null and b/assets/videos/integration/bug-tracking/zipboard/zipbaord-api-token.mp4 differ
diff --git a/assets/videos/integration/bug-tracking/zipboard/zipboard-integration.mp4 b/assets/videos/integration/bug-tracking/zipboard/zipboard-integration.mp4
new file mode 100644
index 000000000..82f9f7d65
Binary files /dev/null and b/assets/videos/integration/bug-tracking/zipboard/zipboard-integration.mp4 differ
diff --git a/assets/videos/teamwork-integration/api-key.mp4 b/assets/videos/integration/project-management/teamwork/api-key.mp4
similarity index 100%
rename from assets/videos/teamwork-integration/api-key.mp4
rename to assets/videos/integration/project-management/teamwork/api-key.mp4
diff --git a/assets/videos/teamwork-integration/lodge-bug.mp4 b/assets/videos/integration/project-management/teamwork/lodge-bug.mp4
similarity index 100%
rename from assets/videos/teamwork-integration/lodge-bug.mp4
rename to assets/videos/integration/project-management/teamwork/lodge-bug.mp4
diff --git a/assets/videos/teamwork-integration/teamwork-integration.mp4 b/assets/videos/integration/project-management/teamwork/teamwork-integration.mp4
similarity index 100%
rename from assets/videos/teamwork-integration/teamwork-integration.mp4
rename to assets/videos/integration/project-management/teamwork/teamwork-integration.mp4
diff --git a/assets/videos/teamwork-integration/uninstall-teamwork.mp4 b/assets/videos/integration/project-management/teamwork/uninstall-teamwork.mp4
similarity index 100%
rename from assets/videos/teamwork-integration/uninstall-teamwork.mp4
rename to assets/videos/integration/project-management/teamwork/uninstall-teamwork.mp4
diff --git a/assets/videos/integration/team-communication/rocketchat/rocketchat-bug.mp4 b/assets/videos/integration/team-communication/rocketchat/rocketchat-bug.mp4
new file mode 100644
index 000000000..5e9360f5e
Binary files /dev/null and b/assets/videos/integration/team-communication/rocketchat/rocketchat-bug.mp4 differ
diff --git a/assets/videos/integration/team-communication/rocketchat/rocketchat-integration.mp4 b/assets/videos/integration/team-communication/rocketchat/rocketchat-integration.mp4
new file mode 100644
index 000000000..bcae741a2
Binary files /dev/null and b/assets/videos/integration/team-communication/rocketchat/rocketchat-integration.mp4 differ
diff --git a/assets/videos/integration/team-communication/rocketchat/rocketchat-remove.mp4 b/assets/videos/integration/team-communication/rocketchat/rocketchat-remove.mp4
new file mode 100644
index 000000000..47abd48a9
Binary files /dev/null and b/assets/videos/integration/team-communication/rocketchat/rocketchat-remove.mp4 differ
diff --git a/assets/videos/integration/team-communication/rocketchat/rocketchat-webhook.mp4 b/assets/videos/integration/team-communication/rocketchat/rocketchat-webhook.mp4
new file mode 100644
index 000000000..6c7dafb7c
Binary files /dev/null and b/assets/videos/integration/team-communication/rocketchat/rocketchat-webhook.mp4 differ
diff --git a/assets/videos/practitest-integration/api-key.mp4 b/assets/videos/practitest-integration/api-key.mp4
deleted file mode 100644
index cba5bc1ea..000000000
Binary files a/assets/videos/practitest-integration/api-key.mp4 and /dev/null differ
diff --git a/docs/allure-reports.md b/docs/allure-reports.md
index e457e8caf..f58b29fcb 100644
--- a/docs/allure-reports.md
+++ b/docs/allure-reports.md
@@ -2,7 +2,7 @@
id: allure-reports
title: Allure Reports on HyperExecute
hide_title: false
-sidebar_label: Allure Reports
+sidebar_label: Allure
description: Learn how to generate Allure test report for HyperExecute on lambdatest and download the reports from the dashboard
keywords:
- allure reports
diff --git a/docs/bugherd-integration.md b/docs/bugherd-integration.md
index 45af69658..d0dceb408 100644
--- a/docs/bugherd-integration.md
+++ b/docs/bugherd-integration.md
@@ -62,7 +62,7 @@ LambdaTest integrates with BugHerd, allowing users to file issues directly from
**Step 4:** Now, provide your BugHerd API Token to establish integration with LambdaTest and click on **install** button.
:::info Fetch your Bugherd API Token
@@ -71,7 +71,7 @@ LambdaTest integrates with BugHerd, allowing users to file issues directly from
- You will find the "**API Token**" under the section **Authentication Token**. Copy your API token.
:::
@@ -89,7 +89,7 @@ LambdaTest integrates with BugHerd, allowing users to file issues directly from
**Step 5:** Now update the comments of the bug and other details and click on **Create Task** button.
**Step 6:** Go to your dashboard and check a ticket will be created for the same.
@@ -103,7 +103,7 @@ LambdaTest integrates with BugHerd, allowing users to file issues directly from
**Step 3:** Click on the **Remove** button.
> That was all you need to know for LambdaTest + BugHerd Integration. Increase your productivity with our integrations. If you still have any questions for us, please feel free to let us know. Our experts are always available on **chat** to help you out with any roadblock.
diff --git a/docs/cucumber-report.md b/docs/cucumber-report.md
index 7f03e22e8..892ec8f53 100644
--- a/docs/cucumber-report.md
+++ b/docs/cucumber-report.md
@@ -2,7 +2,7 @@
id: cucumber-report
title: Cucumber Report on HyperExecute
hide_title: true
-sidebar_label: Cucumber Report
+sidebar_label: Cucumber
description: Learn how to generate Cucumber Report on lambdatest and download the reports from the dashboard
keywords:
- cucumber testing reports
diff --git a/docs/cypress-mochaawesome-report.md b/docs/cypress-mochaawesome-report.md
index cb2a9a10a..affbb0c0d 100644
--- a/docs/cypress-mochaawesome-report.md
+++ b/docs/cypress-mochaawesome-report.md
@@ -1,8 +1,8 @@
---
id: cypress-mochaawesome-report
-title: Consolidated Mochawesome Report for Cypress on HyperExecute
+title: Consolidated Mochawesome Report for Cypress
hide_title: false
-sidebar_label: Cypress Mochawesome Report
+sidebar_label: Cypress Mochawesome
description: Discover how to create MochaAwesome test reports for Cypress framework testing on LambdaTest and how to download these reports from the dashboard
keywords:
- cypress testing
@@ -33,46 +33,21 @@ slug: cypress-mochaawesome-report/
},{
"@type": "ListItem",
"position": 3,
- "name": "Playwright Testing With Playwright Test",
+ "name": "Cypress Mochawesome Report",
"item": "https://www.lambdatest.com/support/docs/cypress-mochaawesome-report/"
}]
})
}}
>
-Cypress is an open-source, end-to-end testing framework made especially for contemporary web applications. It enables programmers to create and run tests that mimic user interactions right inside a web browser.
+The Mochawesome reporter is a custom Cypress reporter designed to generate standalone HTML reports. These reports are clean, modern, and interactive, making it easier to visualize test results. With features like filtering tests and displaying stack traces for failures, the Mochawesome report is an essential tool in Cypress for clear and concise test insights.
-### Built-in Reporters
-There several pre-defined or built-in reporters and since Cypress is built on top of Mocha, that means any reporter built for Mocha can be used with Cypress as well. Each reporter may present the test results in a different way, suitable for specific needs or preferences. These built-in reporters offer different levels of detail and visualization options, helping developers to understand the test results and identify any potential issues more effectively.
+## Generating a Consolidated HTML Report
+Follow these steps to generate a consolidated HTML report using Mochawesome:
-:::note info
-Following are some of the built-in reporters:
-- Spec Reporter
-- Dot Matrix Reporter
-- Nyan Reporter
-- Tap Reporter
-- Landing Strip Reporter
-- List Reporter
-:::
-
-There are many other built-in reporters as well. By default, Cypress uses the `spec` reporter to output information to `STDOUT`
-
-### Custom Reporters
-Apart from the built-in reporters, Cypress Test also allows developers to create custom reporters i.e. you have the flexibility to define your own format and layout for displaying test results. Custom reporters are beneficial when you have specific reporting requirements or when you want to integrate the test results seamlessly into your existing development workflow.
-
-You can load custom Mocha reporters through a relative or absolute path which can be specified in your **Cypress Configuration file** or via the **CLI**.
-
-## Mochawesome Report on HyperExecute
-***
-
-Mochawesome reporter is a custom reporter that generates a standalone HTML report which helps to help visualize the test runs. It has simple, clean, and modern design. The report has filters to display only the desired tests and renders stack trace for failed tests. The Mochawesome report is one of the most important reports in Cypress.
+### Step 1: Update Your Cypress Configuration
+In your Cypress configuration file `cypress.config.js`, add the following code to enable Mochawesome as a reporter:
-### Steps to Generate HTML Report on HyperExecute
-
-**Step 1:** Update your Cypress Configuration File by adding the below code
-
-#### Cypress Config File
-
-```bash
+```javascript title="cypress.config.js"
"reporter": "cypress-multi-reporters",
"reporterOptions": {
"reporterEnabled": [
@@ -87,17 +62,16 @@ Mochawesome reporter is a custom reporter that generates a standalone HTML repor
}
```
-> The `overwrite` parameter should be set to **true** and `html` should be set to **false** because the mocha-merge utility which we use to merge reports do not support html files in path while merging json files
-
-:::tip Note
-
-The path to report is set to **``"reportDir": "cypress/results"``** and it should not be changed as we use this path for showing command logs on a dashboard.
-
+:::note
+- The `overwrite` parameter should be set to `true` to ensure the report is replaced with the latest run results.
+- The `html` option should be set to `false` because we will be merging JSON files later, and the mocha-merge utility does not support HTML files.
+- Ensure the `reportDir` path is set to `"cypress/results"`; this path is used to generate the logs that will be visible on the dashboard. **Do not change this path.**
:::
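+
+If you are using Cypress 10 or newer, these reporter options sit at the top level of the exported configuration object. The following is only a minimal sketch of that assumed structure (the reporter options themselves are elided, since they are shown above):
+
+```javascript title="cypress.config.js"
+const { defineConfig } = require("cypress");
+
+module.exports = defineConfig({
+  reporter: "cypress-multi-reporters",
+  reporterOptions: {
+    // ...the mochawesome reporter options from the snippet above
+    // (reportDir: "cypress/results", overwrite: true, html: false, ...)
+  },
+  e2e: {
+    // your existing e2e settings (baseUrl, specPattern, ...) remain unchanged
+  },
+});
+```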
-**Step 2:** Now configure the report parameters in the **HyperExecute YAML** file.
+### Step 2: Configure the HyperExecute YAML File
+In your HyperExecute YAML configuration, define the [`report`](https://www.lambdatest.com/support/docs/deep-dive-into-hyperexecute-yaml/#report) parameters like this:
-```bash
+```yaml title="hyperexecute.yaml"
report: true
partialReports:
frameworkName: cypress
@@ -105,6 +79,7 @@ partialReports:
type: html
```
-**Step 3:** Now Execute your job by triggering the HyperExecute CLI. You can visit the HyperExecute dashboard to download the report after job completion.
+### Step 3: Execute Your Tests
+Run your Cypress tests on HyperExecute using the CLI. After your job completes, you can visit the HyperExecute dashboard to download and view the consolidated Mochawesome report.
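+
+A typical invocation of the HyperExecute CLI looks like the sketch below; it assumes the CLI binary has been downloaded into the project root and that your LambdaTest credentials are exported as environment variables:
+
+```bash
+# Illustrative run; --user and --key can also be supplied inline instead of
+# through the LT_USERNAME / LT_ACCESS_KEY environment variables.
+./hyperexecute --user "$LT_USERNAME" --key "$LT_ACCESS_KEY" --config hyperexecute.yaml
+```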
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/docs/error-categorization-report.md b/docs/error-categorization-report.md
index 7dd190103..934c7b268 100644
--- a/docs/error-categorization-report.md
+++ b/docs/error-categorization-report.md
@@ -1,8 +1,8 @@
---
id: error-categorization-report
-title: Error Categorization Report on HyperExecute
-hide_title: true
-sidebar_label: Error Categorization Report
+title: Error Categorization Report
+hide_title: false
+sidebar_label: Error Categorization
description: Streamline error analysis with the Error Categorization Report. Identify patterns in test failures to enhance quality and efficiency in your testing process.
keywords:
- error categorization reports
@@ -35,55 +35,25 @@ slug: error-categorization-report/
})
}}
>
+The Error Categorization Report is a specialized report that helps simplify the process of identifying and categorizing errors in your HyperExecute job. By organizing test failures with similar characteristics, this report provides a clear and structured overview of problematic stages and associated tests for efficient error tracking and analysis.
-# HyperExecute Error Categorization Report
-
-The Error Categorization Report is a custom report that is used to help you categorize errors based on failed tests, providing a clear and organized view of test failures with similar characteristics.
-
-This report aims to simplify the identification of problematic stages and associated tests, allowing you to navigate through detailed views and download relevant information for further analysis.
-
-## Generate Error Categorization Report
-
-Follow the guided steps below to generate the error-categorized report for your HyperExecute Job.
-
-### Prerequisite
-
-- This report will only be generated if your job contains multiple categories of errors.
+:::info
+You can generate and download other reports as well, along with the error categorization report.
+:::
-### Step 1: YAML file Configuration
+## Configure the YAML File to Generate the Error Categorization Report
-In your YAML file, add the below code to generate the error-categorization report.
+This report is generated when your job includes multiple error categories. To enable it, simply add the [`errorCategorizedReport`](https://www.lambdatest.com/support/docs/deep-dive-into-hyperexecute-yaml/#errorcategorizedreport) flag to your YAML file:
-```bash
+```yaml title="hyperexecute.yaml"
errorCategorizedReport:
enabled: true
```
-:::tip
-There is no dependency on the [**report flag**](https://www.lambdatest.com/support/docs/deep-dive-into-hyperexecute-yaml/#report). It means that even if you do not pass the `report: true`, it will still generate the error-categorized report.
+:::note
+This report does not depend on the [`report: true`](https://www.lambdatest.com/support/docs/deep-dive-into-hyperexecute-yaml/#report) flag. It will generate the error report even if you do not pass the `report: true` flag in your YAML file.
:::
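+
+For context, the flag sits at the top level of the YAML file alongside your other job settings. The sketch below uses illustrative values for the surrounding keys:
+
+```yaml title="hyperexecute.yaml"
+# Illustrative placement; version, runson, and concurrency are example values.
+version: 0.1
+runson: linux
+concurrency: 2
+
+errorCategorizedReport:
+  enabled: true
+
+# report: true is optional here; the error categorization report is generated either way.
+```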
-> **NOTE :** You can generate and download other reports as well, along with the error-report.
-
-### Step 2: Execute your Job
-
-After configuring your YAML file, you can execute your Job on the HyperExecute.
-
-- In the below screenshot, you can see that 3 categories of errors are generated
- - ElementNotFound Error
- - Assertion Error
- - WebDriver Exception
-
-- You can either open the **Error-RCA Report** or you can download it as well.
-
-
-
-### Step 3: Download or Open the Generated Error Report
-
-- This report list down all the error summaries along with the specific details.
-
-- You can also check for the particular error for the specific test.
-
-
+Now trigger your job on HyperExecute. If the job encounters any failed tests, the Error Categorization Report will be generated. This report lists all the error summaries along with their specific details, and you can also drill down into the particular error for a specific test.
-
+
diff --git a/docs/extent-report.md b/docs/extent-report.md
index 9e25c68c9..f4642c217 100644
--- a/docs/extent-report.md
+++ b/docs/extent-report.md
@@ -2,7 +2,7 @@
id: extent-report
title: Extent Report on HyperExecute
hide_title: true
-sidebar_label: Extent Report
+sidebar_label: Extent
description: Learn how to generate Extent Report on lambdatest and download the reports from the dashboard
keywords:
- extent testing reports
diff --git a/docs/hyperexecute-job-reports.md b/docs/hyperexecute-job-reports.md
index e4102e395..73950f480 100644
--- a/docs/hyperexecute-job-reports.md
+++ b/docs/hyperexecute-job-reports.md
@@ -36,70 +36,65 @@ slug: hyperexecute-job-reports/
})
}}
>
-HyperExecute reports are a powerful tool for analyzing the quality of your builds and identifying potential issues. By understanding the different types of reports available and how to use them, you can improve your testing process and deliver high-quality software.
+HyperExecute Job Reports are designed to offer deep insights into the performance of your tests. By utilizing these reports, you can identify issues, track progress, and enhance your software testing workflow. Whether you're a developer or QA engineer, understanding and leveraging the various report types is crucial to delivering high-quality software efficiently.
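+
+Most of the framework reports listed below are enabled through the `report` and `partialReports` keys in the HyperExecute YAML file. The sketch below uses placeholder values; refer to each report's page for the exact `frameworkName` and report directory to use:
+
+```yaml title="hyperexecute.yaml"
+# Generic sketch with placeholder values (see the framework-specific guides).
+report: true
+partialReports:
+  frameworkName: <framework>      # e.g. cypress, playwright
+  location: <report-directory>    # e.g. cypress/results, playwright-report
+  type: html
+```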
-## Types of HyperExecute Reports
+## Extent Report
+The [Extent Report](/support/docs/extent-report/) is a robust reporting framework commonly used with Java, TestNG, and Selenium tests. It provides a feature-rich environment to explore test results, offering:
-HyperExecute supports a wide variety of test reports, including:
+- Comprehensive test case summaries.
+- Visuals like screenshots and videos of test executions.
+- Detailed execution logs.
+- Graphical data, including charts and graphs to simplify test result analysis.
-### Extent Report
+## Extent Native
+[Extent Native](/support/docs/native-extent-report/) is a more modern reporting solution from the Extent Reports family, offering a native reporting experience that is highly interactive and responsive. It supports Java, TestNG, and Selenium tests and includes:
-[**Extent Report**](/support/docs/extent-report/) is a popular reporting framework for Java, TestNG, and Selenium tests. It provides a comprehensive set of features for reporting test results, including detailed test case summaries, screenshots and videos of test execution, execution logs, and charts and graphs to analyze test results.
+- Drill-down analysis to examine results in depth.
+- Custom filtering options for tailored views.
+- Real-time updates, accessible on any device for a seamless experience.
-### Extent Native
+## Native TestNG Reports
+The [Native TestNG Reports](/support/docs/hyperexecute-native-testng-report/) provide a standardized summary extracted from raw Extent reports for each Virtual Machine (VM) at the conclusion of a HyperExecute job. These reports deliver a consolidated HTML view, generated by the TestNG framework, ensuring easy access to comprehensive testing results.
-[**Extent Native**](/support/docs/native-extent-report/) is a newer reporting framework from the Extent Reports team. It provides a native reporting experience for Java, TestNG, and Selenium tests. Extent Native reports are interactive and responsive, and they can be viewed on any device. They also include features such as drill-down analysis, custom filters, and real-time updates.
+## Allure Report
+[Allure](/support/docs/allure-reports/) is a cross-platform reporting tool compatible with various testing frameworks and languages. It offers a rich feature set for comprehensive analysis, including:
-### Allure Report
+- Interactive and responsive reporting dashboards.
+- Drill-down analysis to explore test outcomes.
+- Custom filters and real-time updates.
+- CI/CD tool integration for streamlined workflows.
-[**Allure**](/support/docs/allure-reports/) is a cross-platform reporting framework that can be used with a variety of testing frameworks and languages. It provides a rich reporting experience with features such as interactive and responsive reports, drill-down analysis, custom filters, real-time updates, and integration with CI/CD tools.
+## Cucumber Report
+[Cucumber Report](/support/docs/cucumber-report/) focuses on Cucumber test executions. It provides:
-### Cucumber Report
+- Test step summaries with detailed logs.
+- Screenshots and videos for visual verification of results.
+- Charts and graphs for analyzing test trends.
-[**Cucumber Report**](/support/docs/cucumber-report/) is a reporting framework for Cucumber tests. It provides a detailed overview of the test execution, including test step summaries, screenshots and videos of test execution, execution logs, and charts and graphs to analyze test results.
+## Playwright HTML
+The [Playwright HTML](/support/docs/playwright-html-report/) report is specifically designed for Playwright tests and offers detailed execution summaries, including:
-### Playwright HTML
+- Test suite, case, and step summaries.
+- Visual assets such as screenshots and test execution videos.
+- Execution logs along with graphical data for analyzing results.
-[**Playwright HTML**](/support/docs/playwright-html-report/) is a reporting framework for Playwright tests. It provides a detailed overview of the test execution, including test suite summaries, test case summaries, test step summaries, screenshots and videos of test execution, execution logs, and charts and graphs to analyze test results.
+## Specflow
+[Specflow](/support/docs/specflow-report/) is a popular Behavior-Driven Development (BDD) testing framework for .NET (C#). Its reports include:
-### Specflow
+- Feature summaries and scenario breakdowns.
+- Step-by-step execution details.
+- Screenshots, videos, and execution logs for comprehensive result visualization.
-[**Specflow**](/support/docs/specflow-report/) is a testing framework for BDD (Behavior Driven Development). It can be used with a variety of programming languages, including C#, Java, and Ruby. Specflow reports provide a detailed overview of the test execution, including feature summaries, scenario summaries, step summaries, screenshots and videos of test execution, and execution logs.
+## Cypress Mochawesome Report
+Designed for Cypress and Mocha tests, the [Cypress Mochawesome](/support/docs/cypress-mochaawesome-report/) report offers:
-### Cypress Mochawesome Report
+- Test suite and case summaries.
+- Test step breakdowns with logs.
+- Screenshots and videos to validate test executions.
-[**Cypress Mochawesome**](/support/docs/cypress-mochaawesome-report/) report is a reporting framework for Mocha tests. It provides a detailed overview of the test execution, including test suite summaries, test case summaries, test step summaries, screenshots and videos of test execution, and execution logs.
+## Error Categorization Report
+The [Error Categorization Report](/support/docs/error-categorization-report/) is crafted to help users classify and understand test failures systematically. Key benefits include:
-### Error Categorization Report
-
-The [**Error Categorization Report**](/support/docs/error-categorization-report/) serves as a tailored solution designed to assist users in classifying errors based on failed tests, fostering a systematic and lucid understanding of test failures with shared characteristics. Its primary objective is to streamline the process of pinpointing problematic stages and associated tests, offering users the ability to seamlessly navigate through detailed views and download pertinent information for in-depth analysis.
-
-### Cypress Report
-
-Cypress Reports is a reporting framework for Cypress tests. It provides a detailed overview of the test execution, including test suite summaries, test case summaries, test step summaries, screenshots and videos of test execution, execution logs, and charts and graphs to analyze test results.
-
-### Junit Report
-
-Junit Report is a reporting framework for Junit tests. It provides a detailed overview of the test execution, including test suite summaries, test case summaries, test step summaries, and execution logs.
-
-### Specflow Custom
-
-Specflow Custom reports are custom reports that can be generated using the Specflow reporting API. You can use the Specflow reporting API to generate reports in any format you want, such as HTML, PDF, or XML.
-
-## Benefits of Using HyperExecute Reports
-
-There are many benefits to using HyperExecute reports, including:
-
-- **Identify failed tests quickly and easily:** HyperExecute reports can help you to quickly identify which tests failed and why. This can help you to prioritize your debugging efforts and resolve issues more quickly.
-
-- **Analyze test results in depth:** HyperExecute reports provide a detailed overview of your test results, including charts and graphs to help you identify trends and patterns. This information can be used to improve your testing process and deliver high-quality software.
-
-- **Track progress over time:** HyperExecute reports can be used to track your team's progress over time and identify areas where improvement is needed. For example, you can use reports to track the percentage of tests that are passing and the average execution time.
-
-- **Improve communication and collaboration:** HyperExecute reports can be shared with other members of your team and stakeholders. This can help to improve communication and collaboration, and ensure that everyone is on the same page.
-
-Tips for Using HyperExecute Reports Effectively
-
-:::tip
-Different HyperExecute reports provide different information. Choose the report that is most relevant to your needs. For example, if you want to quickly identify which tests failed, you would use the Summary Report
-:::
\ No newline at end of file
+- Simplified error classification by grouping failures with shared characteristics.
+- Detailed navigation through the problematic test stages.
+- Downloadable data for thorough analysis and resolution.
\ No newline at end of file
diff --git a/docs/hyperexecute-native-testng-report.md b/docs/hyperexecute-native-testng-report.md
index b8c523b85..4540e54db 100644
--- a/docs/hyperexecute-native-testng-report.md
+++ b/docs/hyperexecute-native-testng-report.md
@@ -2,7 +2,7 @@
id: hyperexecute-native-testng-report
title: Native TestNG Reports on HyperExecute
hide_title: false
-sidebar_label: Native TestNG Reports
+sidebar_label: Native TestNG
description: Learn how to generate Native TestNG test report for HyperExecute on lambdatest and download the reports from the dashboard
keywords:
- native testng reports
diff --git a/docs/hyperexecute-release-notes-2-5-6.md b/docs/hyperexecute-release-notes-2-5-6.md
new file mode 100644
index 000000000..3ff4dea17
--- /dev/null
+++ b/docs/hyperexecute-release-notes-2-5-6.md
@@ -0,0 +1,49 @@
+---
+id: hyperexecute-release-notes-2-5-6
+title: Version 2.5.6
+hide_title: false
+sidebar_label: Version 2.5.6
+description: Version 2.5.6
+keywords:
+ - LambdaTest Hyperexecute
+ - LambdaTest Hyperexecute help
+ - LambdaTest Hyperexecute documentation
+ - FAQs
+url: https://www.lambdatest.com/support/docs/hyperexecute-release-notes-2-5-6/
+site_name: LambdaTest
+slug: hyperexecute-release-notes-2-5-6/
+---
+
+import NewReleaseTag from '../src/component/newRelease.js';
+import EnhancementTag from '../src/component/enhancementTag';
+import BugFixTag from '../src/component/bugFixTag';
+
+
+## Browser Version Updates
+- **Firefox :** Added support for Firefox-130 and Firefox-131, enabling testing on the latest versions.
+- **Edge :** Downgraded to Edge-128 for compatibility and stability enhancements.
+- **Chrome :** Upgraded to Chrome-129 to include the newest features and security updates.
+
+These updates ensure broader coverage for cross-browser testing and improve test stability across the latest browser versions.
\ No newline at end of file
diff --git a/docs/hyperexecute-release-notes.md b/docs/hyperexecute-release-notes.md
index c1ef9ae23..f467e5fbb 100644
--- a/docs/hyperexecute-release-notes.md
+++ b/docs/hyperexecute-release-notes.md
@@ -46,6 +46,7 @@ HyperExecute is an AI Powered Test Orchestration Cloud Platform, enabling test a
### October, 2024
| Release Number | Release Date |
|----------------|--------------|
+| [Version 2.5.6](/support/docs/hyperexecute-release-notes-2-5-6) | October 21, 2024|
| [Version 2.5.5](/support/docs/hyperexecute-release-notes-2-5-5) | October 14, 2024|
| [Version 2.5.4](/support/docs/hyperexecute-release-notes-2-5-4) | October 07, 2024|
diff --git a/docs/native-extent-report.md b/docs/native-extent-report.md
index e470c729e..a3b47277a 100644
--- a/docs/native-extent-report.md
+++ b/docs/native-extent-report.md
@@ -2,7 +2,7 @@
id: native-extent-report
title: Native Extent Report on HyperExecute
hide_title: true
-sidebar_label: Extent Native Report
+sidebar_label: Extent Native
description: Learn how to generate Native Extent Report on lambdatest and download the reports from the dashboard
keywords:
- native testing reports
diff --git a/docs/playwright-html-report.md b/docs/playwright-html-report.md
index fc59c771e..6b9bcf754 100644
--- a/docs/playwright-html-report.md
+++ b/docs/playwright-html-report.md
@@ -1,8 +1,8 @@
---
id: playwright-html-report
-title: Consolidated Playwright HTML Report on HyperExecute
-hide_title: true
-sidebar_label: Playwright HTML Report
+title: Consolidated Playwright HTML Report
+hide_title: false
+sidebar_label: Playwright HTML
description: Learn how to generate html test report for playwright framework testing on lambdatest and download the reports from the dashboard
keywords:
- playwright testing with Playwright test runner
@@ -40,71 +40,34 @@ slug: playwright-html-report/
})
}}
>
+Playwright offers built-in HTML reports to help visualize the results of your test runs. These reports are detailed, user-friendly, and interactive, allowing you to analyze each test's status, logs, and errors in an organized manner.
-# Consolidated Playwright HTML Report on HyperExecute
-* * *
+## Steps to Generate Consolidated HTML Report
+Follow these steps to generate a consolidated Playwright HTML report:
-Playwright is a Node.js library that uses a single API to automate Chromium, Firefox, and WebKit. It is designed to enable powerful, reliable, and efficient automated browser testing. Playwright can also automate Microsoft Edge since it is built on the open-source Chromium web framework.
+### Step 1: Update Your Playwright Configuration
-LambdaTest allows you to run Playwright tests across 40+ real browsers and operating system combinations. This guide will help you on how to generate a HTML Report for Playwright testing on the LambdaTest platform.
+In your Playwright configuration file `playwright.config.js`, add the following code to enable the reporter:
-## Reporters
-***
-
-Reporters are components or tools that generate human-readable output to display the results of test executions. They play a crucial role in providing developers, testers, and other stakeholders with valuable insights into the outcomes of automated tests. Reporters take the raw test execution data and format it into a structured and readable presentation.
-
-### Built-in Reporters
-Playwright Test provides several pre-defined or built-in reporters. Each reporter may present the test results in a different way, suitable for specific needs or preferences. These built-in reporters offer different levels of detail and visualization options, helping developers to understand the test results and identify any potential issues more effectively.
->
-Following are the Playwright built-in reporters:
-- List Reporter
-- Line Reporter
-- Dot Reporter
-- HTML Reporter
-- JSON Reporter
-- JUnit Reporter
-- GitHub Actions annotations
-
-### Custom Reporters
-Apart from the built-in reporters, Playwright Test also allows developers to create custom reporters i.e. you have the flexibility to define your own format and layout for displaying test results. Custom reporters are beneficial when you have specific reporting requirements or when you want to integrate the test results seamlessly into your existing development or CI/CD (Continuous Integration/Continuous Deployment) workflow.
-
-## HTML Report on HyperExecute
-***
-
-There are several built-in reporters in **Playwright Test** for various needs, along with an option to create Custom Reporters.
-
-HTML reporter produces a self-contained folder that contains a report for the test run that can be served as a web page. With Hyperexecute, we support the generation of a single HTML report for the complete job.
-
-By default, an HTML report is opened automatically if some of the tests failed. However, you can control this behavior via the `open` property in the **Playwright config file**. The possible values for that property are `always`, `never`, and `on-failure` (default).
-
-> In the case of Hyperexecute, we always have to set it to `never`.
-
-### Steps to Generate HTML Report on HyperExecute
-
-**Step 1:** Update your Playwright Configuration File by adding the below code
-
-#### playwright.config.js File
-
-```javascript
+```javascript title="playwright.config.js"
import { devices } from '@playwright/test';
-const config = {
- timeout: 5 * 60 * 1000, // Maximum time one test can run for
- testDir: './tests',
- testMatch: /.*\.spec\.js/,
+const config = {
+ ...//
reporter: [['html', { open: 'never' }]],
+ ...//
};
```
+:::note
+- In the case of HyperExecute, you must always set `open` to `never`.
-:::tip Note
-
-By default, the report is written into the **playwright-report folder** in the current working directory. The same location can be used in the report parameters in the YAML file.
-
+- By default, the report is written into the **playwright-report folder** in the current working directory. The same location can be used in the report parameters in the YAML file.
:::
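+
+For reference, a fuller configuration might look like the sketch below. The values are illustrative, not required; keep your existing settings and only add the `reporter` entry:
+
+```javascript title="playwright.config.js"
+import { devices } from '@playwright/test';
+
+const config = {
+  testDir: './tests',                       // illustrative; point to your spec folder
+  testMatch: /.*\.spec\.js/,
+  timeout: 5 * 60 * 1000,                   // maximum time one test can run for
+  reporter: [['html', { open: 'never' }]],  // 'never' is required on HyperExecute
+  use: { ...devices['Desktop Chrome'] },    // illustrative browser profile
+};
+
+export default config;
+```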
-**Step 2:** Now configure the report parameters in the **HyperExecute YAML** file.
+### Step 2: Configure the HyperExecute YAML File
+In your HyperExecute YAML configuration, define the [`report`](https://www.lambdatest.com/support/docs/deep-dive-into-hyperexecute-yaml/#report) parameters like this:
-```bash
+```yaml title="hyperexecute.yaml"
report: true
partialReports:
frameworkName: playwright
@@ -112,6 +75,7 @@ partialReports:
type: html
```
-**Step 3:** Now execute your job by triggering the HyperExecute CLI. You can visit the HyperExecute dashboard to download the report after job completion.
+### Step 3: Execute Your Tests
+Run your Playwright tests on HyperExecute using the CLI. After your job completes, you can visit the HyperExecute dashboard to download and view the consolidated Playwright HTML report.
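+
+As with the other reports, a typical CLI run looks like the sketch below, assuming the HyperExecute binary sits in the project root and your credentials are exported as environment variables:
+
+```bash
+./hyperexecute --user "$LT_USERNAME" --key "$LT_ACCESS_KEY" --config hyperexecute.yaml
+```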
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/docs/real-time-upload-and-download-files.md b/docs/real-time-upload-and-download-files.md
new file mode 100644
index 000000000..d6ce44e4e
--- /dev/null
+++ b/docs/real-time-upload-and-download-files.md
@@ -0,0 +1,175 @@
+---
+id: real-time-upload-and-download-files
+title: Upload and Download Files
+hide_title: false
+sidebar_label: Upload and Download Files
+description: Learn how to upload and download files directly within LambdaTest real time testing sessions, enabling seamless file transfers between local and remote environments across desktop and mobile platforms.
+keywords:
+ - real time testing in lambdatest
+ - lambdatest real time testing feature
+ - online real time testing
+ - free cross browser testing tool
+ - cross browser compatibility testing tool
+ - online real time website testing
+ - test website in different screen sizes
+ - ie browser compatibility testing tool
+ - test on gionee elfie e7 online
+ - test on mac 10.11 el capitan online
+ - apple mac el capitan virtual machine
+url: https://www.lambdatest.com/support/docs/real-time-upload-and-download-files/
+site_name: LambdaTest
+slug: real-time-upload-and-download-files/
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+The Upload and Download Files feature allows you to manage files directly within remote testing environments. This functionality enhances the testing process by allowing seamless file transfers between local and remote environments across desktop and mobile platforms.
+
+## Key Benefits
+
+- Direct file upload to remote testing environments
+- Cross-platform file management support
+- Integrated download capability for test outputs
+- Seamless integration with existing manual testing workflows
+
+## Steps to Upload or Download the Files
+
+
+
+
+**Step 1 :** Log in to the [Web Browser Testing](https://app.lambdatest.com/console/realtime/browser/desktop) dashboard.
+
+**Step 2 :** Select your required OS, browser, and version from the listing to launch your test session.
+
+**Step 3 :** Click on Files and Media from the toolbar.
+
+**Step 4 :** Click the **Uploads / Downloads** button to upload or download the file.
+
+
+
+
+
+
+
+**Step 1 :** Log in to the [Virtual Mobile](https://app.lambdatest.com/console/realtime/browser/mobile) dashboard from the **Web Browser Testing** section.
+
+**Step 2 :** Select your required OS, browser, and device from the listing to launch your test session.
+
+**Step 3 :** Click on Files and Media from the toolbar.
+
+**Step 4 :** Click the **Uploads / Downloads** button to upload or download the file.
+
+
+
+
+
+
+
+**Step 1 :** Log in to the [Virtual Mobile](https://app.lambdatest.com/console/realtime/app) dashboard from the **App Testing** section.
+
+**Step 2 :** Select your OS and the required device, upload your app and then launch your test session.
+
+**Step 3 :** Click on Files and Media from the toolbar.
+
+**Step 4 :** Click the **Uploads / Downloads** button to upload or download the file.
+
+
+
+
+
+
+## Supported Platform Compatibility
+
+
+
+
+- **Supported Versions :** Android 7 and above.
+
+#### File Upload Path
+- **Android 10 and above :** Uploaded files will be accessible through the file picker in the "Downloads" folder.
+- **Below Android 10 :** Files will be uploaded to the "Downloads" folder due to the absence of a "Documents" folder.
+
+
+
+
+
+- **Supported Versions :** iOS 13 and above.
+- **Upcoming Support :** iOS 18 support is planned for a future release.
+- **Upload Location :** Files can be accessed from the "Downloads" section in the Files app.
+
+#### Browser-Specific Download Locations
+- **Firefox :** Files are saved in the "Firefox" folder.
+- **Chromium Browsers (e.g., Chrome, Edge) :** Files are stored in the "Chromium" folder.
+- **Safari :** Files are downloaded to the "Downloads" folder.
+
+
+
+
+
+- **Supported Versions :** Windows versions newer than XP and Windows 7 with Internet Explorer 8, 9, or 10.
+- **Download Location :** Files are stored in the C:/ or E:/Downloads folder.
+- **Upload Location :** Uploaded files appear in the "Downloads" folder.
+
+
+
+
+
+- **Supported Versions :** macOS Sierra (version 10.12) and above.
+- **Download Location :** Downloaded files are accessible under "Downloads" in Finder.
+- **Upload Location :** Files can be found in the "Downloads" folder after uploading.
+
+
+
+
+## File Types and Sizes
+
+
+
+
+| File Type | Supported Formats | Maximum File Size |
+|-----------|-------------------|-------------------|
+| Image | JPG, JPEG, PNG, GIF, BMP | 15 MB |
+| Video | MP4, MOV, 3GP | 15 MB |
+| Non-media | XLS, XLSX, DOC, DOCX, PDF, CSV, TXT, PPT, Contacts | 15 MB |
+
+
+
+
+| Limit | Details |
+|-------|---------|
+| File size | The maximum total download size is 50 MB; the combined size of all the files you want to download must not exceed this limit. |
+
+
+
+
+:::note
+- **Number of File Uploads per Session :** You can upload a maximum of 5 files per testing session.
+
+- **File Storage Duration :** All files uploaded to LambdaTest servers, whether media or non-media, will be retained for a period of 30 days. After this period, the files will be automatically purged from the servers.
+
+- **Non-Media File Considerations for iOS :** For non-media files on iOS, it is imperative to ensure that the iOS app has the `UIFileSharingEnabled` and `LSSupportsOpeningDocumentsInPlace` keys set to `true` in the `Info.plist` file.
+:::
\ No newline at end of file
diff --git a/docs/rocketchat-integration-with-lambdatest.md b/docs/rocketchat-integration-with-lambdatest.md
index 3a3c551cc..d42e4113e 100644
--- a/docs/rocketchat-integration-with-lambdatest.md
+++ b/docs/rocketchat-integration-with-lambdatest.md
@@ -1,8 +1,8 @@
---
id: rocketchat-integration-with-lambdatest
title: Rocket.Chat Integration With LambdaTest
-hide_title: true
-sidebar_label: Rocket.Chat Integration
+hide_title: false
+sidebar_label: Rocket.Chat
description: The LambdaTest Rocket.Chat Integration allows you to share or push a bug to your specified Rocket.Chat channel from the LambdaTest platform in a single click. You can share any UI observations and input with your team on Rocket.Chat anytime, by just capturing a screenshot in the middle of your test session through LambdaTest. You can also annotate the screenshot or highlight your issue or input. The fields populated by you while marking as bug through LambdaTest gets displayed as information on Rocket.Chat for that testing instance.
keywords:
- rocket.chat integration
@@ -39,107 +39,75 @@ slug: rocketchat-integration-with-lambdatest/
}}
>
-# Rocket.Chat Integration With LambdaTest
-* * *
+Rocket.Chat is a free, open-source communication platform that enables collaboration, file sharing, and real-time chat. It is cross-platform, and its open-source nature allows users to customize and extend its functionality to meet their team's or business needs.
-Rocket.Chat is a Communication platform, which is free, open-source, scalable, highly customizable, and secure. It not only allows you to communicate and collaborate with your team, but also share files and live chat in real time. It is a cross-platform, which means it can run on different types of operating systems like Linux, Windows, macOS, Android, and iOS. The biggest advantage of using Rocket.Chat is that it allows you to access its source code, since it is open source. Therefore, you can fully customize, extend, or add new functionality to it to meet your team's or business requirements.
+The LambdaTest Rocket.Chat Integration enables you to push a bug to your specified Rocket.Chat channel in a single click, along with captured screenshots, annotations, and issue details. The fields you fill in while marking a bug are displayed on Rocket.Chat for that testing instance.
-The LambdaTest Rocket.Chat Integration allows you to share or push a bug to your specified Rocket.Chat channel from the LambdaTest platform in a single click. You can share any UI observations and input with your team on Rocket.Chat anytime, by just [capturing a screenshot](https://www.lambdatest.com/full-page-screen-capture) in the middle of your test session through LambdaTest. You can also annotate the screenshot or highlight your issue or input. The fields populated by you while marking as bug through LambdaTest gets displayed as information on Rocket.Chat for that testing instance.
+> Rocket.Chat Integration with LambdaTest is available for freemium as well as premium plans.
-**Rocket.Chat Integration with LambdaTest, like all of the integrations to 3rd party applications, is available for freemium as well as premium plan.**
+## Integrate Rocket.Chat from your LambdaTest Account
-## Integrate Rocket.Chat With Your LambdaTest Account
+**Step 1:** Log in to your LambdaTest account. You need Admin or User level access to view and install integrations.
-* * *
-
-**Step 1:** Login to your [LambdaTest account](https://accounts.lambdatest.com/login). Make sure you have an Admin or User level access to see and install 3rd party integrations.
+**Step 2:** Click on Settings -> Integrations -> Communication.
-**Step 2:** Select **'Integrations'** tab from the left navigation menu bar. This will guide you to a screen where you will find a list of 3rd party applications, that have already been integrated as well as that are available to integrate with your LambdaTest account.
+**Step 3:** Click on the **Connect** button of the `Rocket Chat` block.
-
+**Step 4:** Now, provide your Rocket.Chat Webhook URL to establish the integration with LambdaTest and click on the **Install** button.
-**Step 3:** Hover to the 'Rocket.Chat' block under "Communications" category, and click on "**Add**" button that appears.
+
-
+:::info Generate your Webhook URL
-**Step 4:** You need a webhook URL to integrate your workspace with LambdaTest. So copy the webhook URL of the proposed LambdaTest integration on Rocket.Chat.
-
+- Visit your Rocket.Chat account -> **Workspace** tab.
+- Click on **Integrations** -> click on the **New** button to generate your new webhook.
+- Toggle the enable button to true.
+- Enter the details and click on the **Save** button -> copy your Webhook URL.
->In case you don't have a webhook ready, you can find read about how to create a webhook [here](https://docs.rocket.chat/guides/administrator-guides/integrations#create-a-new-incoming-webhook).
->Make sure to mark the creating integration as "Enabled" on Rocket.Chat.
->
+
-**Step 5:** Enter your **webhook URL** copied from the above step, in the box that appears after clicking on Add in step 3, and then press "**Install**"
+:::
-
+## Lodge your First Bug
-**Step 6:** Once you press on Install, Rocket.Chat will be integrated with LambdaTest on the same webhook URL, and an alert-box will appear on the top right corner of the screen stating that the integration is successful.
-
+> Note: If you are using Rocket.Chat for the first time, make sure you have a channel set up to receive the webhook notifications. It is a prerequisite for pushing screenshots from your LambdaTest account.
-**Step 7:** Voila! Go to Integrations tab again and you will see a Rocket.Chat under "My Integrations".
-
+**Step 1:** Start with any type of testing. For demo purposes, we are going with **Real Time Testing**.
-Also, you can see a **green tick** in front of Rocket.Chat under Communication category, indicating that Rocket.Chat has been successfully **installed**.
+**Step 2:** Enter your project URL, configure the browser and operating system of your choice, and hit **Start**.
-
+**Step 3:** After the VM is launched and operable, you can perform testing on your web app to find bugs. If a bug is revealed, click on the **Bug** icon from the left panel to capture a screenshot of it.
-## How To Log Your First Bug Through Rocket.Chat Integration?
+**Step 4:** After clicking on the **Mark as Bug** button, a bug details form will open up. Fill in the fields as per your requirement.
+- Select the **Issue type** (Bug/Task/Issue).
+- Set the **Priority** of the issue.
+- Assign the bug to a member of your team using the **Assignee** field.
+- Mention the **Summary** as well to easily organize your bugs.
+- Write a description to help relate the cause of the issue or the task.
+- Make sure the **Send to Rocket.Chat** checkbox at the bottom of the form is ticked.
-* * *
+**Step 5:** Click on the **Create Bug** button to send the bug directly to your Rocket.Chat workspace. Prompt messages on top of the VM will indicate the progress of the bug logging.
-**Step 1:** Select any of the test from the left navigation menu of LambdaTest. For this instance, we will be taking "**Real Time Test**" option.
+
-**Step 2:** Enter the URL of the web-app to be tested in the URL box. Press **'Start'** after selecting the configuration of browser and operating system as per your choice.
+**Step 6:** Go to your Rocket.Chat workspace and check your assigned channel; a notification will have been sent for the bug.
-
+## Uninstall Rocket.Chat Integration
-**Step 3:** After the VM is launched and operable, you can perform testing on your web-app as per your choice, such as for finding bugs. Suppose if you come across any bug, then you need to click on the **Bug icon** from the left panel, for capturing a screenshot of the current VM instance.
+**Step 1:** Log in to your LambdaTest account. You need Admin or User level access to view and manage integrations.
-
+**Step 2:** Click on Settings -> Integrations -> Communication.
-**Step 4:** A screenshot will be captured and appear on your screen in an **in-built image editor**. Annotate any issue or a task as per your choice in this editor, and then press **"Mark as Bug"** button.
+**Step 3:** Click on the **Remove** button.
-
-
-**Step 5:** Complete the **Bug/Task/Issue Form** that appears upon clicking the **"Mark as Bug"** button, as per your requirement:
->* Select the **Issue type** as Bug/Task/Issue. Here, we have selected it as Bug.
-* Set the **Priority** of the bug as per convenience.
-* You can assign the bug to a member of your team by populating the field '**Assignee**'.
-* Write the '**Summary**' of the bug that would be sent to the assigned member.
-* You also get to post an optional **description** to help with the bug.
-
-At the bottom of the form, please make sure to tick the check button
-stating **"Send to Rocket.Chat"**.
-
->Note: By default, it would be **checked**, if Rocket.Chat is already integrated. This button helps send the screenshot, as per the filled form, directly to your workspace, when the Bug is created. You can uncheck the button anytime if you do not wish the bug/task/issue on Rocket.Chat
-
-Now click on **"Create Bug"** button to send the bug directly to Rocket.Chat workspace.
-
-
-
-**Step 6:** After you click on **"Create Bug"**, you will be able to observe it being successfully marked through a single click effort. You will get **prompt messages** on top of your Virtual Machine indicating the progress of bug logging. After few seconds you will be notified with a prompt message "**Bug successfully marked**" indicating that the screenshot has been pushed to your Rocket.Chat workspace.
-
-
-
-Login to your workspace on Rocket.Chat and you will be able to notice the marked bug in the set channel assigned to the set person. All the data you provided through Lambdatest form would already be presented in it. LambdaTest automatically includes test environment details and related screenshots in the screenshot as attachments.
-
-
-
-A clickable link will be provided in the rocket.chat channel along with the screenshot to instantly route users on their browser for a full screen view of the bug.
-
-## How To Remove Rocket.Chat Integration?
-
-* * *
-
->You can work with one integration at a time. So if you would want to integrate to a similar 3rd party application, then you would have to **remove** your current integration. Here is how you can do that.
-
-**Step 1:** Login to your LambdaTest account.
-
-**Step 2:** Select 'Integrations' from the left navigation menu bar & go to 'Communications' category. This will guide you to a screen where you will find 3rd party applications, available to integrate with your LambdaTest account.
-
-**Step 3:** Hover to Rocket.Chat block. A **"REMOVE"** button will appear. Click on **'REMOVE'**.
-
-
+
>That was all you need to know for LambdaTest + Rocket.Chat Integration. Increase your productivity multifold with our integrations. If you still have any questions for us, please feel free to let us know. Our experts are always **available on chat** to help you out with any roadblock regarding our product. Happy testing!
@@ -162,4 +130,3 @@ A clickable link will be provided in the rocket.chat channel along with the scre
-
diff --git a/docs/specflow-report.md b/docs/specflow-report.md
index 60493f3a3..505d8e375 100644
--- a/docs/specflow-report.md
+++ b/docs/specflow-report.md
@@ -2,7 +2,7 @@
id: specflow-report
title: SpecFlow Report on HyperExecute
hide_title: true
-sidebar_label: SpecFlow Report
+sidebar_label: SpecFlow
description: Learn how to generate SpecFlow Report on lambdatest and download the reports from the dashboard
keywords:
- specflow testing reports
diff --git a/docs/teamwork-integration.md b/docs/teamwork-integration.md
index 30b0e01e6..73ee7581c 100644
--- a/docs/teamwork-integration.md
+++ b/docs/teamwork-integration.md
@@ -62,7 +62,7 @@ LambdaTest integrates with Teamwork Projects, allowing users to file issues dire
**Step 4:** Now, provide your Teamwork API Token and Teamwork site address to establish integration with LambdaTest and click on **install** button.
:::info Fetch your Teamwork API Token
@@ -71,7 +71,7 @@ LambdaTest integrates with Teamwork Projects, allowing users to file issues dire
- You will find the "**API Token**" under the section **API & Mobile**. Copy your API token.
:::
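+
+Optionally, you can confirm the token works before wiring it into LambdaTest. Teamwork's classic REST API accepts HTTP Basic authentication with the API token as the username and any placeholder string as the password. The following is a minimal sketch under that assumption; `https://yoursite.teamwork.com` and the token value are placeholders for your own details.
+
+```python
+import base64
+import urllib.request
+
+# Placeholders: substitute your own Teamwork site address and API token.
+SITE = "https://yoursite.teamwork.com"
+API_TOKEN = "your-teamwork-api-token"
+
+# Basic auth with the token as the username; the password can be any string.
+credentials = base64.b64encode(f"{API_TOKEN}:x".encode()).decode()
+
+req = urllib.request.Request(
+    f"{SITE}/projects.json",
+    headers={"Authorization": f"Basic {credentials}"},
+)
+
+with urllib.request.urlopen(req) as resp:
+    # A 200 response with your project list indicates the token is valid.
+    print(resp.status)
+```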
@@ -96,7 +96,7 @@ LambdaTest integrates with Teamwork Projects, allowing users to file issues dire
**Step 5:** Click on **Mark as Bug** button. Now go to your dashboard and check a ticket will be created for the same.
## Uninstall Teamwork Integration
@@ -108,7 +108,7 @@ LambdaTest integrates with Teamwork Projects, allowing users to file issues dire
**Step 3:** Click on the **Remove** button.
> That was all you need to know for LambdaTest + Teamwork Integration. Increase your productivity with our integrations. If you still have any questions for us, please feel free to let us know. Our experts are always **available on chat** to help you out with any roadblock regarding our product. Happy testing!
diff --git a/docs/zipboard-integration.md b/docs/zipboard-integration.md
index dd2e5cd33..50123c033 100644
--- a/docs/zipboard-integration.md
+++ b/docs/zipboard-integration.md
@@ -1,7 +1,7 @@
---
id: zipboard-integration
title: zipBoard Integration
-hide_title: true
+hide_title: false
sidebar_label: zipBoard
description: This document will help you integrate LambdaTest with zipBoard. That way, you can log bugs to your zipBoard project in a single click as you perform cross browser testing with LambdaTest.
keywords:
@@ -42,116 +42,56 @@ slug: zipboard-integration/
})
}}
>
+zipBoard is a visual feedback and bug tracking tool designed to streamline collaboration across teams during web development. By integrating LambdaTest with zipBoard, teams can enhance their testing workflows and enable seamless communication between developers, testers, and stakeholders.
-# zipBoard Integration
+This integration allows you to capture real-time feedback and track issues directly within your test environments on LambdaTest’s cloud platform, ensuring faster bug resolution and better product quality. In this documentation, we’ll guide you through the steps to integrate zipBoard with LambdaTest and optimize your test management workflow.
-* * *
-zipBoard offers a review platform on cloud to help you collaborate with your team over a web project. You can attach an image and mention suggestions, tag your collaborators, and do more with full transparency. Using zipBoard your colleagues can view every reported bug or suggestion regarding the project, and understand who is doing what?
+## Integrate zipBoard from your LambdaTest Account
-LambdaTest is a cross browser testing tool on a cloud which offers 3000+ real browsers and browser versions. You can perform both manual, and automated cross browser testing with an online Selenium Grid. LambdaTest integration with zipBoard will help you report bugs directly from LambdaTest to your zipBoard instance, in just a single click.
+**Step 1:** Log in to your LambdaTest account. You need Admin or User level access to view and install integrations.
-## How To Integrate zipBoard With Your LambdaTest Account?
-***
+**Step 2:** Click on Settings -> Integrations -> Bug Tracker.
-**Step 1:** Login to your LambdaTest account. You would need to have Admin or User level access to see and install integrations.
+**Step 3:** Click on the **Connect** button on the zipBoard block.
-**Step 2:** Select ‘Integration’ from the left navigation menu bar. This will guide you to a screen where you will find a list of 3rd party applications, available to integrate with your LambdaTest account.
+**Step 4:** Now, provide your zipBoard API Token to establish the integration with LambdaTest and click on the **install** button.
-**Step 3:** Under the Bug Tracker category, click on the block that says **‘zipBoard’**.
+
-
+:::info Fetch your zipBoard API Token
+- Visit your zipBoard account -> **Edit Profile** tab.
+- Click on the **+** icon to generate your API token. Copy the token and use it to authenticate the LambdaTest integration.
-**Step 4:** You will be routed to a screen where you need to fill up your API key.
+
+:::
-
+> This API key is used to authenticate your zipBoard account with third-party apps. Please do not share it with anyone. If you believe your API key has been misplaced, you can always generate a new one from zipBoard by clicking the refresh icon in your profile settings.
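+
+Because the key grants access to your zipBoard account, avoid hard-coding it in scripts or CI configuration. Below is a minimal sketch of one common approach, assuming you export it under a variable name of your choosing (the name `ZIPBOARD_API_TOKEN` used here is only a convention for this example):
+
+```python
+import os
+import sys
+
+# Hypothetical variable name; export it in your shell or CI secret store first:
+#   export ZIPBOARD_API_TOKEN="<token copied from your zipBoard profile>"
+token = os.environ.get("ZIPBOARD_API_TOKEN")
+
+if not token:
+    sys.exit("ZIPBOARD_API_TOKEN is not set; generate a token from your zipBoard profile first.")
+
+# Pass `token` to whatever tooling needs it instead of pasting the raw value around.
+print("zipBoard API token loaded from the environment.")
+```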
-## Finding API Key In zipBoard
-***
+## Log your First Bug
-**Step 1:** Go to your zipBoard account and edit your profile by opening the drop-down menu from top-right corner of your screen.
+**Step 1:** Create a new project on zipBoard if you haven't already.
-
+**Step 2:** Now, while testing your website or web app on LambdaTest, click on the **Mark as Bug** button if you detect any bug.
-**Step 2:** Click on the + icon to generate your first API key if you don’t have one already.
+**Step 3:** Add your comments and other details for the bug, then click on the **Create Task** button.
-
+**Step 4:** Go to your zipBoard dashboard and check that a ticket has been created for it.
-**Step 3:** A prompt message would open up asking you to create a new API key. Click on Yes.
+## Uninstall zipBoard Integration
-
+**Step 1:** Log in to your LambdaTest account. You need Admin or User level access to view and install integrations.
-> Note: This API key will be used to authenticate your zipBoard account with third-party applications. Make sure you don’t share it with anyone. In case you feel the API key has been misplaced then you can always regenerate a new API key from zipBoard by clicking on the refresh icon under your profile settings.
+**Step 2:** Click on Settings -> Integrations -> Bug Tracker.
-
+**Step 3:** Click on the **Remove** button.
-**Step 4:** Copy the API key and paste it on LambdaTest to integrate with zipBoard and hit Install.
-
-
-
-**Step 5:** You have successfully integrated zipBoard with LambdaTest. If you go to integrations tab you will find a green tick over the zipBoard block, indicating the integration is successful.
-
-
-
-## Logging The First Bug From LambdaTest To zipBoard
-***
-
-**Step 1:** Login to LambdaTest and perform any type of testing. In this case we will go with [Screenshot testing](https://www.lambdatest.com/support/docs/automated-screenshot-testing/) feature under the drop-down menu for Visual UI Testing.
-
-
-
-**Step 2:** Select the browsers for which you wish to [capture screenshot](https://www.lambdatest.com/full-page-screen-capture) of your website and provide the test URL before you hit the Capture button.
-
-
-
-**Step 3:** Wait a few seconds and you will find your full-page screenshots for every configuration you selected.
-
-
-
-**Step 4:** Once you have the screenshots ready, click on any screenshot and highlight the bug you wish to report. After you highlight click on the mark as bug button.
-
-
-
-**Step 5:** A form specific to zipBoard would open up. You will need to fill the details in this form before you create the issue. Doing so will directly mark a bug into your zipBoard instance.
-
-You can attach the below details with the screenshot:
-
----
-
-* Project
-* Assignee
-* Task Status
-* Task Priority
-* Task Type
-* Task Name
-* Description
----
-
-
-
-Every detail you provide at LambdaTest will automatically be reflected in the zipBoard task.
-
-> Note: After you click on "Create Bug", you will be able to observe it being successfully marked through a single click effort. You will get prompt messages on top of your Virtual Machine indicating the progress of bug logging. After few seconds you will be notified with a prompt message "Bug successfully marked" indicating that the screenshot has been pushed to your zipBoard project.
-
-**Step 6:** If you visit your respective Project on zipBoard, you will find a new task created under it.
-
-
-
-**Step 7:** Open tasks of the Project and you will find the bug marked from LambdaTest with all the information you specified while your performed cross browser testing.
-
-
-
-**Step 8:** Click on the task and you can also find the URL to the screenshot you highlighted at LambdaTest along with the test environment details.
-
-
-
-## Remove LambdaTest Integration With zipBoard
-***
-
-> You can work with one integration at a time. So if you would want to integrate to a similar 3rd party application, then you would have to remove your current integration. Here is how you can do that.
-
-Go to LambdaTest application and hit the `Integrations` tab. Under the block for **zipBoard** you will find the option to **REMOVE** the integration.
-
-
+
> That was all you need to know for LambdaTest + zipBoard Integration. Increase your productivity with our integrations. If you still have any questions for us, please feel free to let us know. Our experts are always available on **chat** to help you out with any roadblock regarding our product. Happy testing! 🙂
diff --git a/sidebars.js b/sidebars.js
index 69d259d27..3fd54e1e1 100644
--- a/sidebars.js
+++ b/sidebars.js
@@ -855,6 +855,7 @@ module.exports = {
collapsed: true,
label: "October, 2024",
items: [
+ 'hyperexecute-release-notes-2-5-6',
'hyperexecute-release-notes-2-5-5',
'hyperexecute-release-notes-2-5-4',
]