Classic Infrastructures Command & Control Awesomeness
-
Pabawi is a web frontend for infrastructure management, inventory and remote execution. It currently provides integrations with Puppet, Bolt, PuppetDB, and Hiera. It supports both Puppet Enterprise and Open Source Puppet / OpenVox. It provides a unified web interface for managing infrastructure, executing commands, viewing system information, and tracking operations across your entire environment.
+
Pabawi is a web frontend for infrastructure management, inventory and remote execution. It currently provides integrations with Puppet, Bolt, Ansible, PuppetDB, and Hiera. It supports both Puppet Enterprise and Open Source Puppet / OpenVox. It provides a unified web interface for managing infrastructure, executing commands, viewing system information, and tracking operations across your entire environment.
@@ -22,6 +22,7 @@
- [Screenshots](#screenshots)
- [Prerequisites](#prerequisites)
- [Bolt Integration](#bolt-integration)
+ - [Ansible Integration](#ansible-integration)
- [PuppetDB Integration](#puppetdb-integration)
- [PuppetServer Integration](#puppetserver-integration)
- [Hiera Integration](#hiera-integration)
@@ -126,6 +127,13 @@ To have an idea of Pabawi awesomeness, here some random screenshots
- Any required SSH keys used in Bolt configuration
- For details: [Bolt Setup](docs/integrations/bolt.md)
+### Ansible Integration
+
+- Ansible CLI installed (`ansible` and `ansible-playbook`)
+- A valid local Ansible inventory file
+- SSH (or configured Ansible transport) access to target nodes
+- For details: [Ansible Setup](docs/integrations/ansible.md)
+
### PuppetDB Integration
- Network access to PuppetDB port 8081
@@ -201,7 +209,7 @@ For comprehensive Docker deployment instructions including all integrations, see
Pabawi uses a `.env` file for configuration. Use `backend/.env.example` as reference.
-For detailed configuration options including Bolt, PuppetDB, PuppetServer, and Hiera integration settings, please refer to the [Configuration Guide](docs/configuration.md).
+For detailed configuration options including Bolt, Ansible, PuppetDB, PuppetServer, and Hiera integration settings, please refer to the [Configuration Guide](docs/configuration.md).
For API details, see the [Integrations API Documentation](docs/integrations-api.md).
@@ -219,7 +227,7 @@ For details of the repository files and configurations check the [Repository Str
### Planned Features
-- **Additional Integrations**: Ansible, Tiny Puppet
+- **Additional Integrations**: Tiny Puppet
- **Additional Integrations (to evaluate)**: Terraform, AWS CLI, Azure CLI, Kubernetes, Choria, Icinga
- **Scheduled Executions**: Cron-like scheduling for recurring tasks
- **Custom Dashboards**: User-configurable dashboard widgets
@@ -229,6 +237,8 @@ For details of the repository files and configurations check the [Repository Str
### Version History
+- **v0.7.0**: Ansible integration. Class-aware Hiera lookups
+- **v0.6.0**: Code consolidation and fixes
- **v0.5.0**: Report filtering, puppet run history visualization, enhanced expert mode with frontend logging
- **v0.4.0**: Hiera integration, puppetserver CA management removal, enhanced plugin architecture
- **v0.3.0**: Puppetserver integration, interface enhancements
@@ -262,6 +272,7 @@ This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENS
#### Integration Setup
- [Bolt Setup](docs/integrations/bolt.md) - Bolt configuration guide
+- [Ansible Setup](docs/integrations/ansible.md) - Ansible configuration guide
- [Hiera Setup](docs/integrations/hiera.md) - Hiera configuration guide
- [PuppetDB Integration Setup](docs/integrations/puppetdb.md) - PuppetDB configuration guide
- [Puppetserver Setup](docs/integrations/puppetserver.md) - Puppetserver configuration guide
diff --git a/backend/.env.example b/backend/.env.example
index 23f0bf8a..8ac2a218 100644
--- a/backend/.env.example
+++ b/backend/.env.example
@@ -11,6 +11,12 @@ BOLT_COMMAND_WHITELIST_ALLOW_ALL=false
BOLT_COMMAND_WHITELIST=["ls","pwd","whoami"]
BOLT_EXECUTION_TIMEOUT=300000
+# Ansible integration configuration
+# ANSIBLE_ENABLED=true
+# ANSIBLE_PROJECT_PATH=/path/to/your/ansible/project
+# ANSIBLE_INVENTORY_PATH=inventory/hosts
+# ANSIBLE_EXECUTION_TIMEOUT=300000
+
# Logging configuration (v0.5.0+)
# LOG_LEVEL controls backend logging verbosity
# Options: error, warn, info, debug
diff --git a/backend/package.json b/backend/package.json
index 091211b8..eacb69e2 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -1,6 +1,6 @@
{
"name": "backend",
- "version": "0.6.0",
+ "version": "0.7.0",
"description": "Backend API server for Pabawi",
"main": "dist/server.js",
"scripts": {
diff --git a/backend/src/config/ConfigService.ts b/backend/src/config/ConfigService.ts
index 84175706..f404b97c 100644
--- a/backend/src/config/ConfigService.ts
+++ b/backend/src/config/ConfigService.ts
@@ -27,6 +27,12 @@ export class ConfigService {
* Parse integrations configuration from environment variables
*/
private parseIntegrationsConfig(): {
+ ansible?: {
+ enabled: boolean;
+ projectPath: string;
+ inventoryPath?: string;
+ timeout?: number;
+ };
puppetdb?: {
enabled: boolean;
serverUrl: string;
@@ -106,6 +112,18 @@ export class ConfigService {
} {
const integrations: ReturnType<ConfigService["parseIntegrationsConfig"]> = {};
+ // Parse Ansible configuration
+ if (process.env.ANSIBLE_ENABLED === "true") {
+ integrations.ansible = {
+ enabled: true,
+ projectPath: process.env.ANSIBLE_PROJECT_PATH ?? process.cwd(),
+ inventoryPath: process.env.ANSIBLE_INVENTORY_PATH,
+ timeout: process.env.ANSIBLE_EXECUTION_TIMEOUT
+ ? parseInt(process.env.ANSIBLE_EXECUTION_TIMEOUT, 10)
+ : undefined,
+ };
+ }
+
// Parse PuppetDB configuration
if (process.env.PUPPETDB_ENABLED === "true") {
const serverUrl = process.env.PUPPETDB_SERVER_URL;
@@ -608,6 +626,19 @@ export class ConfigService {
return null;
}
+ /**
+ * Get Ansible configuration if enabled
+ */
+ public getAnsibleConfig():
+ | (typeof this.config.integrations.ansible & { enabled: true })
+ | null {
+ const ansible = this.config.integrations.ansible;
+ if (ansible?.enabled) {
+ return ansible as typeof ansible & { enabled: true };
+ }
+ return null;
+ }
+
/**
* Get Puppetserver configuration if enabled
*/
diff --git a/backend/src/config/schema.ts b/backend/src/config/schema.ts
index 3c7503f5..f15ca1f9 100644
--- a/backend/src/config/schema.ts
+++ b/backend/src/config/schema.ts
@@ -176,6 +176,18 @@ export const PuppetserverConfigSchema = z.object({
export type PuppetserverConfig = z.infer<typeof PuppetserverConfigSchema>;
+/**
+ * Ansible integration configuration schema
+ */
+export const AnsibleConfigSchema = z.object({
+ enabled: z.boolean().default(false),
+ projectPath: z.string().default(process.cwd()),
+ inventoryPath: z.string().default("inventory/hosts"),
+ timeout: z.number().int().positive().default(300000),
+});
+
+export type AnsibleConfig = z.infer<typeof AnsibleConfigSchema>;
+
/**
* Hiera fact source configuration schema
*/
@@ -261,6 +273,7 @@ export type HieraConfig = z.infer<typeof HieraConfigSchema>;
* Integrations configuration schema
*/
export const IntegrationsConfigSchema = z.object({
+ ansible: AnsibleConfigSchema.optional(),
puppetdb: PuppetDBConfigSchema.optional(),
puppetserver: PuppetserverConfigSchema.optional(),
hiera: HieraConfigSchema.optional(),
diff --git a/backend/src/database/ExecutionRepository.ts b/backend/src/database/ExecutionRepository.ts
index c63bb224..aa33cc4c 100644
--- a/backend/src/database/ExecutionRepository.ts
+++ b/backend/src/database/ExecutionRepository.ts
@@ -21,6 +21,7 @@ interface DbRow {
re_execution_count: number | null;
stdout: string | null;
stderr: string | null;
+ execution_tool: string | null;
total?: number;
running?: number;
success?: number;
@@ -33,6 +34,8 @@ interface DbRow {
*/
export type ExecutionType = "command" | "task" | "facts" | "puppet" | "package";
+export type ExecutionTool = "bolt" | "ansible";
+
/**
* Execution status
*/
@@ -74,6 +77,7 @@ export interface ExecutionRecord {
reExecutionCount?: number;
stdout?: string;
stderr?: string;
+ executionTool?: ExecutionTool;
}
/**
@@ -130,8 +134,8 @@ export class ExecutionRepository {
INSERT INTO executions (
id, type, target_nodes, action, parameters, status,
started_at, completed_at, results, error, command, expert_mode,
- original_execution_id, re_execution_count, stdout, stderr
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ original_execution_id, re_execution_count, stdout, stderr, execution_tool
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`;
const params = [
@@ -151,6 +155,7 @@ export class ExecutionRepository {
record.reExecutionCount ?? 0,
record.stdout ?? null,
record.stderr ?? null,
+ record.executionTool ?? "bolt",
];
try {
@@ -181,6 +186,7 @@ export class ExecutionRepository {
"reExecutionCount",
"stdout",
"stderr",
+ "executionTool",
];
const updateFields: string[] = [];
const params: unknown[] = [];
@@ -474,6 +480,10 @@ export class ExecutionRepository {
reExecutionCount: row.re_execution_count ?? 0,
stdout: row.stdout ?? undefined,
stderr: row.stderr ?? undefined,
+ executionTool:
+ row.execution_tool === "ansible"
+ ? "ansible"
+ : "bolt",
};
}
diff --git a/backend/src/database/migrations.sql b/backend/src/database/migrations.sql
index e78335c7..52594db8 100644
--- a/backend/src/database/migrations.sql
+++ b/backend/src/database/migrations.sql
@@ -27,3 +27,7 @@ ALTER TABLE executions ADD COLUMN stdout TEXT;
-- Add stderr column if it doesn't exist
ALTER TABLE executions ADD COLUMN stderr TEXT;
+
+-- Migration: Add execution_tool column to indicate which execution engine was used
+-- Values: bolt, ansible
+ALTER TABLE executions ADD COLUMN execution_tool TEXT DEFAULT 'bolt';
diff --git a/backend/src/database/schema.sql b/backend/src/database/schema.sql
index aecdd75a..253846cf 100644
--- a/backend/src/database/schema.sql
+++ b/backend/src/database/schema.sql
@@ -15,7 +15,8 @@ CREATE TABLE IF NOT EXISTS executions (
original_execution_id TEXT, -- Reference to original execution if this is a re-execution
re_execution_count INTEGER DEFAULT 0, -- Number of times this execution has been re-executed
stdout TEXT, -- Complete stdout output (stored when expert mode enabled)
- stderr TEXT -- Complete stderr output (stored when expert mode enabled)
+ stderr TEXT, -- Complete stderr output (stored when expert mode enabled)
+ execution_tool TEXT DEFAULT 'bolt' CHECK(execution_tool IN ('bolt', 'ansible'))
);
-- Index Strategy:
diff --git a/backend/src/integrations/IntegrationManager.ts b/backend/src/integrations/IntegrationManager.ts
index 94ca0ed2..089448ac 100644
--- a/backend/src/integrations/IntegrationManager.ts
+++ b/backend/src/integrations/IntegrationManager.ts
@@ -15,7 +15,7 @@ import type {
PluginRegistration,
Action,
} from "./types";
-import type { Node, Facts, ExecutionResult } from "../bolt/types";
+import type { Node, Facts, ExecutionResult } from "./bolt/types";
import { NodeLinkingService, type LinkedNode } from "./NodeLinkingService";
import { LoggerService } from "../services/LoggerService";
diff --git a/backend/src/integrations/NodeLinkingService.ts b/backend/src/integrations/NodeLinkingService.ts
index 37579375..6f95cbe9 100644
--- a/backend/src/integrations/NodeLinkingService.ts
+++ b/backend/src/integrations/NodeLinkingService.ts
@@ -5,7 +5,7 @@
* Implements the node linking strategy described in the design document.
*/
-import type { Node } from "../bolt/types";
+import type { Node } from "./bolt/types";
import type { IntegrationManager } from "./IntegrationManager";
import { LoggerService } from "../services/LoggerService";
diff --git a/backend/src/integrations/ansible/AnsiblePlugin.ts b/backend/src/integrations/ansible/AnsiblePlugin.ts
new file mode 100644
index 00000000..e9571cca
--- /dev/null
+++ b/backend/src/integrations/ansible/AnsiblePlugin.ts
@@ -0,0 +1,435 @@
+import { spawn } from "child_process";
+import { existsSync } from "fs";
+import { resolve } from "path";
+import { BasePlugin } from "../BasePlugin";
+import type {
+ Action,
+ Capability,
+ ExecutionToolPlugin,
+ InformationSourcePlugin,
+ HealthStatus,
+} from "../types";
+import type { ExecutionResult, Node, Facts } from "../bolt/types";
+import type { LoggerService } from "../../services/LoggerService";
+import type { PerformanceMonitorService } from "../../services/PerformanceMonitorService";
+import type { AnsibleService } from "./AnsibleService";
+
+export class AnsiblePlugin extends BasePlugin implements ExecutionToolPlugin, InformationSourcePlugin {
+ readonly type = "both" as const;
+ private readonly ansibleService: AnsibleService;
+
+ constructor(
+ ansibleService: AnsibleService,
+ logger?: LoggerService,
+ performanceMonitor?: PerformanceMonitorService,
+ ) {
+ super("ansible", "both", logger, performanceMonitor);
+ this.ansibleService = ansibleService;
+ }
+
+  protected async performInitialization(): Promise<void> {
+ await this.performHealthCheck();
+ }
+
+  protected async performHealthCheck(): Promise<Omit<HealthStatus, "lastChecked">> {
+ const [ansibleOk, ansiblePlaybookOk, ansibleInventoryOk] = await Promise.all([
+ this.checkBinary("ansible"),
+ this.checkBinary("ansible-playbook"),
+ this.checkBinary("ansible-inventory"),
+ ]);
+
+ const inventoryPath = resolve(this.ansibleService.getAnsibleProjectPath(), this.ansibleService.getInventoryPath());
+ const inventoryExists = existsSync(inventoryPath);
+
+ if (!ansibleOk || !ansiblePlaybookOk || !ansibleInventoryOk) {
+ return {
+ healthy: false,
+ message: "Ansible CLI is not available",
+ details: {
+ ansibleAvailable: ansibleOk,
+ ansiblePlaybookAvailable: ansiblePlaybookOk,
+ ansibleInventoryAvailable: ansibleInventoryOk,
+ },
+ };
+ }
+
+ if (!inventoryExists) {
+ return {
+ healthy: false,
+ degraded: true,
+ message: "Ansible inventory file was not found",
+ details: {
+ inventoryPath,
+ },
+ };
+ }
+
+ return {
+ healthy: true,
+ message: "Ansible is configured and available",
+ details: {
+ inventoryPath,
+ },
+ };
+ }
+
+  async executeAction(action: Action): Promise<ExecutionResult> {
+ if (!this.initialized) {
+ throw new Error("Ansible plugin not initialized");
+ }
+
+ const target = Array.isArray(action.target)
+ ? action.target[0]
+ : action.target;
+
+ if (!target) {
+ throw new Error("No target specified for action");
+ }
+
+ const streamingCallback = action.metadata?.streamingCallback as
+ | {
+ onCommand?: (cmd: string) => void;
+ onStdout?: (chunk: string) => void;
+ onStderr?: (chunk: string) => void;
+ }
+ | undefined;
+
+ switch (action.type) {
+ case "command":
+ return await this.ansibleService.runCommand(
+ target,
+ action.action,
+ streamingCallback,
+ );
+ case "task": {
+ if (action.action !== "package") {
+ throw new Error(
+ `Unsupported ansible task action: ${action.action}. Only 'package' is currently supported.`,
+ );
+ }
+
+ const packageNameParam = action.parameters?.packageName;
+ const packageName = typeof packageNameParam === "string" ? packageNameParam.trim() : "";
+ if (!packageName) {
+ throw new Error("packageName is required for ansible package action");
+ }
+
+ const ensure =
+ action.parameters?.ensure === "absent" ||
+ action.parameters?.ensure === "latest"
+ ? (action.parameters.ensure)
+ : "present";
+
+ const version =
+ typeof action.parameters?.version === "string"
+ ? action.parameters.version
+ : undefined;
+
+ const settings =
+ action.parameters?.settings &&
+ typeof action.parameters.settings === "object"
+          ? (action.parameters.settings as Record<string, unknown>)
+ : undefined;
+
+ return await this.ansibleService.installPackage(
+ target,
+ packageName,
+ ensure,
+ version,
+ settings,
+ streamingCallback,
+ );
+ }
+ case "plan": {
+ const extraVars =
+ action.parameters?.extraVars &&
+ typeof action.parameters.extraVars === "object"
+          ? (action.parameters.extraVars as Record<string, unknown>)
+ : undefined;
+
+ return await this.ansibleService.runPlaybook(
+ target,
+ action.action,
+ extraVars,
+ streamingCallback,
+ );
+ }
+ default:
+ throw new Error(`Unsupported action type for ansible: ${action.type}`);
+ }
+ }
+
+ listCapabilities(): Capability[] {
+ return [
+ {
+ name: "command",
+ description: "Execute shell commands on target nodes via ansible shell module",
+ },
+ {
+ name: "package",
+ description: "Install or remove packages on target nodes via ansible package module",
+ },
+ {
+ name: "playbook",
+ description: "Execute ansible playbooks against target nodes",
+ },
+ ];
+ }
+
+ /**
+ * Get inventory from Ansible
+ * Implements InformationSourcePlugin interface
+ */
+  async getInventory(): Promise<Node[]> {
+ if (!this.initialized) {
+ throw new Error("Ansible plugin not initialized");
+ }
+
+ return await this.ansibleService.getInventory();
+ }
+
+ /**
+ * Get facts for a specific node
+ * Implements InformationSourcePlugin interface
+ *
+ * Note: Ansible facts are gathered dynamically via setup module
+ */
+  async getNodeFacts(nodeId: string): Promise<Facts> {
+ if (!this.initialized) {
+ throw new Error("Ansible plugin not initialized");
+ }
+
+ // Use ansible setup module to gather facts
+ // This is a simplified implementation - could be enhanced with caching
+ const args = [
+ nodeId,
+ "-i",
+ this.ansibleService.getInventoryPath(),
+ "-m",
+ "setup",
+ ];
+
+ try {
+ const result = await new Promise<{ stdout: string; success: boolean }>((resolve, reject) => {
+ const child = spawn("ansible", args, {
+ cwd: this.ansibleService.getAnsibleProjectPath(),
+ env: process.env,
+ });
+
+ let stdout = "";
+ let stderr = "";
+
+ child.stdout.on("data", (data: Buffer) => {
+ stdout += data.toString();
+ });
+
+ child.stderr.on("data", (data: Buffer) => {
+ stderr += data.toString();
+ });
+
+ child.on("close", (exitCode: number | null) => {
+ if (exitCode === 0) {
+ resolve({ stdout, success: true });
+ } else {
+ reject(new Error(`Failed to gather facts: ${stderr || stdout}`));
+ }
+ });
+
+ child.on("error", (error: Error) => {
+ reject(error);
+ });
+ });
+
+ // Parse ansible facts from output
+ // Ansible setup module returns JSON in stdout
+ const factsMatch = /"ansible_facts":\s*({[\s\S]*?})\s*}/.exec(result.stdout);
+ if (factsMatch) {
+      const ansibleFacts = JSON.parse(factsMatch[1]) as Record<string, unknown>;
+
+ // Convert Ansible facts to Bolt-compatible format
+ const distribution = typeof ansibleFacts.ansible_distribution === "string" ? ansibleFacts.ansible_distribution : undefined;
+ const osFamily = typeof ansibleFacts.ansible_os_family === "string" ? ansibleFacts.ansible_os_family : undefined;
+ const osName = distribution ?? osFamily ?? "Unknown";
+ const osFamilyValue = osFamily ?? "Unknown";
+
+ const distVersion = typeof ansibleFacts.ansible_distribution_version === "string" ? ansibleFacts.ansible_distribution_version : undefined;
+ const distMajorVersion = typeof ansibleFacts.ansible_distribution_major_version === "string" ? ansibleFacts.ansible_distribution_major_version : undefined;
+
+ const processorCount = typeof ansibleFacts.ansible_processor_count === "number" ? ansibleFacts.ansible_processor_count : undefined;
+ const processorVcpus = typeof ansibleFacts.ansible_processor_vcpus === "number" ? ansibleFacts.ansible_processor_vcpus : undefined;
+ const cpuCount = processorCount ?? processorVcpus ?? 0;
+ const processorModels = Array.isArray(ansibleFacts.ansible_processor)
+ ? ansibleFacts.ansible_processor.filter((p): p is string => typeof p === "string")
+ : [];
+
+ const hostname = typeof ansibleFacts.ansible_hostname === "string" ? ansibleFacts.ansible_hostname : undefined;
+ const interfaces = typeof ansibleFacts.ansible_interfaces === "object" && ansibleFacts.ansible_interfaces !== null
+        ? ansibleFacts.ansible_interfaces as Record<string, unknown>
+ : {};
+ const fqdn = typeof ansibleFacts.ansible_fqdn === "string" ? ansibleFacts.ansible_fqdn : undefined;
+ const defaultIpv4 = typeof ansibleFacts.ansible_default_ipv4 === "object" && ansibleFacts.ansible_default_ipv4 !== null
+        ? ansibleFacts.ansible_default_ipv4 as Record<string, unknown>
+ : undefined;
+ const ipAddress = defaultIpv4 && typeof defaultIpv4.address === "string" ? defaultIpv4.address : undefined;
+
+ const memTotalMb = typeof ansibleFacts.ansible_memtotal_mb === "number" ? ansibleFacts.ansible_memtotal_mb : undefined;
+ const memFreeMb = typeof ansibleFacts.ansible_memfree_mb === "number" ? ansibleFacts.ansible_memfree_mb : undefined;
+
+ const uptimeSeconds = typeof ansibleFacts.ansible_uptime_seconds === "number" ? ansibleFacts.ansible_uptime_seconds : undefined;
+
+ return {
+ nodeId,
+ gatheredAt: new Date().toISOString(),
+ source: "ansible",
+ facts: {
+ os: {
+ name: osName,
+ family: osFamilyValue,
+ release: {
+ full: distVersion ?? "Unknown",
+ major: distMajorVersion ?? "Unknown",
+ },
+ },
+ processors: {
+ count: cpuCount,
+ models: processorModels,
+ },
+ networking: {
+ hostname: hostname ?? nodeId,
+ interfaces: {
+ ...interfaces,
+ fqdn,
+ ip: ipAddress,
+ },
+ },
+ memory: {
+ system: {
+ total: memTotalMb !== undefined ? `${String(memTotalMb)} MB` : "Unknown",
+ available: memFreeMb !== undefined ? `${String(memFreeMb)} MB` : "Unknown",
+ },
+ },
+ system_uptime: {
+ seconds: uptimeSeconds,
+ },
+ // Store raw ansible facts for reference
+ ansible_facts: ansibleFacts,
+ },
+ };
+ }
+
+ // Return minimal facts if parsing fails
+ return {
+ nodeId,
+ gatheredAt: new Date().toISOString(),
+ source: "ansible",
+ facts: {
+ os: {
+ name: "Unknown",
+ family: "Unknown",
+ release: {
+ full: "Unknown",
+ major: "Unknown",
+ },
+ },
+ processors: {
+ count: 0,
+ models: [],
+ },
+ networking: {
+ hostname: nodeId,
+ interfaces: {},
+ },
+ memory: {
+ system: {
+ total: "Unknown",
+ available: "Unknown",
+ },
+ },
+ },
+ };
+ } catch (error) {
+ this.logger.warn(`Failed to gather facts for node ${nodeId}`, {
+ component: "AnsiblePlugin",
+ operation: "getNodeFacts",
+ metadata: { error: error instanceof Error ? error.message : String(error) },
+ });
+
+ // Return minimal facts rather than failing
+ return {
+ nodeId,
+ gatheredAt: new Date().toISOString(),
+ source: "ansible",
+ facts: {
+ os: {
+ name: "Unknown",
+ family: "Unknown",
+ release: {
+ full: "Unknown",
+ major: "Unknown",
+ },
+ },
+ processors: {
+ count: 0,
+ models: [],
+ },
+ networking: {
+ hostname: nodeId,
+ interfaces: {},
+ },
+ memory: {
+ system: {
+ total: "Unknown",
+ available: "Unknown",
+ },
+ },
+ },
+ };
+ }
+ }
+
+ /**
+ * Get arbitrary data for a node
+ * Implements InformationSourcePlugin interface
+ *
+ * Note: Ansible doesn't have a centralized data store like PuppetDB
+ * This is a placeholder implementation
+ */
+  getNodeData(_nodeId: string, dataType: string): Promise<unknown> {
+ if (!this.initialized) {
+ throw new Error("Ansible plugin not initialized");
+ }
+
+ // Ansible doesn't have built-in support for arbitrary data retrieval
+ // This could be extended to query custom fact files or external sources
+ throw new Error(`Ansible does not support data type: ${dataType}`);
+ }
+
+  private async checkBinary(binary: "ansible" | "ansible-playbook" | "ansible-inventory"): Promise<boolean> {
+    return await new Promise<boolean>((resolve) => {
+ const child = spawn(binary, ["--version"], { stdio: "pipe" });
+ let resolved = false;
+
+ child.on("close", (code) => {
+ if (!resolved) {
+ resolved = true;
+ resolve(code === 0);
+ }
+ });
+
+ child.on("error", () => {
+ if (!resolved) {
+ resolved = true;
+ resolve(false);
+ }
+ });
+
+ setTimeout(() => {
+ if (!resolved) {
+ resolved = true;
+ child.kill();
+ resolve(false);
+ }
+ }, 5000);
+ });
+ }
+}
diff --git a/backend/src/integrations/ansible/AnsibleService.ts b/backend/src/integrations/ansible/AnsibleService.ts
new file mode 100644
index 00000000..c0dfe7b8
--- /dev/null
+++ b/backend/src/integrations/ansible/AnsibleService.ts
@@ -0,0 +1,435 @@
+import { randomUUID } from "crypto";
+import { spawn, type ChildProcess } from "child_process";
+import type { ExecutionResult, Node } from "../bolt/types";
+
+export interface StreamingCallback {
+ onStdout?: (chunk: string) => void;
+ onStderr?: (chunk: string) => void;
+ onCommand?: (command: string) => void;
+}
+
+interface CommandExecutionResult {
+ success: boolean;
+ stdout: string;
+ stderr: string;
+ exitCode: number | null;
+ command: string;
+}
+
+export class AnsibleService {
+ private readonly ansibleProjectPath: string;
+ private readonly inventoryPath: string;
+ private readonly defaultTimeout: number;
+
+ constructor(
+ ansibleProjectPath: string,
+ inventoryPath: string,
+ defaultTimeout = 300000,
+ ) {
+ this.ansibleProjectPath = ansibleProjectPath;
+ this.inventoryPath = inventoryPath;
+ this.defaultTimeout = defaultTimeout;
+ }
+
+ public getAnsibleProjectPath(): string {
+ return this.ansibleProjectPath;
+ }
+
+ public getInventoryPath(): string {
+ return this.inventoryPath;
+ }
+
+ public async runCommand(
+ nodeId: string,
+ command: string,
+ streamingCallback?: StreamingCallback,
+  ): Promise<ExecutionResult> {
+ const startedAt = new Date().toISOString();
+ const startMs = Date.now();
+
+ const args = [
+ nodeId,
+ "-i",
+ this.inventoryPath,
+ "-m",
+ "shell",
+ "-a",
+ command,
+ ];
+
+ const exec = await this.executeCommand("ansible", args, streamingCallback);
+ const completedAt = new Date().toISOString();
+ const status = exec.success ? "success" : "failed";
+ const duration = Math.max(Date.now() - startMs, 0);
+ const errorMessage = !exec.success
+ ? exec.stderr || exec.stdout || "Ansible command execution failed"
+ : undefined;
+
+ return {
+ id: randomUUID(),
+ type: "command",
+ targetNodes: [nodeId],
+ action: command,
+ status,
+ startedAt,
+ completedAt,
+ results: [
+ {
+ nodeId,
+ status,
+ output: {
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ exitCode: exec.exitCode ?? undefined,
+ },
+ error: errorMessage,
+ duration,
+ },
+ ],
+ error: errorMessage,
+ command: exec.command,
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ };
+ }
+
+ public async installPackage(
+ nodeId: string,
+ packageName: string,
+ ensure: "present" | "absent" | "latest",
+ version?: string,
+    settings?: Record<string, unknown>,
+ streamingCallback?: StreamingCallback,
+  ): Promise<ExecutionResult> {
+ const startedAt = new Date().toISOString();
+ const startMs = Date.now();
+
+    const moduleArgs: Record<string, unknown> = {
+ name: version ? `${packageName}-${version}` : packageName,
+ state: ensure,
+ ...(settings ?? {}),
+ };
+
+ const args = [
+ nodeId,
+ "-i",
+ this.inventoryPath,
+ "-m",
+ "package",
+ "-a",
+ this.toModuleArgString(moduleArgs),
+ ];
+
+ const exec = await this.executeCommand("ansible", args, streamingCallback);
+ const completedAt = new Date().toISOString();
+ const status = exec.success ? "success" : "failed";
+ const duration = Math.max(Date.now() - startMs, 0);
+ const errorMessage = !exec.success
+ ? exec.stderr || exec.stdout || "Ansible package installation failed"
+ : undefined;
+
+ return {
+ id: randomUUID(),
+ type: "task",
+ targetNodes: [nodeId],
+ action: "ansible.builtin.package",
+ parameters: {
+ packageName,
+ ensure,
+ version,
+ settings,
+ },
+ status,
+ startedAt,
+ completedAt,
+ results: [
+ {
+ nodeId,
+ status,
+ output: {
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ exitCode: exec.exitCode ?? undefined,
+ },
+ error: errorMessage,
+ duration,
+ },
+ ],
+ error: errorMessage,
+ command: exec.command,
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ };
+ }
+
+ public async runPlaybook(
+ nodeId: string,
+ playbookPath: string,
+    extraVars?: Record<string, unknown>,
+ streamingCallback?: StreamingCallback,
+  ): Promise<ExecutionResult> {
+ const startedAt = new Date().toISOString();
+ const startMs = Date.now();
+
+ const args = [
+ "-i",
+ this.inventoryPath,
+ playbookPath,
+ "--limit",
+ nodeId,
+ ];
+
+ if (extraVars && Object.keys(extraVars).length > 0) {
+ args.push("--extra-vars", JSON.stringify(extraVars));
+ }
+
+ const exec = await this.executeCommand(
+ "ansible-playbook",
+ args,
+ streamingCallback,
+ );
+
+ const completedAt = new Date().toISOString();
+ const status = exec.success ? "success" : "failed";
+ const duration = Math.max(Date.now() - startMs, 0);
+ const errorMessage = !exec.success
+ ? exec.stderr || exec.stdout || "Ansible playbook execution failed"
+ : undefined;
+
+ return {
+ id: randomUUID(),
+ type: "task",
+ targetNodes: [nodeId],
+ action: playbookPath,
+ parameters: {
+ playbook: true,
+ extraVars,
+ },
+ status,
+ startedAt,
+ completedAt,
+ results: [
+ {
+ nodeId,
+ status,
+ output: {
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ exitCode: exec.exitCode ?? undefined,
+ },
+ error: errorMessage,
+ duration,
+ },
+ ],
+ error: errorMessage,
+ command: exec.command,
+ stdout: exec.stdout,
+ stderr: exec.stderr,
+ };
+ }
+
+ private async executeCommand(
+ binary: "ansible" | "ansible-playbook" | "ansible-inventory",
+ args: string[],
+ streamingCallback?: StreamingCallback,
+  ): Promise<CommandExecutionResult> {
+ if (streamingCallback?.onCommand) {
+ streamingCallback.onCommand(this.buildCommandString(binary, args));
+ }
+
+ const timeout = this.defaultTimeout;
+
+ return new Promise((resolve, reject) => {
+ let stdout = "";
+ let stderr = "";
+ let timedOut = false;
+ let childProcess: ChildProcess | null = null;
+
+ const timeoutId = setTimeout(() => {
+ timedOut = true;
+ if (childProcess) {
+ childProcess.kill("SIGTERM");
+ setTimeout(() => {
+ if (childProcess && !childProcess.killed) {
+ childProcess.kill("SIGKILL");
+ }
+ }, 5000);
+ }
+ }, timeout);
+
+ try {
+ childProcess = spawn(binary, args, {
+ cwd: this.ansibleProjectPath,
+ env: process.env,
+ shell: false,
+ });
+
+ if (childProcess.stdout) {
+ childProcess.stdout.on("data", (data: Buffer) => {
+ const chunk = data.toString();
+ stdout += chunk;
+ if (streamingCallback?.onStdout) {
+ streamingCallback.onStdout(chunk);
+ }
+ });
+ }
+
+ if (childProcess.stderr) {
+ childProcess.stderr.on("data", (data: Buffer) => {
+ const chunk = data.toString();
+ stderr += chunk;
+ if (streamingCallback?.onStderr) {
+ streamingCallback.onStderr(chunk);
+ }
+ });
+ }
+
+ childProcess.on("close", (exitCode: number | null) => {
+ clearTimeout(timeoutId);
+
+ if (timedOut) {
+ reject(
+ new Error(
+ `${binary} execution exceeded timeout of ${String(timeout)}ms`,
+ ),
+ );
+ return;
+ }
+
+ resolve({
+ success: exitCode === 0,
+ stdout: stdout.trim(),
+ stderr: stderr.trim(),
+ exitCode,
+ command: this.buildCommandString(binary, args),
+ });
+ });
+
+ childProcess.on("error", (error: Error) => {
+ clearTimeout(timeoutId);
+ reject(
+ new Error(`Failed to execute ${binary} command: ${error.message}`),
+ );
+ });
+ } catch (error) {
+ clearTimeout(timeoutId);
+ reject(error instanceof Error ? error : new Error(String(error)));
+ }
+ });
+ }
+
+ /**
+ * Converts a key/value object to Ansible module argument string format.
+ * e.g. { name: "curl", state: "present" } -> 'name=curl state=present'
+ * Values containing spaces are quoted; internal double quotes are escaped.
+ */
+  private toModuleArgString(args: Record<string, unknown>): string {
+ return Object.entries(args)
+ .map(([key, value]) => {
+ const strValue = String(value).replace(/\\/g, "\\\\").replace(/"/g, '\\"');
+ return strValue.includes(" ") ? `${key}="${strValue}"` : `${key}=${strValue}`;
+ })
+ .join(" ");
+ }
+
+ private buildCommandString(binary: string, args: string[]): string {
+ const escapedArgs = args.map((arg) => {
+ if (arg.includes(" ") || arg.includes('"') || arg.includes("'")) {
+ return `"${arg.replace(/"/g, '\\"')}"`;
+ }
+ return arg;
+ });
+
+ return `${binary} ${escapedArgs.join(" ")}`;
+ }
+
+ /**
+ * Get inventory from Ansible using ansible-inventory command
+ * Parses the inventory and returns nodes in Bolt-compatible format
+ */
+  public async getInventory(): Promise<Node[]> {
+ const args = [
+ "-i",
+ this.inventoryPath,
+ "--list",
+ ];
+
+ try {
+ const exec = await this.executeCommand("ansible-inventory", args);
+
+ if (!exec.success) {
+ throw new Error(`Failed to get Ansible inventory: ${exec.stderr || exec.stdout}`);
+ }
+
+ // Parse JSON output from ansible-inventory
+      const inventoryData = JSON.parse(exec.stdout) as { _meta?: { hostvars?: Record<string, unknown> } };
+ const nodes: Node[] = [];
+
+ // Extract hosts from inventory structure
+ // ansible-inventory --list returns: { _meta: { hostvars: {...} }, groups: {...} }
+ const metaData = inventoryData._meta ?? {};
+ const hostvars = metaData.hostvars ?? {};
+
+ for (const [hostname, vars] of Object.entries(hostvars)) {
+      const hostVars = typeof vars === "object" && vars !== null ? vars as Record<string, unknown> : {};
+
+ // Determine transport based on connection type
+ let transport: "ssh" | "winrm" | "local" = "ssh";
+ const connection = hostVars.ansible_connection as string | undefined;
+
+ if (connection === "winrm") {
+ transport = "winrm";
+ } else if (connection === "local") {
+ transport = "local";
+ }
+
+ // Build URI
+ const host = (hostVars.ansible_host as string | undefined) ?? hostname;
+ const port = hostVars.ansible_port as number | undefined;
+ const user = hostVars.ansible_user as string | undefined;
+
+ let uri = host;
+ if (port) {
+ uri = `${host}:${String(port)}`;
+ }
+
+ // Build config object
+      const config: Record<string, unknown> = {};
+
+ if (user) {
+ config.user = user;
+ }
+ if (port) {
+ config.port = port;
+ }
+
+ // Add other relevant ansible variables to config
+ if (hostVars.ansible_ssh_private_key_file) {
+ config["private-key"] = hostVars.ansible_ssh_private_key_file;
+ }
+ if (hostVars.ansible_become) {
+ config.sudo = hostVars.ansible_become;
+ }
+ if (hostVars.ansible_become_user) {
+ config["run-as"] = hostVars.ansible_become_user;
+ }
+
+ nodes.push({
+ id: hostname,
+ name: hostname,
+ uri,
+ transport,
+ config,
+ source: "ansible",
+ });
+ }
+
+ return nodes;
+ } catch (error) {
+ if (error instanceof Error) {
+ throw new Error(`Failed to parse Ansible inventory: ${error.message}`);
+ }
+ throw error;
+ }
+ }
+}
diff --git a/backend/src/integrations/bolt/BoltPlugin.ts b/backend/src/integrations/bolt/BoltPlugin.ts
index 3b5b71e1..1955252e 100644
--- a/backend/src/integrations/bolt/BoltPlugin.ts
+++ b/backend/src/integrations/bolt/BoltPlugin.ts
@@ -13,8 +13,8 @@ import type {
Action,
Capability,
} from "../types";
-import type { BoltService } from "../../bolt/BoltService";
-import type { ExecutionResult, Node, Facts } from "../../bolt/types";
+import type { BoltService } from "./BoltService";
+import type { ExecutionResult, Node, Facts } from "./types";
import type { LoggerService } from "../../services/LoggerService";
import type { PerformanceMonitorService } from "../../services/PerformanceMonitorService";
diff --git a/backend/src/bolt/BoltService.ts b/backend/src/integrations/bolt/BoltService.ts
similarity index 99%
rename from backend/src/bolt/BoltService.ts
rename to backend/src/integrations/bolt/BoltService.ts
index 1a326fea..8eab55bf 100644
--- a/backend/src/bolt/BoltService.ts
+++ b/backend/src/integrations/bolt/BoltService.ts
@@ -17,7 +17,7 @@ import {
BoltTaskNotFoundError,
BoltTaskParameterError,
} from "./types";
-import { LoggerService } from "../services/LoggerService";
+import { LoggerService } from "../../services/LoggerService";
/**
* Streaming callback for real-time output
diff --git a/backend/src/bolt/types.ts b/backend/src/integrations/bolt/types.ts
similarity index 100%
rename from backend/src/bolt/types.ts
rename to backend/src/integrations/bolt/types.ts
diff --git a/backend/src/integrations/hiera/HieraPlugin.ts b/backend/src/integrations/hiera/HieraPlugin.ts
index 18f98a6d..53a73c3e 100644
--- a/backend/src/integrations/hiera/HieraPlugin.ts
+++ b/backend/src/integrations/hiera/HieraPlugin.ts
@@ -18,7 +18,7 @@ import type {
InformationSourcePlugin,
HealthStatus,
} from "../types";
-import type { Node, Facts } from "../../bolt/types";
+import type { Node, Facts } from "../bolt/types";
import type { IntegrationManager } from "../IntegrationManager";
import { HieraService } from "./HieraService";
import type { HieraServiceConfig } from "./HieraService";
diff --git a/backend/src/integrations/hiera/HieraService.ts b/backend/src/integrations/hiera/HieraService.ts
index 0580d0af..baa6975c 100644
--- a/backend/src/integrations/hiera/HieraService.ts
+++ b/backend/src/integrations/hiera/HieraService.ts
@@ -12,7 +12,7 @@
import * as fs from "fs";
import * as path from "path";
import type { IntegrationManager } from "../IntegrationManager";
-import type { Catalog } from "../puppetdb/types";
+import type { Resource } from "../puppetdb/types";
import { HieraParser } from "./HieraParser";
import { HieraScanner } from "./HieraScanner";
import { HieraResolver } from "./HieraResolver";
@@ -439,7 +439,7 @@ export class HieraService {
const keys = await this.resolveAllKeys(nodeId);
// Classify keys as used/unused based on catalog analysis
- const { usedKeys, unusedKeys } = await this.classifyKeyUsage(nodeId, keys);
+ const { usedKeys, unusedKeys, classes } = await this.classifyKeyUsage(nodeId, keys);
// Generate hierarchy file information
const hierarchyFiles = await this.getHierarchyFiles(nodeId, facts);
@@ -451,6 +451,7 @@ export class HieraService {
usedKeys,
unusedKeys,
hierarchyFiles,
+ classes,
};
// Update cache
@@ -470,14 +471,14 @@ export class HieraService {
*
* @param nodeId - Node identifier
* @param keys - Map of resolved keys
- * @returns Object with usedKeys and unusedKeys sets
+ * @returns Object with usedKeys, unusedKeys sets, and classes array
*
* Requirements: 6.6
*/
private async classifyKeyUsage(
nodeId: string,
keys: Map
- ): Promise<{ usedKeys: Set<string>; unusedKeys: Set<string> }>
+ ): Promise<{ usedKeys: Set<string>; unusedKeys: Set<string>; classes: string[] }>
const usedKeys = new Set<string>();
const unusedKeys = new Set<string>();
@@ -491,7 +492,7 @@ export class HieraService {
unusedKeys.add(keyName);
}
this.log(`No-catalog classification: ${String(usedKeys.size)} used keys, ${String(unusedKeys.size)} unused keys`);
- return { usedKeys, unusedKeys };
+ return { usedKeys, unusedKeys, classes: [] };
}
// Build class prefixes for matching
@@ -509,7 +510,7 @@ export class HieraService {
}
this.log(`Class-based classification: ${String(usedKeys.size)} used keys, ${String(unusedKeys.size)} unused keys`);
- return { usedKeys, unusedKeys };
+ return { usedKeys, unusedKeys, classes: includedClasses };
}
/**
@@ -530,37 +531,28 @@ export class HieraService {
return [];
}
- // Use the same method as Managed Resources: call getNodeCatalog directly
- // This ensures we get the properly transformed catalog data
- const catalog = await (puppetdb as unknown as { getNodeCatalog: (nodeId: string) => Promise<Catalog | null> }).getNodeCatalog(nodeId);
+ // Use getNodeResources to get all resources including Class resources
+ // This is more reliable than using the catalog endpoint
+ const resourcesByType = await (puppetdb as unknown as { getNodeResources: (nodeId: string) => Promise<Record<string, Resource[]>> }).getNodeResources(nodeId);
- if (!catalog) {
- this.log(`No catalog data available for node: ${nodeId}`);
- return [];
- }
-
- // Extract class names from catalog resources
- if (!Array.isArray(catalog.resources)) {
- this.log(`Catalog for node ${nodeId} has no resources array`);
- return [];
- }
+ // Get Class resources specifically
+ const classResources = resourcesByType.Class;
- // Filter for Class resources and extract titles
- const classes = catalog.resources
- .filter(resource => resource.type === "Class")
- .map(resource => resource.title.toLowerCase());
+ this.log(`Found ${String(classResources.length)} Class resources for node: ${nodeId}`);
- this.log(`Found ${String(classes.length)} classes in catalog for node: ${nodeId}`);
+ // Extract class titles and convert to lowercase
+ const classes = classResources.map(resource => resource.title.toLowerCase());
- // Log some example classes for debugging
+ // Log all classes for debugging
if (classes.length > 0) {
- const exampleClasses = classes.slice(0, 5).join(", ");
- this.log(`Example classes: ${exampleClasses}`);
+ this.log(`All classes: ${classes.join(", ")}`);
+ } else {
+ this.log(`WARNING: No Class resources found. This may indicate the node has no catalog or no classes included.`);
}
return classes;
} catch (error) {
- this.log(`Failed to get catalog for key usage analysis: ${error instanceof Error ? error.message : String(error)}`);
+ this.log(`Failed to get resources for key usage analysis: ${error instanceof Error ? error.message : String(error)}`);
return [];
}
}
diff --git a/backend/src/integrations/hiera/types.ts b/backend/src/integrations/hiera/types.ts
index fcd6e685..7c48a4ef 100644
--- a/backend/src/integrations/hiera/types.ts
+++ b/backend/src/integrations/hiera/types.ts
@@ -160,6 +160,7 @@ export interface NodeHieraData {
usedKeys: Set<string>;
unusedKeys: Set<string>;
hierarchyFiles: HierarchyFileInfo[];
+ classes?: string[];
}
/**
@@ -353,6 +354,7 @@ export interface NodeHieraDataResponse {
warnings?: string[];
hierarchyFiles: HierarchyFileInfo[];
totalKeys: number;
+ classes?: string[];
}
/**
diff --git a/backend/src/integrations/puppetdb/PuppetDBService.ts b/backend/src/integrations/puppetdb/PuppetDBService.ts
index 749badaf..30f1687e 100644
--- a/backend/src/integrations/puppetdb/PuppetDBService.ts
+++ b/backend/src/integrations/puppetdb/PuppetDBService.ts
@@ -10,7 +10,7 @@
import { BasePlugin } from "../BasePlugin";
import type { InformationSourcePlugin, HealthStatus } from "../types";
-import type { Node, Facts } from "../../bolt/types";
+import type { Node, Facts } from "../bolt/types";
import type { PuppetDBConfig } from "../../config/schema";
import type { PuppetDBClient } from "./PuppetDBClient";
import {
diff --git a/backend/src/integrations/puppetserver/PuppetserverService.ts b/backend/src/integrations/puppetserver/PuppetserverService.ts
index cf2df74e..5ca4f545 100644
--- a/backend/src/integrations/puppetserver/PuppetserverService.ts
+++ b/backend/src/integrations/puppetserver/PuppetserverService.ts
@@ -12,7 +12,7 @@
import { BasePlugin } from "../BasePlugin";
import type { InformationSourcePlugin, HealthStatus } from "../types";
-import type { Node, Facts } from "../../bolt/types";
+import type { Node, Facts } from "../bolt/types";
import type { PuppetserverConfig } from "../../config/schema";
import { PuppetserverClient } from "./PuppetserverClient";
import type { LoggerService } from "../../services/LoggerService";
diff --git a/backend/src/integrations/types.ts b/backend/src/integrations/types.ts
index e1601f9b..ae497895 100644
--- a/backend/src/integrations/types.ts
+++ b/backend/src/integrations/types.ts
@@ -5,7 +5,7 @@
* backend systems (execution tools and information sources) into Pabawi.
*/
-import type { Node, Facts, ExecutionResult } from "../bolt/types";
+import type { Node, Facts, ExecutionResult } from "./bolt/types";
/**
* Health status for an integration
diff --git a/backend/src/routes/commands.ts b/backend/src/routes/commands.ts
index 13864a66..835ee8e7 100644
--- a/backend/src/routes/commands.ts
+++ b/backend/src/routes/commands.ts
@@ -3,7 +3,7 @@ import { z } from "zod";
import type { ExecutionRepository } from "../database/ExecutionRepository";
import type { CommandWhitelistService } from "../validation/CommandWhitelistService";
import { CommandNotAllowedError } from "../validation/CommandWhitelistService";
-import { BoltInventoryNotFoundError } from "../bolt/types";
+import { BoltInventoryNotFoundError } from "../integrations/bolt/types";
import { asyncHandler } from "./asyncHandler";
import type { StreamingExecutionManager } from "../services/StreamingExecutionManager";
import type { IntegrationManager } from "../integrations/IntegrationManager";
@@ -14,6 +14,7 @@ import { NodeIdParamSchema } from "../validation/commonSchemas";
const CommandExecutionBodySchema = z.object({
command: z.string().min(1, "Command is required"),
expertMode: z.boolean().optional(),
+ tool: z.enum(["bolt", "ansible"]).optional(),
});
/**
@@ -65,6 +66,26 @@ export function createCommandsRouter(
const nodeId = params.id;
const command = body.command;
const expertMode = body.expertMode ?? false;
+ const requestedTool = body.tool;
+
+ const boltTool = integrationManager.getExecutionTool("bolt");
+ const ansibleTool = integrationManager.getExecutionTool("ansible");
+ const selectedTool = requestedTool
+ ?? (boltTool ? "bolt" : ansibleTool ? "ansible" : "bolt");
+
+ if (!integrationManager.getExecutionTool(selectedTool)) {
+ const errorResponse = {
+ error: {
+ code: "EXECUTION_TOOL_NOT_AVAILABLE",
+ message: `Execution tool '${selectedTool}' is not available`,
+ },
+ };
+
+ res.status(503).json(
+ debugInfo ? expertModeService.attachDebugInfo(errorResponse, debugInfo) : errorResponse,
+ );
+ return;
+ }
if (debugInfo) {
expertModeService.addDebug(debugInfo, {
@@ -178,6 +199,7 @@ export function createCommandsRouter(
startedAt: new Date().toISOString(),
results: [],
expertMode,
+ executionTool: selectedTool,
});
logger.info("Execution record created, starting command execution", {
@@ -205,7 +227,7 @@ export function createCommandsRouter(
);
// Execute action through IntegrationManager
- const result = await integrationManager.executeAction("bolt", {
+ const result = await integrationManager.executeAction(selectedTool, {
type: "command",
target: nodeId,
action: command,
@@ -282,10 +304,11 @@ export function createCommandsRouter(
// Attach debug info if expert mode is enabled
if (debugInfo) {
debugInfo.duration = duration;
- expertModeService.setIntegration(debugInfo, 'bolt');
+ expertModeService.setIntegration(debugInfo, selectedTool);
expertModeService.addMetadata(debugInfo, 'executionId', executionId);
expertModeService.addMetadata(debugInfo, 'nodeId', nodeId);
expertModeService.addMetadata(debugInfo, 'command', command);
+ expertModeService.addMetadata(debugInfo, 'tool', selectedTool);
expertModeService.addInfo(debugInfo, {
message: "Command execution started",
context: JSON.stringify({ executionId, nodeId, command }),
diff --git a/backend/src/routes/executions.ts b/backend/src/routes/executions.ts
index e6d4ddfd..54cfa0e0 100644
--- a/backend/src/routes/executions.ts
+++ b/backend/src/routes/executions.ts
@@ -819,6 +819,7 @@ export function createExecutionsRouter(
results: [] as NodeResult[],
command: (modifications.command ?? originalExecution.command),
expertMode: (modifications.expertMode ?? originalExecution.expertMode),
+ executionTool: originalExecution.executionTool,
};
logger.debug("Creating re-execution with parameters", {
diff --git a/backend/src/routes/facts.ts b/backend/src/routes/facts.ts
index 25d84b35..80a00618 100644
--- a/backend/src/routes/facts.ts
+++ b/backend/src/routes/facts.ts
@@ -6,7 +6,7 @@ import {
BoltExecutionError,
BoltParseError,
BoltInventoryNotFoundError,
-} from "../bolt/types";
+} from "../integrations/bolt/types";
import { asyncHandler } from "./asyncHandler";
import { LoggerService } from "../services/LoggerService";
import { ExpertModeService } from "../services/ExpertModeService";
diff --git a/backend/src/routes/hiera.ts b/backend/src/routes/hiera.ts
index 0fdf862d..db6e6c41 100644
--- a/backend/src/routes/hiera.ts
+++ b/backend/src/routes/hiera.ts
@@ -1002,6 +1002,7 @@ export function createHieraRouter(integrationManager: IntegrationManager): Route
factSource,
totalKeys: keysArray.length,
hierarchyFiles: nodeData.hierarchyFiles,
+ classes: nodeData.classes,
};
if (debugInfo) {
diff --git a/backend/src/routes/inventory.ts b/backend/src/routes/inventory.ts
index a6a129e2..bca1ac92 100644
--- a/backend/src/routes/inventory.ts
+++ b/backend/src/routes/inventory.ts
@@ -1,12 +1,12 @@
import { Router, type Request, type Response } from "express";
import { z } from "zod";
-import type { BoltService } from "../bolt/BoltService";
+import type { BoltService } from "../integrations/bolt/BoltService";
import {
BoltInventoryNotFoundError,
BoltExecutionError,
BoltParseError,
type Node,
-} from "../bolt/types";
+} from "../integrations/bolt/types";
import { asyncHandler } from "./asyncHandler";
import type { IntegrationManager } from "../integrations/IntegrationManager";
import { ExpertModeService } from "../services/ExpertModeService";
diff --git a/backend/src/routes/packages.ts b/backend/src/routes/packages.ts
index 259c41c7..152809e6 100644
--- a/backend/src/routes/packages.ts
+++ b/backend/src/routes/packages.ts
@@ -1,7 +1,8 @@
import { Router, type Request, type Response } from "express";
import { z } from "zod";
-import type { BoltService } from "../bolt/BoltService";
+import type { BoltService } from "../integrations/bolt/BoltService";
import type { ExecutionRepository } from "../database/ExecutionRepository";
+import type { IntegrationManager } from "../integrations/IntegrationManager";
import { asyncHandler } from "./asyncHandler";
import type { StreamingExecutionManager } from "../services/StreamingExecutionManager";
import { LoggerService } from "../services/LoggerService";
@@ -11,12 +12,13 @@ import { ExpertModeService } from "../services/ExpertModeService";
* Request body schema for package installation
*/
const InstallPackageRequestSchema = z.object({
- taskName: z.string().min(1, "Task name is required"),
+ taskName: z.string().min(1, "Task name is required").optional(),
packageName: z.string().min(1, "Package name is required"),
ensure: z.enum(["present", "absent", "latest"]).optional().default("present"),
version: z.string().optional(),
settings: z.record(z.unknown()).optional(),
expertMode: z.boolean().optional().default(false),
+ tool: z.enum(["bolt", "ansible"]).optional(),
});
/**
@@ -42,6 +44,7 @@ interface PackageTaskConfig {
* @returns Express router
*/
export function createPackagesRouter(
+ integrationManager: IntegrationManager,
boltService: BoltService,
executionRepository: ExecutionRepository,
packageTasks: PackageTaskConfig[],
@@ -178,32 +181,58 @@ export function createPackagesRouter(
return;
}
- const { taskName, packageName, ensure, version, settings, expertMode } =
+ const {
+ taskName,
+ packageName,
+ ensure,
+ version,
+ settings,
+ expertMode,
+ tool,
+ } =
validationResult.data;
+ const boltTool = integrationManager.getExecutionTool("bolt");
+ const ansibleTool = integrationManager.getExecutionTool("ansible");
+ const selectedTool = tool ?? (boltTool ? "bolt" : ansibleTool ? "ansible" : "bolt");
+
+ if (!integrationManager.getExecutionTool(selectedTool)) {
+ const errorResponse = {
+ error: {
+ code: "EXECUTION_TOOL_NOT_AVAILABLE",
+ message: `Execution tool '${selectedTool}' is not available`,
+ },
+ };
+
+ res.status(503).json(
+ debugInfo ? expertModeService.attachDebugInfo(errorResponse, debugInfo) : errorResponse,
+ );
+ return;
+ }
+
if (debugInfo) {
expertModeService.addDebug(debugInfo, {
- message: "Finding task configuration",
- context: JSON.stringify({ taskName }),
+ message: "Determining package execution mode",
+ context: JSON.stringify({ taskName, selectedTool }),
level: 'debug',
});
}
- // Find the task configuration
- const taskConfig = packageTasks.find((t) => t.name === taskName);
- if (!taskConfig) {
+ // Find the task configuration (required for Bolt only)
+ const taskConfig = taskName ? packageTasks.find((t) => t.name === taskName) : undefined;
+ if (selectedTool === "bolt" && !taskConfig) {
logger.warn("Package installation task not configured", {
component: "PackagesRouter",
integration: "bolt",
operation: "installPackage",
- metadata: { taskName, availableTasks: packageTasks.map((t) => t.name) },
+ metadata: { taskName: taskName ?? "", availableTasks: packageTasks.map((t) => t.name) },
});
if (debugInfo) {
debugInfo.duration = Date.now() - startTime;
expertModeService.setIntegration(debugInfo, 'bolt');
expertModeService.addWarning(debugInfo, {
- message: `Package installation task '${taskName}' is not configured`,
+ message: `Package installation task '${taskName ?? ""}' is not configured`,
context: `Available tasks: ${packageTasks.map((t) => t.name).join(", ")}`,
level: 'warn',
});
@@ -214,7 +243,7 @@ export function createPackagesRouter(
const errorResponse = {
error: {
code: "INVALID_TASK",
- message: `Package installation task '${taskName}' is not configured`,
+ message: `Package installation task '${taskName ?? "unknown"}' is not configured`,
details: `Available tasks: ${packageTasks.map((t) => t.name).join(", ")}`,
},
};
@@ -237,19 +266,20 @@ export function createPackagesRouter(
const executionId = await executionRepository.create({
type: "package",
targetNodes: [nodeId],
- action: taskName,
+ action: selectedTool === "ansible" ? "ansible.builtin.package" : (taskName ?? "package"),
parameters: { packageName, ensure, version, settings },
status: "running",
startedAt: new Date().toISOString(),
results: [],
expertMode,
+ executionTool: selectedTool,
});
logger.info("Execution record created, starting package installation", {
component: "PackagesRouter",
integration: "bolt",
operation: "installPackage",
- metadata: { executionId, nodeId, taskName, packageName },
+ metadata: { executionId, nodeId, taskName: taskName ?? "", packageName, selectedTool },
});
if (debugInfo) {
@@ -269,18 +299,40 @@ export function createPackagesRouter(
);
// Execute package installation task with parameter mapping
- const result = await boltService.installPackage(
- nodeId,
- taskName,
- {
- packageName,
- ensure,
- version,
- settings,
- },
- taskConfig.parameterMapping,
- streamingCallback,
- );
+ let result;
+ if (selectedTool === "ansible") {
+ result = await integrationManager.executeAction("ansible", {
+ type: "task",
+ target: nodeId,
+ action: "package",
+ parameters: {
+ packageName,
+ ensure,
+ version,
+ settings,
+ },
+ metadata: {
+ streamingCallback,
+ },
+ });
+ } else {
+ // For bolt, taskName and taskConfig are guaranteed to exist due to validation above
+ if (!taskName || !taskConfig) {
+ throw new Error("Task name and configuration required for Bolt execution");
+ }
+ result = await boltService.installPackage(
+ nodeId,
+ taskName,
+ {
+ packageName,
+ ensure,
+ version,
+ settings,
+ },
+ taskConfig.parameterMapping,
+ streamingCallback,
+ );
+ }
// Update execution record with results
// Include stdout/stderr when expert mode is enabled
@@ -301,9 +353,16 @@ export function createPackagesRouter(
} catch (error) {
logger.error("Error installing package", {
component: "PackagesRouter",
- integration: "bolt",
+ integration: selectedTool,
operation: "installPackage",
- metadata: { executionId, nodeId, taskName, packageName },
+ metadata: {
+ executionId,
+ nodeId,
+ packageName,
+ ...(selectedTool === "ansible"
+ ? { action: "ansible:package" }
+ : { taskName: taskName ?? "package" }),
+ },
}, error instanceof Error ? error : undefined);
let errorMessage = "Unknown error";
@@ -337,9 +396,9 @@ export function createPackagesRouter(
logger.info("Package installation request accepted", {
component: "PackagesRouter",
- integration: "bolt",
+ integration: selectedTool,
operation: "installPackage",
- metadata: { executionId, nodeId, taskName, packageName, duration },
+ metadata: { executionId, nodeId, taskName: taskName ?? "", packageName, duration, selectedTool },
});
// Return execution ID and initial status immediately
@@ -352,14 +411,15 @@ export function createPackagesRouter(
// Attach debug info if expert mode is enabled
if (debugInfo) {
debugInfo.duration = duration;
- expertModeService.setIntegration(debugInfo, 'bolt');
+ expertModeService.setIntegration(debugInfo, selectedTool);
expertModeService.addMetadata(debugInfo, 'executionId', executionId);
expertModeService.addMetadata(debugInfo, 'nodeId', nodeId);
- expertModeService.addMetadata(debugInfo, 'taskName', taskName);
+ expertModeService.addMetadata(debugInfo, 'taskName', taskName ?? 'ansible.builtin.package');
expertModeService.addMetadata(debugInfo, 'packageName', packageName);
+ expertModeService.addMetadata(debugInfo, 'tool', selectedTool);
expertModeService.addInfo(debugInfo, {
message: "Package installation started",
- context: JSON.stringify({ executionId, nodeId, taskName, packageName }),
+ context: JSON.stringify({ executionId, nodeId, taskName, packageName, selectedTool }),
level: 'info',
});
diff --git a/backend/src/routes/playbooks.ts b/backend/src/routes/playbooks.ts
new file mode 100644
index 00000000..96843374
--- /dev/null
+++ b/backend/src/routes/playbooks.ts
@@ -0,0 +1,208 @@
+import { Router, type Request, type Response } from "express";
+import { z } from "zod";
+import type { IntegrationManager } from "../integrations/IntegrationManager";
+import type { ExecutionRepository } from "../database/ExecutionRepository";
+import type { StreamingExecutionManager } from "../services/StreamingExecutionManager";
+import { asyncHandler } from "./asyncHandler";
+import { LoggerService } from "../services/LoggerService";
+import { ExpertModeService } from "../services/ExpertModeService";
+import { NodeIdParamSchema } from "../validation/commonSchemas";
+
+const PlaybookExecutionBodySchema = z.object({
+ playbookPath: z.string().min(1, "Playbook path is required"),
+ extraVars: z.record(z.unknown()).optional(),
+ expertMode: z.boolean().optional(),
+ tool: z.enum(["ansible"]).optional(),
+});
+
+export function createPlaybooksRouter(
+ integrationManager: IntegrationManager,
+ executionRepository: ExecutionRepository,
+ streamingManager?: StreamingExecutionManager,
+): Router {
+ const router = Router();
+ const logger = new LoggerService();
+
+ router.post(
+ "/:id/playbook",
+ asyncHandler(async (req: Request, res: Response): Promise<void> => {
+ const startTime = Date.now();
+ const expertModeService = new ExpertModeService();
+ const requestId = req.id ?? expertModeService.generateRequestId();
+
+ const debugInfo = req.expertMode
+ ? expertModeService.createDebugInfo("POST /api/nodes/:id/playbook", requestId, 0)
+ : null;
+
+ try {
+ const params = NodeIdParamSchema.parse(req.params);
+ const body = PlaybookExecutionBodySchema.parse(req.body);
+
+ const nodeId = params.id;
+ const playbookPath = body.playbookPath;
+ const extraVars = body.extraVars;
+ const expertMode = body.expertMode ?? false;
+
+ const ansibleTool = integrationManager.getExecutionTool("ansible");
+ if (!ansibleTool) {
+ const errorResponse = {
+ error: {
+ code: "EXECUTION_TOOL_NOT_AVAILABLE",
+ message: "Ansible integration is not available",
+ },
+ };
+
+ res.status(503).json(
+ debugInfo ? expertModeService.attachDebugInfo(errorResponse, debugInfo) : errorResponse,
+ );
+ return;
+ }
+
+ const aggregatedInventory = await integrationManager.getAggregatedInventory();
+ const node = aggregatedInventory.nodes.find(
+ (n) => n.id === nodeId || n.name === nodeId,
+ );
+
+ if (!node) {
+ const errorResponse = {
+ error: {
+ code: "INVALID_NODE_ID",
+ message: `Node '${nodeId}' not found in inventory`,
+ },
+ };
+
+ res.status(404).json(
+ debugInfo ? expertModeService.attachDebugInfo(errorResponse, debugInfo) : errorResponse,
+ );
+ return;
+ }
+
+ const executionId = await executionRepository.create({
+ type: "task",
+ targetNodes: [nodeId],
+ action: playbookPath,
+ parameters: {
+ playbook: true,
+ extraVars,
+ },
+ status: "running",
+ startedAt: new Date().toISOString(),
+ results: [],
+ expertMode,
+ executionTool: "ansible",
+ });
+
+ void (async (): Promise<void> => {
+ try {
+ const streamingCallback = streamingManager?.createStreamingCallback(
+ executionId,
+ expertMode,
+ );
+
+ const result = await integrationManager.executeAction("ansible", {
+ type: "plan",
+ target: nodeId,
+ action: playbookPath,
+ parameters: {
+ extraVars,
+ },
+ metadata: {
+ streamingCallback,
+ },
+ });
+
+ await executionRepository.update(executionId, {
+ status: result.status,
+ completedAt: result.completedAt,
+ results: result.results,
+ error: result.error,
+ command: result.command,
+ stdout: expertMode ? result.stdout : undefined,
+ stderr: expertMode ? result.stderr : undefined,
+ });
+
+ if (streamingManager) {
+ streamingManager.emitComplete(executionId, result);
+ }
+ } catch (error) {
+ logger.error("Error executing playbook", {
+ component: "PlaybooksRouter",
+ integration: "ansible",
+ operation: "executePlaybook",
+ metadata: { executionId, nodeId, playbookPath },
+ }, error instanceof Error ? error : undefined);
+
+ const errorMessage =
+ error instanceof Error ? error.message : "Unknown error";
+
+ await executionRepository.update(executionId, {
+ status: "failed",
+ completedAt: new Date().toISOString(),
+ results: [
+ {
+ nodeId,
+ status: "failed",
+ error: errorMessage,
+ duration: 0,
+ },
+ ],
+ error: errorMessage,
+ });
+
+ if (streamingManager) {
+ streamingManager.emitError(executionId, errorMessage);
+ }
+ }
+ })();
+
+ const duration = Date.now() - startTime;
+
+ const responseData = {
+ executionId,
+ status: "running",
+ message: "Playbook execution started",
+ };
+
+ if (debugInfo) {
+ debugInfo.duration = duration;
+ expertModeService.setIntegration(debugInfo, "ansible");
+ expertModeService.addMetadata(debugInfo, "executionId", executionId);
+ expertModeService.addMetadata(debugInfo, "nodeId", nodeId);
+ expertModeService.addMetadata(debugInfo, "playbookPath", playbookPath);
+ expertModeService.addInfo(debugInfo, {
+ message: "Playbook execution started",
+ context: JSON.stringify({ executionId, nodeId, playbookPath }),
+ level: "info",
+ });
+ debugInfo.performance = expertModeService.collectPerformanceMetrics();
+ debugInfo.context = expertModeService.collectRequestContext(req);
+ res.status(202).json(expertModeService.attachDebugInfo(responseData, debugInfo));
+ } else {
+ res.status(202).json(responseData);
+ }
+ } catch (error) {
+ const duration = Date.now() - startTime;
+
+ logger.error("Error processing playbook execution request", {
+ component: "PlaybooksRouter",
+ integration: "ansible",
+ operation: "executePlaybook",
+ metadata: { duration },
+ }, error instanceof Error ? error : undefined);
+
+ const errorResponse = {
+ error: {
+ code: "INTERNAL_SERVER_ERROR",
+ message: "Failed to process playbook execution request",
+ },
+ };
+
+ res.status(500).json(
+ debugInfo ? expertModeService.attachDebugInfo(errorResponse, debugInfo) : errorResponse,
+ );
+ }
+ }),
+ );
+
+ return router;
+}
\ No newline at end of file
diff --git a/backend/src/routes/puppet.ts b/backend/src/routes/puppet.ts
index 4c3488ba..28f12eda 100644
--- a/backend/src/routes/puppet.ts
+++ b/backend/src/routes/puppet.ts
@@ -1,8 +1,8 @@
import { Router, type Request, type Response } from "express";
import { z } from "zod";
-import type { BoltService } from "../bolt/BoltService";
+import type { BoltService } from "../integrations/bolt/BoltService";
import type { ExecutionRepository } from "../database/ExecutionRepository";
-import { BoltInventoryNotFoundError } from "../bolt/types";
+import { BoltInventoryNotFoundError } from "../integrations/bolt/types";
import { asyncHandler } from "./asyncHandler";
import type { StreamingExecutionManager } from "../services/StreamingExecutionManager";
import { LoggerService } from "../services/LoggerService";
diff --git a/backend/src/routes/tasks.ts b/backend/src/routes/tasks.ts
index eaf7d87e..9beaf8bf 100644
--- a/backend/src/routes/tasks.ts
+++ b/backend/src/routes/tasks.ts
@@ -9,7 +9,7 @@ import {
BoltInventoryNotFoundError,
BoltTaskNotFoundError,
BoltTaskParameterError,
-} from "../bolt/types";
+} from "../integrations/bolt/types";
import { asyncHandler } from "./asyncHandler";
import type { BoltPlugin } from "../integrations/bolt/BoltPlugin";
import { LoggerService } from "../services/LoggerService";
diff --git a/backend/src/server.ts b/backend/src/server.ts
index f6b3f10d..28f0d794 100644
--- a/backend/src/server.ts
+++ b/backend/src/server.ts
@@ -4,13 +4,14 @@ import path from "path";
import { ConfigService } from "./config/ConfigService";
import { DatabaseService } from "./database/DatabaseService";
import { BoltValidator, BoltValidationError } from "./validation/BoltValidator";
-import { BoltService } from "./bolt/BoltService";
+import { BoltService } from "./integrations/bolt/BoltService";
import { ExecutionRepository } from "./database/ExecutionRepository";
import { CommandWhitelistService } from "./validation/CommandWhitelistService";
import { createInventoryRouter } from "./routes/inventory";
import { createFactsRouter } from "./routes/facts";
import { createCommandsRouter } from "./routes/commands";
import { createTasksRouter } from "./routes/tasks";
+import { createPlaybooksRouter } from "./routes/playbooks";
import { createExecutionsRouter } from "./routes/executions";
import { createPuppetRouter } from "./routes/puppet";
import { createPuppetHistoryRouter } from "./routes/puppetHistory";
@@ -29,6 +30,8 @@ import { PuppetDBService } from "./integrations/puppetdb/PuppetDBService";
import { PuppetserverService } from "./integrations/puppetserver/PuppetserverService";
import { HieraPlugin } from "./integrations/hiera/HieraPlugin";
import { BoltPlugin } from "./integrations/bolt/BoltPlugin";
+import { AnsibleService } from "./integrations/ansible/AnsibleService";
+import { AnsiblePlugin } from "./integrations/ansible/AnsiblePlugin";
import type { IntegrationConfig } from "./integrations/types";
import { LoggerService } from "./services/LoggerService";
import { PerformanceMonitorService } from "./services/PerformanceMonitorService";
@@ -283,6 +286,62 @@ async function startServer(): Promise<void> {
});
}
+ // Initialize Ansible integration only if configured
+ let ansiblePlugin: AnsiblePlugin | undefined;
+ const ansibleConfig = config.integrations.ansible;
+ const ansibleConfigured = ansibleConfig?.enabled === true;
+
+ if (ansibleConfigured) {
+ logger.info("Initializing Ansible integration...", {
+ component: "Server",
+ operation: "initializeAnsible",
+ });
+
+ try {
+ const ansibleService = new AnsibleService(
+ ansibleConfig.projectPath,
+ ansibleConfig.inventoryPath,
+ ansibleConfig.timeout,
+ );
+
+ ansiblePlugin = new AnsiblePlugin(ansibleService, logger, performanceMonitor);
+
+ const integrationConfig: IntegrationConfig = {
+ enabled: true,
+ name: "ansible",
+ type: "both",
+ config: {
+ projectPath: ansibleConfig.projectPath,
+ inventoryPath: ansibleConfig.inventoryPath,
+ timeout: ansibleConfig.timeout,
+ },
+ priority: 5,
+ };
+
+ integrationManager.registerPlugin(ansiblePlugin, integrationConfig);
+
+ logger.info("Ansible integration registered successfully", {
+ component: "Server",
+ operation: "initializeAnsible",
+ metadata: {
+ projectPath: ansibleConfig.projectPath,
+ inventoryPath: ansibleConfig.inventoryPath,
+ },
+ });
+ } catch (error) {
+ logger.warn(`WARNING: Failed to initialize Ansible integration: ${error instanceof Error ? error.message : "Unknown error"}`, {
+ component: "Server",
+ operation: "initializeAnsible",
+ });
+ ansiblePlugin = undefined;
+ }
+ } else {
+ logger.warn("Ansible integration not configured - skipping registration", {
+ component: "Server",
+ operation: "initializeAnsible",
+ });
+ }
+
// Initialize PuppetDB integration only if configured
let puppetDBService: PuppetDBService | undefined;
const puppetDBConfig = config.integrations.puppetdb;
@@ -690,6 +749,14 @@ async function startServer(): Promise<void> {
streamingManager,
),
);
+ app.use(
+ "/api/nodes",
+ createPlaybooksRouter(
+ integrationManager,
+ executionRepository,
+ streamingManager,
+ ),
+ );
app.use(
"/api/nodes",
createPuppetRouter(boltService, executionRepository, streamingManager),
@@ -704,6 +771,7 @@ async function startServer(): Promise<void> {
app.use(
"/api",
createPackagesRouter(
+ integrationManager,
boltService,
executionRepository,
config.packageTasks,
@@ -713,6 +781,7 @@ async function startServer(): Promise<void> {
app.use(
"/api/nodes",
createPackagesRouter(
+ integrationManager,
boltService,
executionRepository,
config.packageTasks,
diff --git a/backend/src/services/PuppetRunHistoryService.ts b/backend/src/services/PuppetRunHistoryService.ts
index f2d5b5f8..b9e7153c 100644
--- a/backend/src/services/PuppetRunHistoryService.ts
+++ b/backend/src/services/PuppetRunHistoryService.ts
@@ -63,11 +63,10 @@ export class PuppetRunHistoryService {
try {
// Calculate date range
const endDate = new Date();
- // Set end date to end of today
- endDate.setHours(23, 59, 59, 999);
+ // Set end date to current moment (not end of day) to show partial data for today
const startDate = new Date();
- startDate.setDate(startDate.getDate() - days);
+ startDate.setDate(startDate.getDate() - (days - 1)); // Include today in the count
// Set start date to beginning of that day
startDate.setHours(0, 0, 0, 0);
@@ -122,14 +121,15 @@ export class PuppetRunHistoryService {
try {
// Calculate date range
const endDate = new Date();
- // Set end date to end of today
- endDate.setHours(23, 59, 59, 999);
+ // Set end date to current moment (not end of day) to show partial data for today
const startDate = new Date();
- startDate.setDate(startDate.getDate() - days);
+ startDate.setDate(startDate.getDate() - (days - 1)); // Include today in the count
// Set start date to beginning of that day
startDate.setHours(0, 0, 0, 0);
+ this.log(`Date range: ${startDate.toISOString()} to ${endDate.toISOString()}`, "debug");
+
// Use the efficient aggregate query to get counts by date and status
const counts = await this.puppetDBService.getReportCountsByDateAndStatus(
startDate.toISOString(),
@@ -141,6 +141,8 @@ export class PuppetRunHistoryService {
// Convert counts to RunHistoryData format
const history = this.convertCountsToHistory(counts, startDate, endDate);
+ this.log(`Converted to ${String(history.length)} days of history`, "debug");
+
return history;
} catch (error) {
this.logError("Failed to get aggregated run history", error);
@@ -164,7 +166,10 @@ export class PuppetRunHistoryService {
// Pre-populate all days with zero counts
const dateMap = new Map();
const currentDate = new Date(startDate);
- while (currentDate <= endDate) {
+ const endDateOnly = new Date(endDate);
+ endDateOnly.setHours(0, 0, 0, 0); // Normalize to start of day for comparison
+
+ while (currentDate <= endDateOnly) {
const dateKey = currentDate.toISOString().split("T")[0];
dateMap.set(dateKey, {
date: dateKey,
@@ -277,7 +282,7 @@ export class PuppetRunHistoryService {
* @param message - Message to log
* @param level - Log level (default: info)
*/
- private log(message: string, level: "info" | "warn" | "error" = "info"): void {
+ private log(message: string, level: "debug" | "info" | "warn" | "error" = "info"): void {
if (this.logger) {
switch (level) {
case "error":
@@ -290,6 +295,11 @@ export class PuppetRunHistoryService {
component: "PuppetRunHistoryService",
});
break;
+ case "debug":
+ this.logger.debug(message, {
+ component: "PuppetRunHistoryService",
+ });
+ break;
default:
this.logger.info(message, {
component: "PuppetRunHistoryService",
diff --git a/backend/test/bolt/BoltService.test.ts b/backend/test/bolt/BoltService.test.ts
index 507049ea..e9ec3bfd 100644
--- a/backend/test/bolt/BoltService.test.ts
+++ b/backend/test/bolt/BoltService.test.ts
@@ -1,5 +1,5 @@
import { describe, it, expect, beforeEach } from "vitest";
-import { BoltService } from "../../src/bolt/BoltService";
+import { BoltService } from "../../src/integrations/bolt/BoltService";
describe("BoltService - gatherFacts", () => {
let boltService: BoltService;
diff --git a/backend/test/database/ExecutionRepository.test.ts b/backend/test/database/ExecutionRepository.test.ts
index 2d369acf..38f4d935 100644
--- a/backend/test/database/ExecutionRepository.test.ts
+++ b/backend/test/database/ExecutionRepository.test.ts
@@ -32,7 +32,8 @@ describe("ExecutionRepository", () => {
original_execution_id TEXT,
re_execution_count INTEGER DEFAULT 0,
stdout TEXT,
- stderr TEXT
+ stderr TEXT,
+ execution_tool TEXT DEFAULT 'bolt'
)
`;
diff --git a/backend/test/debug-inventory-route.test.ts b/backend/test/debug-inventory-route.test.ts
index 0b5e9447..77967fe9 100644
--- a/backend/test/debug-inventory-route.test.ts
+++ b/backend/test/debug-inventory-route.test.ts
@@ -1,12 +1,12 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import express, { type Express } from "express";
import request from "supertest";
-import { BoltService } from "../src/bolt/BoltService";
+import { BoltService } from "../src/integrations/bolt/BoltService";
import { IntegrationManager } from "../src/integrations/IntegrationManager";
import { createInventoryRouter } from "../src/routes/inventory";
import { requestIdMiddleware } from "../src/middleware/errorHandler";
import { expertModeMiddleware } from "../src/middleware/expertMode";
-import type { Node } from "../src/bolt/types";
+import type { Node } from "../src/integrations/bolt/types";
// Mock child_process to avoid actual Bolt CLI execution
vi.mock("child_process", () => ({
diff --git a/backend/test/integration/api.test.ts b/backend/test/integration/api.test.ts
index 42db1f79..a48cb5da 100644
--- a/backend/test/integration/api.test.ts
+++ b/backend/test/integration/api.test.ts
@@ -8,7 +8,7 @@ import {
beforeEach,
} from "vitest";
import express, { type Express } from "express";
-import { BoltService } from "../../src/bolt/BoltService";
+import { BoltService } from "../../src/integrations/bolt/BoltService";
import { ExecutionRepository } from "../../src/database/ExecutionRepository";
import { CommandWhitelistService } from "../../src/validation/CommandWhitelistService";
import { StreamingExecutionManager } from "../../src/services/StreamingExecutionManager";
diff --git a/backend/test/integration/bolt-plugin-integration.test.ts b/backend/test/integration/bolt-plugin-integration.test.ts
index b7e242d1..7c176533 100644
--- a/backend/test/integration/bolt-plugin-integration.test.ts
+++ b/backend/test/integration/bolt-plugin-integration.test.ts
@@ -13,10 +13,10 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { IntegrationManager } from "../../src/integrations/IntegrationManager";
import { BoltPlugin } from "../../src/integrations/bolt/BoltPlugin";
-import { BoltService } from "../../src/bolt/BoltService";
+import { BoltService } from "../../src/integrations/bolt/BoltService";
import { LoggerService } from "../../src/services/LoggerService";
import type { IntegrationConfig, Action } from "../../src/integrations/types";
-import type { Node } from "../../src/bolt/types";
+import type { Node } from "../../src/integrations/bolt/types";
// Check if Bolt is available before running tests
async function checkBoltAvailability(): Promise<boolean> {
diff --git a/backend/test/integration/bolt-service.test.ts b/backend/test/integration/bolt-service.test.ts
index f2dd07af..cee72931 100644
--- a/backend/test/integration/bolt-service.test.ts
+++ b/backend/test/integration/bolt-service.test.ts
@@ -1,5 +1,5 @@
import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
-import { BoltService } from '../../src/bolt/BoltService';
+import { BoltService } from '../../src/integrations/bolt/BoltService';
import { spawn } from 'child_process';
import type { ChildProcess } from 'child_process';
import { EventEmitter } from 'events';
diff --git a/backend/test/integration/expert-mode-routes.test.ts b/backend/test/integration/expert-mode-routes.test.ts
index 922c6a84..f064092e 100644
--- a/backend/test/integration/expert-mode-routes.test.ts
+++ b/backend/test/integration/expert-mode-routes.test.ts
@@ -1,12 +1,12 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import express, { type Express } from "express";
-import { BoltService } from "../../src/bolt/BoltService";
+import { BoltService } from "../../src/integrations/bolt/BoltService";
import { IntegrationManager } from "../../src/integrations/IntegrationManager";
import { createInventoryRouter } from "../../src/routes/inventory";
import { createIntegrationsRouter } from "../../src/routes/integrations";
import { requestIdMiddleware } from "../../src/middleware/errorHandler";
import { expertModeMiddleware } from "../../src/middleware/expertMode";
-import type { Node } from "../../src/bolt/types";
+import type { Node } from "../../src/integrations/bolt/types";
// Mock child_process to avoid actual Bolt CLI execution
vi.mock("child_process", () => ({
diff --git a/backend/test/integration/external-api-errors-expert-mode.test.ts b/backend/test/integration/external-api-errors-expert-mode.test.ts
index 3029703c..b08def19 100644
--- a/backend/test/integration/external-api-errors-expert-mode.test.ts
+++ b/backend/test/integration/external-api-errors-expert-mode.test.ts
@@ -16,7 +16,7 @@ import { createTasksRouter } from '../../src/routes/tasks';
import { expertModeMiddleware } from '../../src/middleware/expertMode';
import { PuppetDBService } from '../../src/integrations/puppetdb/PuppetDBService';
import { PuppetserverService } from '../../src/integrations/puppetserver/PuppetserverService';
-import { BoltService } from '../../src/bolt/BoltService';
+import { BoltService } from '../../src/integrations/bolt/BoltService';
import {
PuppetDBConnectionError,
PuppetDBAuthenticationError,
@@ -31,7 +31,7 @@ import {
BoltExecutionError,
BoltNodeUnreachableError,
BoltTimeoutError,
-} from '../../src/bolt/types';
+} from '../../src/integrations/bolt/types';
describe('External API Errors in Expert Mode', () => {
let app: Express;
diff --git a/backend/test/integration/integration-status.test.ts b/backend/test/integration/integration-status.test.ts
index e172ed68..546ce891 100644
--- a/backend/test/integration/integration-status.test.ts
+++ b/backend/test/integration/integration-status.test.ts
@@ -16,7 +16,7 @@ import type {
HealthStatus,
InformationSourcePlugin,
} from "../../src/integrations/types";
-import type { Node, Facts } from "../../src/bolt/types";
+import type { Node, Facts } from "../../src/integrations/bolt/types";
/**
* Mock information source plugin for testing
diff --git a/backend/test/integration/integration-test-suite.test.ts b/backend/test/integration/integration-test-suite.test.ts
index cfe379e8..795e145e 100644
--- a/backend/test/integration/integration-test-suite.test.ts
+++ b/backend/test/integration/integration-test-suite.test.ts
@@ -15,12 +15,12 @@ import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { IntegrationManager } from '../../src/integrations/IntegrationManager';
import { LoggerService } from '../../src/services/LoggerService';
import { BoltPlugin } from '../../src/integrations/bolt/BoltPlugin';
-import { BoltService } from '../../src/bolt/BoltService';
+import { BoltService } from '../../src/integrations/bolt/BoltService';
import { PuppetDBService } from '../../src/integrations/puppetdb/PuppetDBService';
import { PuppetserverService } from '../../src/integrations/puppetserver/PuppetserverService';
import { NodeLinkingService } from '../../src/integrations/NodeLinkingService';
import type { IntegrationConfig, Action } from '../../src/integrations/types';
-import type { Node, Facts } from '../../src/bolt/types';
+import type { Node, Facts } from '../../src/integrations/bolt/types';
describe('Comprehensive Integration Test Suite', () => {
let integrationManager: IntegrationManager;
diff --git a/backend/test/integration/inventory-filtering.test.ts b/backend/test/integration/inventory-filtering.test.ts
index f5ac9885..0490c76d 100644
--- a/backend/test/integration/inventory-filtering.test.ts
+++ b/backend/test/integration/inventory-filtering.test.ts
@@ -4,9 +4,9 @@
*/
import { describe, it, expect, beforeEach, vi } from "vitest";
-import type { Node } from "../../src/bolt/types";
+import type { Node } from "../../src/integrations/bolt/types";
import type { IntegrationManager } from "../../src/integrations/IntegrationManager";
-import type { BoltService } from "../../src/bolt/BoltService";
+import type { BoltService } from "../../src/integrations/bolt/BoltService";
import { createInventoryRouter } from "../../src/routes/inventory";
import express, { type Express } from "express";
import request from "supertest";
diff --git a/backend/test/integration/puppetserver-nodes.test.ts b/backend/test/integration/puppetserver-nodes.test.ts
index b53b5f56..5a8a26fd 100644
--- a/backend/test/integration/puppetserver-nodes.test.ts
+++ b/backend/test/integration/puppetserver-nodes.test.ts
@@ -11,7 +11,7 @@ import { PuppetserverService } from "../../src/integrations/puppetserver/Puppets
import { createIntegrationsRouter } from "../../src/routes/integrations";
import { requestIdMiddleware } from "../../src/middleware/errorHandler";
import type { IntegrationConfig } from "../../src/integrations/types";
-import type { Node, Facts } from "../../src/bolt/types";
+import type { Node, Facts } from "../../src/integrations/bolt/types";
import type { NodeStatus } from "../../src/integrations/puppetserver/types";
/**
diff --git a/backend/test/integrations/FactService.test.ts b/backend/test/integrations/FactService.test.ts
index 9bbcf851..76fdc304 100644
--- a/backend/test/integrations/FactService.test.ts
+++ b/backend/test/integrations/FactService.test.ts
@@ -8,7 +8,7 @@ import * as path from "path";
import { FactService } from "../../src/integrations/hiera/FactService";
import type { IntegrationManager } from "../../src/integrations/IntegrationManager";
import type { InformationSourcePlugin } from "../../src/integrations/types";
-import type { Facts } from "../../src/bolt/types";
+import type { Facts } from "../../src/integrations/bolt/types";
// Mock fs module
vi.mock("fs");
diff --git a/backend/test/integrations/IntegrationManager.test.ts b/backend/test/integrations/IntegrationManager.test.ts
index 66031684..2809e281 100644
--- a/backend/test/integrations/IntegrationManager.test.ts
+++ b/backend/test/integrations/IntegrationManager.test.ts
@@ -13,7 +13,7 @@ import type {
ExecutionToolPlugin,
Action,
} from "../../src/integrations/types";
-import type { Node, Facts, ExecutionResult } from "../../src/bolt/types";
+import type { Node, Facts, ExecutionResult } from "../../src/integrations/bolt/types";
/**
* Mock information source plugin for testing
diff --git a/backend/test/integrations/NodeLinkingService.test.ts b/backend/test/integrations/NodeLinkingService.test.ts
index fb69ac4f..5fa1ad17 100644
--- a/backend/test/integrations/NodeLinkingService.test.ts
+++ b/backend/test/integrations/NodeLinkingService.test.ts
@@ -6,7 +6,7 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { NodeLinkingService } from "../../src/integrations/NodeLinkingService";
import type { IntegrationManager } from "../../src/integrations/IntegrationManager";
-import type { Node } from "../../src/bolt/types";
+import type { Node } from "../../src/integrations/bolt/types";
describe("NodeLinkingService", () => {
let service: NodeLinkingService;
diff --git a/backend/test/services/PuppetRunHistoryService.test.ts b/backend/test/services/PuppetRunHistoryService.test.ts
index c56d0abd..4d2e2000 100644
--- a/backend/test/services/PuppetRunHistoryService.test.ts
+++ b/backend/test/services/PuppetRunHistoryService.test.ts
@@ -107,8 +107,8 @@ describe('PuppetRunHistoryService', () => {
// Request only 3 days of history
const result = await service.getNodeHistory('node1', 3);
- // Should return 4 days (today + 3 days back)
- expect(result.history.length).toBe(4);
+ // Should return 3 days (including today)
+ expect(result.history.length).toBe(3);
// Total runs from counts
expect(result.summary.totalRuns).toBe(1);
});
@@ -150,8 +150,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getNodeHistory('node1', 7);
- // Should return 8 days (today + 7 days back), with data on 2 of them
- expect(result.history.length).toBe(8);
+ // Should return 7 days (including today), with data on 2 of them
+ expect(result.history.length).toBe(7);
// Find the entries for our specific dates
const date1Entry = result.history.find(h => h.date === date1Str);
@@ -395,8 +395,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getNodeHistory('node1', 7);
expect(result.nodeId).toBe('node1');
- // Should return 8 days (today + 7 days back) with zero counts
- expect(result.history.length).toBe(8);
+ // Should return 7 days (including today) with zero counts
+ expect(result.history.length).toBe(7);
result.history.forEach(day => {
expect(day.success).toBe(0);
expect(day.failed).toBe(0);
@@ -415,8 +415,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getNodeHistory('node1', 3);
- // Should return 4 days (today + 3 days back) with zero counts
- expect(result.history.length).toBe(4);
+ // Should return 3 days (including today) with zero counts
+ expect(result.history.length).toBe(3);
result.history.forEach(day => {
expect(day.success).toBe(0);
expect(day.failed).toBe(0);
@@ -481,8 +481,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getAggregatedHistory(7);
- // Should return 8 days (today + 7 days back) with zero counts
- expect(result.length).toBe(8);
+ // Should return 7 days (including today) with zero counts
+ expect(result.length).toBe(7);
result.forEach(day => {
expect(day.success).toBe(0);
expect(day.failed).toBe(0);
@@ -508,8 +508,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getAggregatedHistory(7);
- // Should return 8 days with data on the specific date
- expect(result.length).toBe(8);
+ // Should return 7 days with data on the specific date
+ expect(result.length).toBe(7);
const dateEntry = result.find(h => h.date === dateStr);
expect(dateEntry).toBeDefined();
@@ -537,8 +537,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getAggregatedHistory(7);
- // Should return 8 days with data on 2 specific dates
- expect(result.length).toBe(8);
+ // Should return 7 days with data on 2 specific dates
+ expect(result.length).toBe(7);
const date1Entry = result.find(h => h.date === date1Str);
const date2Entry = result.find(h => h.date === date2Str);
@@ -570,8 +570,8 @@ describe('PuppetRunHistoryService', () => {
const result = await service.getAggregatedHistory(7);
- // Should return 8 days (today + 7 days back) with zero counts
- expect(result.length).toBe(8);
+ // Should return 7 days (including today) with zero counts
+ expect(result.length).toBe(7);
result.forEach(day => {
expect(day.success).toBe(0);
expect(day.failed).toBe(0);
diff --git a/backend/test/unit/integrations/BoltPlugin.test.ts b/backend/test/unit/integrations/BoltPlugin.test.ts
index 3b4f6f22..2148f20d 100644
--- a/backend/test/unit/integrations/BoltPlugin.test.ts
+++ b/backend/test/unit/integrations/BoltPlugin.test.ts
@@ -4,7 +4,7 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { BoltPlugin } from "../../../src/integrations/bolt/BoltPlugin";
-import type { BoltService } from "../../../src/bolt/BoltService";
+import type { BoltService } from "../../../src/integrations/bolt/BoltService";
import type { IntegrationConfig } from "../../../src/integrations/types";
// Mock child_process
diff --git a/docs/configuration.md b/docs/configuration.md
index f580f735..9e507149 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -13,6 +13,7 @@ Pabawi is designed to work with minimal configuration by using your existing Bol
- [Bolt Project Requirements](#bolt-project-requirements)
- [Command Whitelist Configuration](#command-whitelist-configuration)
- [Package Installation Configuration](#package-installation-configuration)
+- [Ansible Integration](#ansible-integration)
- [Expert Mode](#expert-mode)
- [Streaming Configuration](#streaming-configuration)
- [Caching Configuration](#caching-configuration)
@@ -112,6 +113,10 @@ All configuration is managed through environment variables. You can set these in
For detailed Bolt configuration, security whitelisting, and package task setup, please refer to the dedicated [Bolt Integration Setup Guide](./integrations/bolt.md).
+### Ansible Integration
+
+For detailed Ansible configuration, inventory requirements, playbook usage, and troubleshooting, please refer to the dedicated [Ansible Integration Setup Guide](./integrations/ansible.md).
+
### Hiera Integration
For detailed Hiera configuration, hierarchy setup, and code analysis features, please refer to the dedicated [Hiera Integration Setup Guide](./integrations/hiera.md).
@@ -274,6 +279,12 @@ For detailed command whitelist configuration, security modes, and examples, plea
For detailed package installation configuration, including how to define available tasks and parameter mappings, please refer to the dedicated [Bolt Integration Setup Guide](./integrations/bolt.md).
+## Ansible Integration
+
+Pabawi supports Ansible as an execution integration for commands, package operations, and playbooks.
+
+For the complete setup process and environment variable reference (`ANSIBLE_ENABLED`, `ANSIBLE_PROJECT_PATH`, `ANSIBLE_INVENTORY_PATH`, `ANSIBLE_EXECUTION_TIMEOUT`), see the [Ansible Integration Setup Guide](./integrations/ansible.md).
+
## Expert Mode
Expert mode provides detailed diagnostic information for troubleshooting. It can be enabled globally or per-request.
diff --git a/docs/integrations/ansible.md b/docs/integrations/ansible.md
new file mode 100644
index 00000000..d1c5c96c
--- /dev/null
+++ b/docs/integrations/ansible.md
@@ -0,0 +1,138 @@
+# Ansible Integration Setup Guide
+
+## Overview
+
+Pabawi supports Ansible as an execution integration for:
+
+- Ad-hoc command execution on nodes
+- Package installation/removal
+- Playbook execution
+- Execution history tracking with tool attribution (`ansible`)
+
+This guide covers the minimum configuration needed to enable and validate Ansible in Pabawi.
+
+## Table of Contents
+
+- [Prerequisites](#prerequisites)
+- [Environment Configuration](#environment-configuration)
+- [Inventory Setup](#inventory-setup)
+- [Playbook Setup](#playbook-setup)
+- [Validation](#validation)
+- [Troubleshooting](#troubleshooting)
+
+## Prerequisites
+
+- `ansible` and `ansible-playbook` available in `PATH`
+- A reachable inventory file for your managed hosts
+- SSH (or compatible Ansible transport) connectivity from the machine running Pabawi
+
+Example validation:
+
+```bash
+ansible --version
+ansible-playbook --version
+```
+
+## Environment Configuration
+
+Add the following to your `backend/.env`:
+
+```bash
+ANSIBLE_ENABLED=true
+ANSIBLE_PROJECT_PATH=.
+ANSIBLE_INVENTORY_PATH=inventory/hosts
+ANSIBLE_EXECUTION_TIMEOUT=300000
+```
+
+### Variable Reference
+
+- `ANSIBLE_ENABLED`: Enables Ansible integration (`true`/`false`)
+- `ANSIBLE_PROJECT_PATH`: Working directory used when running Ansible commands
+- `ANSIBLE_INVENTORY_PATH`: Inventory path relative to `ANSIBLE_PROJECT_PATH` (or absolute)
+- `ANSIBLE_EXECUTION_TIMEOUT`: Execution timeout in milliseconds
+
+## Inventory Setup
+
+Pabawi can work with your existing Ansible inventory. The configured `ANSIBLE_INVENTORY_PATH` must point to a valid inventory file.
+
+### INI Example (`inventory/hosts`)
+
+```ini
+[linux]
+web01.example.com
+db01.example.com
+
+[linux:vars]
+ansible_user=ubuntu
+ansible_ssh_private_key_file=~/.ssh/id_rsa
+```
+
+### YAML Example (`inventory/hosts.yaml`)
+
+```yaml
+all:
+ children:
+ linux:
+ hosts:
+ web01.example.com:
+ db01.example.com:
+ vars:
+ ansible_user: ubuntu
+ ansible_ssh_private_key_file: ~/.ssh/id_rsa
+```
+
+## Playbook Setup
+
+Create playbooks in your project path (for example, under `playbooks/`) and execute them from the Node Actions page.
+
+Example playbook:
+
+```yaml
+---
+- name: Sample maintenance playbook
+ hosts: all
+ become: true
+ tasks:
+ - name: Ensure curl is present
+ ansible.builtin.package:
+ name: curl
+ state: present
+```
+
+## Validation
+
+Before testing from the UI, validate directly from the CLI in `ANSIBLE_PROJECT_PATH`:
+
+```bash
+ansible all -i inventory/hosts -m ping
+ansible-playbook -i inventory/hosts playbooks/site.yml --check
+```
+
+Then in Pabawi:
+
+1. Open Integrations and verify Ansible status is `connected` or `degraded`
+2. Go to a node and run:
+ - Command execution (select tool = Ansible)
+ - Package installation (select tool = Ansible)
+ - Playbook execution
+3. Check Executions page and confirm `Tool` shows `Ansible`
+
+## Troubleshooting
+
+### "Ansible integration is not available"
+
+- Ensure `ANSIBLE_ENABLED=true`
+- Restart backend after updating `.env`
+- Confirm `ansible` and `ansible-playbook` are installed on host/container
+
+### "Ansible inventory file was not found"
+
+- Verify `ANSIBLE_PROJECT_PATH` and `ANSIBLE_INVENTORY_PATH`
+- Use absolute paths if needed
+- Check file permissions for the backend process user
+
+### Commands work in shell but fail in Pabawi
+
+- Validate the same inventory path used by Pabawi
+- Check SSH key/user in inventory vars
+- Review backend logs with `LOG_LEVEL=debug`
diff --git a/frontend/package.json b/frontend/package.json
index f41a0ef0..6892d0dd 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "frontend",
- "version": "0.6.0",
+ "version": "0.7.0",
"description": "Pabawi frontend web interface",
"type": "module",
"scripts": {
diff --git a/frontend/src/components/AnsiblePlaybookInterface.svelte b/frontend/src/components/AnsiblePlaybookInterface.svelte
new file mode 100644
index 00000000..5de73e21
--- /dev/null
+++ b/frontend/src/components/AnsiblePlaybookInterface.svelte
@@ -0,0 +1,285 @@
+
+
+
\ No newline at end of file
diff --git a/frontend/src/components/AnsibleSetupGuide.svelte b/frontend/src/components/AnsibleSetupGuide.svelte
new file mode 100644
index 00000000..b48f7ebf
--- /dev/null
+++ b/frontend/src/components/AnsibleSetupGuide.svelte
@@ -0,0 +1,239 @@
+
+
+
+
+
Ansible Integration Setup
+
+ Configure Pabawi to execute remote commands, package installs, and playbooks using Ansible.
+
+
+
+
+
+
Prerequisites
+
+
+ •
+ Ansible CLI installed and available in PATH (ansible, ansible-playbook)
+
+
+ •
+ Inventory file with reachable targets
+
+
+ •
+ SSH connectivity and credentials for managed nodes
+
- Class-Matched mode shows the same results as Found Keys mode until class detection is fixed.
- Currently showing all keys with resolved values as "used".
-
-
-
-
- {/if}
diff --git a/frontend/src/components/PackageInstallInterface.svelte b/frontend/src/components/PackageInstallInterface.svelte
index ee67bceb..e9ef65cb 100644
--- a/frontend/src/components/PackageInstallInterface.svelte
+++ b/frontend/src/components/PackageInstallInterface.svelte
@@ -12,6 +12,7 @@
interface Props {
nodeId: string;
+ availableExecutionTools?: Array<'bolt' | 'ansible'>;
onExecutionComplete?: () => void;
}
@@ -38,6 +39,7 @@
results: NodeResult[];
error?: string;
command?: string;
+ executionTool?: 'bolt' | 'ansible';
}
interface NodeResult {
@@ -53,12 +55,13 @@
duration: number;
}
- let { nodeId, onExecutionComplete }: Props = $props();
+ let { nodeId, availableExecutionTools = ['bolt'], onExecutionComplete }: Props = $props();
// State
let expanded = $state(false);
let availableTasks = $state([]);
let selectedTask = $state('');
+ let selectedTool = $state<'bolt' | 'ansible'>('bolt');
let packageName = $state('');
let packageVersion = $state('');
let ensure = $state<'present' | 'absent' | 'latest'>('present');
@@ -66,6 +69,7 @@
let executing = $state(false);
let error = $state(null);
let result = $state(null);
+ let currentExecutionId = $state('');
let tasksLoading = $state(false);
let tasksFetched = $state(false);
let executionStream = $state(null);
@@ -97,6 +101,8 @@
availableTasks.find((t) => t.name === selectedTask)
);
+ const shouldShowToolSelector = $derived(availableExecutionTools.length > 1);
+
// Check if settings are supported by the selected task
const supportsSettings = $derived(
selectedTaskConfig?.parameterMapping.settings !== undefined
@@ -105,7 +111,7 @@
function validateForm(): boolean {
validationError = null;
- if (!selectedTask) {
+ if (selectedTool === 'bolt' && !selectedTask) {
validationError = 'Please select a package task';
return false;
}
@@ -145,6 +151,7 @@
executing = true;
error = null;
result = null;
+ currentExecutionId = '';
executionStream = null;
try {
@@ -152,12 +159,16 @@
// Build parameters
      const parameters: Record<string, unknown> = {
- taskName: selectedTask,
packageName: packageName.trim(),
ensure,
expertMode: expertMode.enabled,
+ tool: selectedTool,
};
+ if (selectedTool === 'bolt') {
+ parameters.taskName = selectedTask;
+ }
+
if (packageVersion.trim()) {
parameters.version = packageVersion.trim();
}
@@ -173,6 +184,7 @@
);
const executionId = data.executionId;
+ currentExecutionId = executionId;
// If expert mode is enabled, create a stream for real-time output
if (expertMode.enabled) {
@@ -253,9 +265,23 @@
$effect(() => {
if (expanded && !tasksFetched) {
tasksFetched = true;
+ if (selectedTool === 'bolt') {
+ fetchPackageTasks();
+ }
+ }
+ });
+
+ $effect(() => {
+ if (expanded && selectedTool === 'bolt' && availableTasks.length === 0 && !tasksLoading) {
fetchPackageTasks();
}
});
+
+ $effect(() => {
+ if (!availableExecutionTools.includes(selectedTool)) {
+ selectedTool = availableExecutionTools[0] ?? 'bolt';
+ }
+ });
@@ -266,7 +292,7 @@
>
Install Software
-
+
@@ -476,7 +476,7 @@
{:else if aggregatedRunHistory.length > 0}
-
Aggregated Puppet Run History (All Nodes - Last 7 Days)
+
Aggregated Puppet Run History (All Nodes - Last 7 Days)
{#if runHistoryLastUpdate}
Last updated: {runHistoryLastUpdate.toLocaleTimeString()}
diff --git a/frontend/src/pages/IntegrationSetupPage.svelte b/frontend/src/pages/IntegrationSetupPage.svelte
index fec46858..9a5c01f6 100644
--- a/frontend/src/pages/IntegrationSetupPage.svelte
+++ b/frontend/src/pages/IntegrationSetupPage.svelte
@@ -1,7 +1,7 @@