diff --git a/.github/workflows/build-and-publish.yml b/.github/workflows/build-and-publish.yml index cf2ab53..e7ffd58 100644 --- a/.github/workflows/build-and-publish.yml +++ b/.github/workflows/build-and-publish.yml @@ -3,7 +3,7 @@ name: Build and Release on: push: branches: - - dev + - dev-oltp paths: - 'agent/**' - 'dashboard/**' @@ -13,7 +13,7 @@ on: - 'v*' pull_request: branches: - - dev + - dev-oltp workflow_dispatch: inputs: release_cli: @@ -25,8 +25,8 @@ on: env: REGISTRY_GHCR: ghcr.io REGISTRY_DOCKERHUB: docker.io - GO_VERSION: '1.22' - NODE_VERSION: '18' + GO_VERSION: '1.23' + NODE_VERSION: '20' jobs: # Build and push Agent Docker image @@ -413,4 +413,4 @@ jobs: echo "- [GitHub Packages](https://github.com/${{ github.repository }}/pkgs/container)" >> $GITHUB_STEP_SUMMARY if [[ "${{ github.ref_type }}" == "tag" ]]; then echo "- [Release Page](https://github.com/${{ github.repository }}/releases/tag/${{ github.ref_name }})" >> $GITHUB_STEP_SUMMARY - fi \ No newline at end of file + fi diff --git a/agent/.env.example b/agent/.env.example new file mode 100644 index 0000000..8688b6b --- /dev/null +++ b/agent/.env.example @@ -0,0 +1,20 @@ +# Server Configuration +PORT=5000 + +# Log Paths +TRAEFIK_LOG_DASHBOARD_ACCESS_PATH=/var/log/traefik/access.log +TRAEFIK_LOG_DASHBOARD_ERROR_PATH=/var/log/traefik/traefik.log + +# Log Format (json or clf) +TRAEFIK_LOG_DASHBOARD_LOG_FORMAT=json + +# System Monitoring +TRAEFIK_LOG_DASHBOARD_SYSTEM_MONITORING=true + +# Authentication Token (required for production) +TRAEFIK_LOG_DASHBOARD_AUTH_TOKEN=your-secret-token-here + +# GeoIP Configuration +TRAEFIK_LOG_DASHBOARD_GEOIP_ENABLED=true +TRAEFIK_LOG_DASHBOARD_GEOIP_CITY_DB=GeoLite2-City.mmdb +TRAEFIK_LOG_DASHBOARD_GEOIP_COUNTRY_DB=GeoLite2-Country.mmdb \ No newline at end of file diff --git a/agent/Dockerfile b/agent/Dockerfile index c0b9759..6afa04f 100644 --- a/agent/Dockerfile +++ b/agent/Dockerfile @@ -1,5 +1,5 @@ # Build stage -FROM golang:1.22-alpine AS builder +FROM golang:1.23-alpine AS builder WORKDIR /app diff --git a/agent/cmd/agent/main.go b/agent/cmd/agent/main.go index 4950b71..9aea4c0 100644 --- a/agent/cmd/agent/main.go +++ b/agent/cmd/agent/main.go @@ -10,6 +10,7 @@ import ( "github.com/hhftechnology/traefik-log-dashboard/agent/internal/auth" "github.com/hhftechnology/traefik-log-dashboard/agent/internal/config" "github.com/hhftechnology/traefik-log-dashboard/agent/internal/routes" + "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/location" "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/logger" ) @@ -23,6 +24,26 @@ func main() { logger.Log.Printf("System Monitoring: %v", cfg.SystemMonitoring) logger.Log.Printf("Port: %s", cfg.Port) + // Initialize GeoIP location services if enabled + if cfg.GeoIPEnabled { + logger.Log.Printf("GeoIP: Enabled") + location.SetDatabasePaths(cfg.GeoIPCityDB, cfg.GeoIPCountryDB) + + // Initialize location lookups + if err := location.InitializeLookups(); err != nil { + logger.Log.Printf("GeoIP: Failed to initialize (lookups will be unavailable): %v", err) + } else if location.LocationsEnabled() { + logger.Log.Printf("GeoIP: Successfully initialized") + } else { + logger.Log.Printf("GeoIP: No databases available (lookups will be unavailable)") + } + + // Ensure cleanup on shutdown + defer location.Close() + } else { + logger.Log.Printf("GeoIP: Disabled") + } + // Initialize authentication authenticator := auth.NewAuthenticator(cfg.AuthToken) if authenticator.IsEnabled() { @@ -49,6 +70,10 @@ func main() { mux.HandleFunc("/api/system/logs", 
authenticator.Middleware(handler.HandleSystemLogs)) mux.HandleFunc("/api/system/resources", authenticator.Middleware(handler.HandleSystemResources)) + // Location/GeoIP endpoints (with auth) + mux.HandleFunc("/api/location/lookup", authenticator.Middleware(handler.HandleLocationLookup)) + mux.HandleFunc("/api/location/status", authenticator.Middleware(handler.HandleLocationStatus)) + // Root endpoint mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/" { diff --git a/agent/go.mod b/agent/go.mod index a231dce..3274e81 100644 --- a/agent/go.mod +++ b/agent/go.mod @@ -1,16 +1,21 @@ module github.com/hhftechnology/traefik-log-dashboard/agent -go 1.22 +go 1.23 -require github.com/shirou/gopsutil/v3 v3.24.1 +require ( + github.com/joho/godotenv v1.5.1 + github.com/oschwald/geoip2-golang v1.11.0 + github.com/shirou/gopsutil/v3 v3.24.1 +) require ( github.com/go-ole/go-ole v1.3.0 // indirect github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a // indirect + github.com/oschwald/maxminddb-golang v1.13.1 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/shoenig/go-m1cpu v0.1.6 // indirect github.com/tklauser/go-sysconf v0.3.13 // indirect github.com/tklauser/numcpus v0.7.0 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect - golang.org/x/sys v0.17.0 // indirect + golang.org/x/sys v0.21.0 // indirect ) diff --git a/agent/go.sum b/agent/go.sum index 7932155..36a7322 100644 --- a/agent/go.sum +++ b/agent/go.sum @@ -8,9 +8,15 @@ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a h1:3Bm7EwfUQUvhNeKIkUct/gl9eod1TcXuj8stxvi/GoI= github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= +github.com/oschwald/geoip2-golang v1.11.0 h1:hNENhCn1Uyzhf9PTmquXENiWS6AlxAEnBII6r8krA3w= +github.com/oschwald/geoip2-golang v1.11.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo= +github.com/oschwald/maxminddb-golang v1.13.1 h1:G3wwjdN9JmIK2o/ermkHM+98oX5fS+k5MbwsmL4MRQE= +github.com/oschwald/maxminddb-golang v1.13.1/go.mod h1:K4pgV9N/GcK694KSTmVSDTODk4IsCNThNdTmnaBZ/F8= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= @@ -27,8 +33,9 @@ github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSS github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= 
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/go-sysconf v0.3.13 h1:GBUpcahXSpR2xN01jhkNAbTLRk2Yzgggk8IM08lq3r4= github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0= @@ -44,8 +51,8 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/agent/internal/config/config.go b/agent/internal/config/config.go index 733b28f..7869b3d 100644 --- a/agent/internal/config/config.go +++ b/agent/internal/config/config.go @@ -1,8 +1,7 @@ package config import ( - "os" - "strconv" + "github.com/hhftechnology/traefik-log-dashboard/agent/internal/env" ) // Config holds the application configuration @@ -13,48 +12,28 @@ type Config struct { SystemMonitoring bool MonitorInterval int Port string + LogFormat string + GeoIPEnabled bool + GeoIPCityDB string + GeoIPCountryDB string } -// Load reads configuration from environment variables +// Load reads configuration from environment variables using the env package func Load() *Config { + e := env.LoadEnv() + cfg := &Config{ - AccessPath: getEnv("TRAEFIK_LOG_DASHBOARD_ACCESS_PATH", "/var/log/traefik/access.log"), - ErrorPath: getEnv("TRAEFIK_LOG_DASHBOARD_ERROR_PATH", "/var/log/traefik/traefik.log"), - AuthToken: getEnv("TRAEFIK_LOG_DASHBOARD_AUTH_TOKEN", ""), - SystemMonitoring: getEnvBool("TRAEFIK_LOG_DASHBOARD_SYSTEM_MONITORING", true), - MonitorInterval: getEnvInt("TRAEFIK_LOG_DASHBOARD_MONITOR_INTERVAL", 2000), - Port: getEnv("PORT", "5000"), + AccessPath: e.AccessPath, + ErrorPath: e.ErrorPath, + AuthToken: e.AuthToken, + SystemMonitoring: e.SystemMonitoring, + MonitorInterval: 2000, // Keep default for now + Port: e.Port, + LogFormat: e.LogFormat, + GeoIPEnabled: e.GeoIPEnabled, + GeoIPCityDB: e.GeoIPCityDB, + GeoIPCountryDB: e.GeoIPCountryDB, } return cfg -} - -// getEnv retrieves an environment variable or returns a default value -func getEnv(key, defaultValue string) string { - if value := os.Getenv(key); value != "" { - return value - } - return defaultValue -} - -// getEnvBool retrieves a boolean environment variable or returns a default value -func getEnvBool(key string, defaultValue bool) bool { - if value := os.Getenv(key); value != "" { - boolValue, err := strconv.ParseBool(value) - if err == nil { - return boolValue - } - } - return defaultValue -} - -// getEnvInt retrieves an integer 
environment variable or returns a default value -func getEnvInt(key string, defaultValue int) int { - if value := os.Getenv(key); value != "" { - intValue, err := strconv.Atoi(value) - if err == nil { - return intValue - } - } - return defaultValue } \ No newline at end of file diff --git a/agent/internal/env/env.go b/agent/internal/env/env.go new file mode 100644 index 0000000..7741d1d --- /dev/null +++ b/agent/internal/env/env.go @@ -0,0 +1,59 @@ +package env + +import ( + "os" + + "github.com/joho/godotenv" + "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/logger" +) + +// Env holds environment variables for the agent +type Env struct { + Port string + AccessPath string + ErrorPath string + SystemMonitoring bool + AuthToken string + LogFormat string + GeoIPEnabled bool + GeoIPCityDB string + GeoIPCountryDB string +} + +// LoadEnv loads environment variables from .env file if present +// and returns an Env struct with all configuration +func LoadEnv() Env { + // Load .env file if present + if err := godotenv.Load(); err != nil { + logger.Log.Println("No .env file found, using system environment variables") + } + + return Env{ + Port: getEnv("PORT", "5000"), + AccessPath: getEnv("TRAEFIK_LOG_DASHBOARD_ACCESS_PATH", "/var/log/traefik/access.log"), + ErrorPath: getEnv("TRAEFIK_LOG_DASHBOARD_ERROR_PATH", "/var/log/traefik/traefik.log"), + SystemMonitoring: getEnvBool("TRAEFIK_LOG_DASHBOARD_SYSTEM_MONITORING", true), + AuthToken: getEnv("TRAEFIK_LOG_DASHBOARD_AUTH_TOKEN", ""), + LogFormat: getEnv("TRAEFIK_LOG_DASHBOARD_LOG_FORMAT", "json"), + GeoIPEnabled: getEnvBool("TRAEFIK_LOG_DASHBOARD_GEOIP_ENABLED", true), + GeoIPCityDB: getEnv("TRAEFIK_LOG_DASHBOARD_GEOIP_CITY_DB", "GeoLite2-City.mmdb"), + GeoIPCountryDB: getEnv("TRAEFIK_LOG_DASHBOARD_GEOIP_COUNTRY_DB", "GeoLite2-Country.mmdb"), + } +} + +// getEnv retrieves an environment variable or returns a default value +func getEnv(key, defaultValue string) string { + if value := os.Getenv(key); value != "" { + return value + } + return defaultValue +} + +// getEnvBool retrieves a boolean environment variable or returns a default value +func getEnvBool(key string, defaultValue bool) bool { + value := os.Getenv(key) + if value == "" { + return defaultValue + } + return value == "true" || value == "1" || value == "yes" +} \ No newline at end of file diff --git a/agent/internal/routes/routes.go b/agent/internal/routes/routes.go index 8c91792..153ad48 100644 --- a/agent/internal/routes/routes.go +++ b/agent/internal/routes/routes.go @@ -5,11 +5,13 @@ import ( "os" "path/filepath" "sync" + "encoding/json" "github.com/hhftechnology/traefik-log-dashboard/agent/internal/config" "github.com/hhftechnology/traefik-log-dashboard/agent/internal/utils" "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/logs" "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/system" + "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/location" ) // Handler manages HTTP routes and dependencies @@ -210,7 +212,7 @@ func (h *Handler) HandleSystemResources(w http.ResponseWriter, r *http.Request) return } - stats, err := system.GetSystemStats() + stats, err := system.MeasureSystem() if err != nil { utils.RespondError(w, http.StatusInternalServerError, err.Error()) return @@ -293,4 +295,86 @@ func (h *Handler) HandleGetLog(w http.ResponseWriter, r *http.Request) { } utils.RespondJSON(w, http.StatusOK, result) +} + +// HandleLocationLookup handles requests for IP geolocation lookups +func (h *Handler) HandleLocationLookup(w http.ResponseWriter, r 
*http.Request) { + utils.EnableCORS(w) + if r.Method == http.MethodOptions { + w.WriteHeader(http.StatusOK) + return + } + + // Only allow POST requests for location lookups + if r.Method != http.MethodPost { + utils.RespondError(w, http.StatusMethodNotAllowed, "Only POST method is allowed") + return + } + + // Check if GeoIP is enabled + if !h.config.GeoIPEnabled { + utils.RespondError(w, http.StatusForbidden, "GeoIP lookups are disabled") + return + } + + // Check if location services are available + if !location.LocationsEnabled() { + utils.RespondError(w, http.StatusServiceUnavailable, "GeoIP databases not available") + return + } + + // Parse request body - expecting array of IP addresses + var request struct { + IPs []string `json:"ips"` + } + + if err := json.NewDecoder(r.Body).Decode(&request); err != nil { + utils.RespondError(w, http.StatusBadRequest, "Invalid request body") + return + } + + // Validate request + if len(request.IPs) == 0 { + utils.RespondError(w, http.StatusBadRequest, "No IP addresses provided") + return + } + + // Limit to 1000 IPs per request to prevent abuse + if len(request.IPs) > 1000 { + utils.RespondError(w, http.StatusBadRequest, "Too many IP addresses (max 1000)") + return + } + + // Perform location lookups + locations, err := location.ResolveLocations(request.IPs) + if err != nil { + utils.RespondError(w, http.StatusInternalServerError, err.Error()) + return + } + + // Return results + response := map[string]interface{}{ + "locations": locations, + "count": len(locations), + } + + utils.RespondJSON(w, http.StatusOK, response) +} + +// HandleLocationStatus returns the status of the GeoIP service +func (h *Handler) HandleLocationStatus(w http.ResponseWriter, r *http.Request) { + utils.EnableCORS(w) + if r.Method == http.MethodOptions { + w.WriteHeader(http.StatusOK) + return + } + + status := map[string]interface{}{ + "enabled": h.config.GeoIPEnabled, + "available": location.LocationsEnabled(), + "city_db": h.config.GeoIPCityDB, + "country_db": h.config.GeoIPCountryDB, + } + + utils.RespondJSON(w, http.StatusOK, status) } \ No newline at end of file diff --git a/agent/pkg/location/location.go b/agent/pkg/location/location.go new file mode 100644 index 0000000..9a24356 --- /dev/null +++ b/agent/pkg/location/location.go @@ -0,0 +1,203 @@ +package location + +import ( + "net" + "sync" + + "github.com/oschwald/geoip2-golang" + "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/logger" +) + +// Location represents geolocation information for an IP address +type Location struct { + IPAddress string `json:"ipAddress"` + Country string `json:"country,omitempty"` + City string `json:"city,omitempty"` + Latitude float64 `json:"latitude,omitempty"` + Longitude float64 `json:"longitude,omitempty"` +} + +var ( + cityReader *geoip2.Reader + countryReader *geoip2.Reader + initOnce sync.Once + initErr error + initDone = make(chan struct{}) + cityDBPath string + countryDBPath string +) + +// SetDatabasePaths sets the paths for the GeoIP databases +func SetDatabasePaths(cityPath, countryPath string) { + cityDBPath = cityPath + countryDBPath = countryPath +} + +// LocationsEnabled checks if location lookups are available +func LocationsEnabled() bool { + err := InitializeLookups() + return err == nil && (cityReader != nil || countryReader != nil) +} + +// InitializeLookups ensures the MaxMind databases are loaded +func InitializeLookups() error { + initOnce.Do(func() { + // Try to open the city database first + if cityDBPath != "" { + var cityErr error + 
cityReader, cityErr = geoip2.Open(cityDBPath) + if cityErr != nil { + logger.Log.Printf("Failed to load GeoLite2 City database from %s: %v", cityDBPath, cityErr) + } else { + logger.Log.Println("GeoLite2 City database loaded successfully") + } + } + + // Try to open the country database if city failed or as fallback + if cityReader == nil && countryDBPath != "" { + var countryErr error + countryReader, countryErr = geoip2.Open(countryDBPath) + if countryErr != nil { + logger.Log.Printf("Failed to load GeoLite2 Country database from %s: %v", countryDBPath, countryErr) + initErr = countryErr + } else { + logger.Log.Println("GeoLite2 Country database loaded successfully") + } + } + + // If we have at least one database, consider it successful + if cityReader != nil || countryReader != nil { + initErr = nil + } + + close(initDone) + }) + + // Wait for initialization to complete + <-initDone + return initErr +} + +// LocationLookup returns geolocation information for a single IP address +func LocationLookup(ipAddress string) (Location, error) { + // Ensure databases are initialized + if err := InitializeLookups(); err != nil && cityReader == nil && countryReader == nil { + return Location{ + IPAddress: ipAddress, + Country: "", + City: "", + }, nil + } + + // Parse the IP address + ip := net.ParseIP(ipAddress) + if ip == nil { + return Location{ + IPAddress: ipAddress, + Country: "", + City: "", + }, nil + } + + // Check if it's a private IP + if isPrivateIP(ip) { + return Location{ + IPAddress: ipAddress, + Country: "Private", + City: "", + }, nil + } + + location := Location{ + IPAddress: ipAddress, + } + + // Try city lookup first if available + if cityReader != nil { + city, err := cityReader.City(ip) + if err == nil { + location.Country = city.Country.IsoCode + if city.City.Names != nil { + location.City = city.City.Names["en"] + } + location.Latitude = city.Location.Latitude + location.Longitude = city.Location.Longitude + return location, nil + } + } + + // Fall back to country lookup if available + if countryReader != nil { + country, err := countryReader.Country(ip) + if err == nil { + location.Country = country.Country.IsoCode + return location, nil + } + } + + // Return empty location if no lookup was successful + return location, nil +} + +// ResolveLocations performs geolocation lookups for multiple IP addresses in parallel +func ResolveLocations(ipAddresses []string) ([]Location, error) { + // Ensure databases are initialized + if err := InitializeLookups(); err != nil && cityReader == nil && countryReader == nil { + // Return empty locations if neither database is available + locations := make([]Location, len(ipAddresses)) + for i, ip := range ipAddresses { + locations[i] = Location{ + IPAddress: ip, + Country: "", + City: "", + } + } + return locations, nil + } + + // Use a wait group to track parallel lookups + var wg sync.WaitGroup + locations := make([]Location, len(ipAddresses)) + + // Perform lookups in parallel + for i, ip := range ipAddresses { + wg.Add(1) + go func(idx int, ipAddr string) { + defer wg.Done() + location, _ := LocationLookup(ipAddr) + locations[idx] = location + }(i, ip) + } + + // Wait for all lookups to complete + wg.Wait() + return locations, nil +} + +// isPrivateIP checks if an IP address is private/internal +func isPrivateIP(ip net.IP) bool { + // Check for IPv4 private ranges + if ip4 := ip.To4(); ip4 != nil { + return ip4[0] == 10 || + (ip4[0] == 172 && ip4[1] >= 16 && ip4[1] <= 31) || + (ip4[0] == 192 && ip4[1] == 168) || + ip4[0] == 127 // 
localhost + } + + // Check for IPv6 private ranges + if ip.IsLoopback() || ip.IsLinkLocalUnicast() || ip.IsPrivate() { + return true + } + + return false +} + +// Close releases resources used by MaxMind readers +func Close() { + if cityReader != nil { + cityReader.Close() + } + if countryReader != nil { + countryReader.Close() + } +} \ No newline at end of file diff --git a/agent/pkg/system/system.go b/agent/pkg/system/system.go index 0055df9..02602d9 100644 --- a/agent/pkg/system/system.go +++ b/agent/pkg/system/system.go @@ -2,123 +2,417 @@ package system import ( "fmt" + "os/exec" "runtime" + "strconv" + "strings" "time" "github.com/shirou/gopsutil/v3/cpu" "github.com/shirou/gopsutil/v3/disk" "github.com/shirou/gopsutil/v3/mem" + "github.com/hhftechnology/traefik-log-dashboard/agent/pkg/logger" ) -// SystemStats represents overall system resource statistics -type SystemStats struct { - CPU CPUStats `json:"cpu"` - Memory MemoryStats `json:"memory"` - Disk DiskStats `json:"disk"` +// SystemInfo represents system information for the API response +type SystemInfo struct { + Uptime int64 `json:"uptime"` + Timestamp string `json:"timestamp"` + CPU CPUStats `json:"cpu"` + Memory MemoryStats `json:"memory"` + Disk DiskStats `json:"disk"` } -// CPUStats represents CPU usage statistics +// CPUStats represents CPU statistics with percentage type CPUStats struct { - UsagePercent float64 `json:"usage_percent"` - Cores int `json:"cores"` + Model string `json:"model"` + Cores int `json:"cores"` + Speed float64 `json:"speed"` + UsagePercent float64 `json:"usage_percent"` // Changed from Usage to UsagePercent + CoreUsage []float64 `json:"coreUsage"` } -// MemoryStats represents memory usage statistics +// MemoryStats represents memory statistics with percentage type MemoryStats struct { - Total uint64 `json:"total"` + Free uint64 `json:"free"` Available uint64 `json:"available"` Used uint64 `json:"used"` - UsedPercent float64 `json:"used_percent"` - Free uint64 `json:"free"` + Total uint64 `json:"total"` + UsedPercent float64 `json:"used_percent"` // Added percentage calculation } -// DiskStats represents disk usage statistics +// DiskStats represents aggregated disk statistics with percentage type DiskStats struct { Total uint64 `json:"total"` Used uint64 `json:"used"` Free uint64 `json:"free"` - UsedPercent float64 `json:"used_percent"` + UsedPercent float64 `json:"used_percent"` // Added percentage calculation +} + +// DiskInfo represents individual disk information (internal use) +type DiskInfo struct { + Filesystem string `json:"filesystem"` + Size uint64 `json:"size"` + Used uint64 `json:"used"` + Free uint64 `json:"free"` + MountedOn string `json:"mountedOn"` } -// GetSystemStats retrieves current system resource statistics -func GetSystemStats() (*SystemStats, error) { +func MeasureSystem() (SystemInfo, error) { + uptime, err := getUptime() + if err != nil { + logger.Log.Printf("Warning: Could not get uptime: %v", err) + uptime = 0 + } + cpuStats, err := getCPUStats() if err != nil { - return nil, fmt.Errorf("failed to get CPU stats: %w", err) + return SystemInfo{}, fmt.Errorf("failed to get CPU stats: %w", err) } - memStats, err := getMemoryStats() + memoryStats, err := getMemoryStats() if err != nil { - return nil, fmt.Errorf("failed to get memory stats: %w", err) + return SystemInfo{}, fmt.Errorf("failed to get memory stats: %w", err) } - diskStats, err := getDiskStats("/") + diskStats, err := getDiskStats() if err != nil { - return nil, fmt.Errorf("failed to get disk stats: %w", err) + 
return SystemInfo{}, fmt.Errorf("failed to get disk stats: %w", err) } - return &SystemStats{ - CPU: *cpuStats, - Memory: *memStats, - Disk: *diskStats, + return SystemInfo{ + Uptime: uptime, + Timestamp: time.Now().UTC().Format(time.RFC3339), + CPU: cpuStats, + Memory: memoryStats, + Disk: diskStats, }, nil } -// getCPUStats retrieves CPU usage statistics -func getCPUStats() (*CPUStats, error) { - // Get CPU percentage over 1 second interval - percentages, err := cpu.Percent(time.Second, false) +func getCPUStats() (CPUStats, error) { + // Try gopsutil first + cpuInfo, err := cpu.Info() if err != nil { - return nil, err + logger.Log.Printf("gopsutil cpu.Info() failed: %v, trying OS-specific fallback", err) + return getCPUStatsFallback() } - var usagePercent float64 - if len(percentages) > 0 { - usagePercent = percentages[0] + if len(cpuInfo) == 0 { + logger.Log.Printf("gopsutil returned empty CPU info, trying OS-specific fallback") + return getCPUStatsFallback() } - // Get number of CPU cores - cores := runtime.NumCPU() + // Get per-CPU usage (set percpu=true) + cpuUsage, err := cpu.Percent(time.Second, true) + if err != nil { + logger.Log.Printf("Warning: Could not get CPU usage: %v", err) + cpuUsage = make([]float64, len(cpuInfo)) + } + + model := cpuInfo[0].ModelName + cores := len(cpuUsage) + if cores == 0 { + cores = runtime.NumCPU() + cpuUsage = make([]float64, cores) + } + speed := cpuInfo[0].Mhz + + // Calculate average usage across all cores + var overallUsage float64 + if len(cpuUsage) > 0 { + var total float64 + for _, usage := range cpuUsage { + total += usage + } + overallUsage = total / float64(len(cpuUsage)) + } - return &CPUStats{ - UsagePercent: usagePercent, + return CPUStats{ + Model: model, Cores: cores, + Speed: speed, + UsagePercent: parseFloat(overallUsage, 1), + CoreUsage: cpuUsage, }, nil } -// getMemoryStats retrieves memory usage statistics -func getMemoryStats() (*MemoryStats, error) { +func getCPUStatsFallback() (CPUStats, error) { + switch runtime.GOOS { + case "darwin": + return getCPUInfoMacOS() + case "windows": + return getCPUInfoWindows() + default: + return getCPUInfoLinux() + } +} + +func getCPUInfoMacOS() (CPUStats, error) { + var stats CPUStats + + // Get CPU model + cmd := exec.Command("sysctl", "-n", "machdep.cpu.brand_string") + output, err := cmd.Output() + if err == nil { + stats.Model = strings.TrimSpace(string(output)) + } else { + stats.Model = "Unknown" + } + + // Get CPU core count + cmd = exec.Command("sysctl", "-n", "hw.ncpu") + output, err = cmd.Output() + if err == nil { + if cores, err := strconv.Atoi(strings.TrimSpace(string(output))); err == nil { + stats.Cores = cores + } + } else { + stats.Cores = runtime.NumCPU() + } + + // Get CPU frequency + freqKeys := []string{"hw.cpufrequency_max", "hw.cpufrequency", "machdep.cpu.max_basic"} + for _, key := range freqKeys { + cmd = exec.Command("sysctl", "-n", key) + output, err = cmd.Output() + if err == nil { + if freq, err := strconv.ParseFloat(strings.TrimSpace(string(output)), 64); err == nil { + if freq > 1000000 { + stats.Speed = freq / 1000000 + } else { + stats.Speed = freq + } + break + } + } + } + + // Get CPU usage + cpuUsage, err := cpu.Percent(time.Second, false) + if err == nil && len(cpuUsage) > 0 { + stats.UsagePercent = parseFloat(cpuUsage[0], 1) + } + + return stats, nil +} + +func getCPUInfoWindows() (CPUStats, error) { + var stats CPUStats + + cmd := exec.Command("wmic", "cpu", "get", "Name,NumberOfCores,MaxClockSpeed", "/format:csv") + output, err := cmd.Output() + if err 
!= nil { + return stats, err + } + + lines := strings.Split(string(output), "\n") + for _, line := range lines { + if strings.Contains(line, ",") && !strings.Contains(line, "Node") { + parts := strings.Split(line, ",") + if len(parts) >= 4 { + stats.Model = strings.TrimSpace(parts[2]) + if cores, err := strconv.Atoi(strings.TrimSpace(parts[3])); err == nil { + stats.Cores = cores + } + if speed, err := strconv.ParseFloat(strings.TrimSpace(parts[1]), 64); err == nil { + stats.Speed = speed + } + break + } + } + } + + // Get CPU usage + cpuUsage, err := cpu.Percent(time.Second, false) + if err == nil && len(cpuUsage) > 0 { + stats.UsagePercent = parseFloat(cpuUsage[0], 1) + } + + return stats, nil +} + +func getCPUInfoLinux() (CPUStats, error) { + var stats CPUStats + + cmd := exec.Command("cat", "/proc/cpuinfo") + output, err := cmd.Output() + if err != nil { + return stats, err + } + + lines := strings.Split(string(output), "\n") + coreCount := 0 + for _, line := range lines { + if strings.HasPrefix(line, "model name") { + parts := strings.Split(line, ":") + if len(parts) >= 2 { + stats.Model = strings.TrimSpace(parts[1]) + } + } else if strings.HasPrefix(line, "cpu MHz") { + parts := strings.Split(line, ":") + if len(parts) >= 2 { + if speed, err := strconv.ParseFloat(strings.TrimSpace(parts[1]), 64); err == nil { + stats.Speed = speed + } + } + } else if strings.HasPrefix(line, "processor") { + coreCount++ + } + } + stats.Cores = coreCount + + // Get CPU usage + cpuUsage, err := cpu.Percent(time.Second, false) + if err == nil && len(cpuUsage) > 0 { + stats.UsagePercent = parseFloat(cpuUsage[0], 1) + } + + return stats, nil +} + +func getMemoryStats() (MemoryStats, error) { vmStat, err := mem.VirtualMemory() if err != nil { - return nil, err + return MemoryStats{}, err } - return &MemoryStats{ - Total: vmStat.Total, + // Calculate used percentage + usedPercent := 0.0 + if vmStat.Total > 0 { + usedPercent = (float64(vmStat.Used) / float64(vmStat.Total)) * 100.0 + } + + return MemoryStats{ + Free: vmStat.Free, Available: vmStat.Available, Used: vmStat.Used, - UsedPercent: vmStat.UsedPercent, - Free: vmStat.Free, + Total: vmStat.Total, + UsedPercent: parseFloat(usedPercent, 1), }, nil } -// getDiskStats retrieves disk usage statistics for a given path -func getDiskStats(path string) (*DiskStats, error) { - usage, err := disk.Usage(path) +func getDiskStats() (DiskStats, error) { + disks, err := getDiskInfo() if err != nil { - return nil, err + return DiskStats{}, err + } + + if len(disks) == 0 { + return DiskStats{}, fmt.Errorf("no disk information available") + } + + // Aggregate all disk stats + var totalSize, totalUsed, totalFree uint64 + for _, disk := range disks { + totalSize += disk.Size + totalUsed += disk.Used + totalFree += disk.Free + } + + // Calculate used percentage + usedPercent := 0.0 + if totalSize > 0 { + usedPercent = (float64(totalUsed) / float64(totalSize)) * 100.0 } - return &DiskStats{ - Total: usage.Total, - Used: usage.Used, - Free: usage.Free, - UsedPercent: usage.UsedPercent, + return DiskStats{ + Total: totalSize, + Used: totalUsed, + Free: totalFree, + UsedPercent: parseFloat(usedPercent, 1), }, nil } -// GetDiskStatsForPath retrieves disk usage statistics for a specific path -// Useful for monitoring log directory specifically -func GetDiskStatsForPath(path string) (*DiskStats, error) { - return getDiskStats(path) +func getDiskInfo() ([]DiskInfo, error) { + var disks []DiskInfo + + partitions, err := disk.Partitions(false) + if err != nil { + return nil, err + } 
+ + for _, partition := range partitions { + usage, err := disk.Usage(partition.Mountpoint) + if err != nil { + logger.Log.Printf("Error getting disk usage for %s: %v", partition.Mountpoint, err) + continue + } + + disks = append(disks, DiskInfo{ + Filesystem: partition.Device, + Size: usage.Total, + Used: usage.Used, + Free: usage.Free, + MountedOn: partition.Mountpoint, + }) + } + + return disks, nil +} + +func getUptime() (int64, error) { + switch runtime.GOOS { + case "windows": + cmd := exec.Command("systeminfo") + output, err := cmd.Output() + if err != nil { + return 0, err + } + lines := strings.Split(string(output), "\n") + for _, line := range lines { + if strings.Contains(line, "System Boot Time") { + bootTimeStr := strings.TrimSpace(strings.Split(line, ":")[1]) + bootTime, err := time.Parse("1/2/2006, 3:04:05 PM", bootTimeStr) + if err != nil { + return 0, err + } + return int64(time.Since(bootTime).Seconds()), nil + } + } + return 0, fmt.Errorf("could not determine system uptime") + + case "darwin": + cmd := exec.Command("sysctl", "-n", "kern.boottime") + output, err := cmd.Output() + if err != nil { + return 0, err + } + + parts := strings.Split(string(output), "sec = ") + if len(parts) < 2 { + return 0, fmt.Errorf("unexpected sysctl output format") + } + + bootTimeParts := strings.Split(parts[1], ",") + bootTimeStr := strings.TrimSpace(bootTimeParts[0]) + bootTime, err := strconv.ParseInt(bootTimeStr, 10, 64) + if err != nil { + return 0, err + } + + currentTime := time.Now().Unix() + return currentTime - bootTime, nil + + default: + cmd := exec.Command("cat", "/proc/uptime") + output, err := cmd.Output() + if err != nil { + return 0, err + } + + uptimeStr := strings.Split(string(output), " ")[0] + uptimeFloat, err := strconv.ParseFloat(uptimeStr, 64) + if err != nil { + return 0, err + } + + return int64(uptimeFloat), nil + } +} + +func parseFloat(val float64, precision int) float64 { + format := fmt.Sprintf("%%.%df", precision) + formatted := fmt.Sprintf(format, val) + result, _ := strconv.ParseFloat(formatted, 64) + return result } \ No newline at end of file diff --git a/cli/go.mod b/cli/go.mod index 6650f18..ff3ce80 100644 --- a/cli/go.mod +++ b/cli/go.mod @@ -1,6 +1,6 @@ module github.com/hhftechnology/traefik-log-dashboard/cli -go 1.22 +go 1.23 require ( github.com/charmbracelet/bubbletea v0.25.0 diff --git a/dashboard/Dockerfile b/dashboard/Dockerfile index a671540..e11cc1c 100644 --- a/dashboard/Dockerfile +++ b/dashboard/Dockerfile @@ -1,43 +1,62 @@ -# Stage 1: Build -FROM node:18-alpine AS builder +# ============================================================================== +# Stage 1: Dependencies - Install only production dependencies +# ============================================================================== +FROM node:22-alpine AS deps WORKDIR /app -# Install dependencies -# Only copy package.json and package-lock.json (if available) first to leverage caching -COPY package*.json ./ -RUN npm ci +# Install dependencies based on the preferred package manager +COPY package.json package-lock.json* ./ -# Copy the rest of the source code +# Use npm ci for faster, more reliable installs +RUN npm ci --only=production && \ + npm cache clean --force + +# ============================================================================== +# Stage 2: Builder - Build the application +# ============================================================================== +FROM node:22-alpine AS builder + +WORKDIR /app + +# Copy dependencies from deps stage +COPY --from=deps 
/app/node_modules ./node_modules COPY . . -# Build the application +# Install dev dependencies for build +RUN npm ci && \ + npm cache clean --force + +# Build Next.js application +# This will generate the standalone output +ENV NEXT_TELEMETRY_DISABLED=1 +ENV NODE_ENV=production + RUN npm run build -# Stage 2: Production -FROM node:18-alpine AS runner +# ============================================================================== +# Stage 3: Runner - Minimal production image using distroless +# ============================================================================== +FROM gcr.io/distroless/nodejs22-debian12:nonroot WORKDIR /app +# Set production environment ENV NODE_ENV=production +ENV NEXT_TELEMETRY_DISABLED=1 +ENV PORT=3000 +ENV HOSTNAME="0.0.0.0" -# Create a non-root user for security -RUN addgroup --system --gid 1001 nodejs -RUN adduser --system --uid 1001 nextjs - -# Copy the standalone output from the build stage -COPY --from=builder /app/.next/standalone ./ -# Copy the static assets -COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static -# Copy public assets -COPY --from=builder /app/public ./public +# Copy only necessary files from builder +# The standalone output includes all necessary dependencies +COPY --from=builder --chown=nonroot:nonroot /app/.next/standalone ./ +COPY --from=builder --chown=nonroot:nonroot /app/.next/static ./.next/static +COPY --from=builder --chown=nonroot:nonroot /app/public ./public - -USER nextjs +# Distroless runs as nonroot user (uid 65532) by default +# No USER directive needed EXPOSE 3000 -ENV PORT 3000 -ENV HOSTNAME "0.0.0.0" - -CMD ["node", "server.js"] \ No newline at end of file +# Distroless doesn't have shell, use exec form +CMD ["server.js"] \ No newline at end of file diff --git a/dashboard/app/api/agents/check-status/route.ts b/dashboard/app/api/agents/check-status/route.ts new file mode 100644 index 0000000..ea1496c --- /dev/null +++ b/dashboard/app/api/agents/check-status/route.ts @@ -0,0 +1,105 @@ +// dashboard/app/api/agents/check-status/route.ts + +import { NextRequest, NextResponse } from 'next/server'; + +export const dynamic = 'force-dynamic'; +export const revalidate = 0; + +interface CheckStatusRequest { + agentUrl: string; + agentToken: string; +} + +export async function POST(request: NextRequest) { + try { + const body: CheckStatusRequest = await request.json(); + const { agentUrl, agentToken } = body; + + if (!agentUrl || !agentToken) { + return NextResponse.json( + { error: 'Missing agentUrl or agentToken' }, + { status: 400 } + ); + } + + // Validate URL format + let url: URL; + try { + url = new URL(agentUrl); + } catch { + return NextResponse.json( + { error: 'Invalid agent URL format' }, + { status: 400 } + ); + } + + // Check agent status endpoint with authentication + const headers: HeadersInit = { + 'Content-Type': 'application/json', + 'Cache-Control': 'no-cache, no-store, must-revalidate', + 'Authorization': `Bearer ${agentToken}`, + }; + + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 5000); // 5 second timeout + + try { + const response = await fetch(`${agentUrl}/api/logs/status`, { + headers, + signal: controller.signal, + cache: 'no-store', + }); + + clearTimeout(timeoutId); + + if (!response.ok) { + return NextResponse.json( + { + online: false, + error: `Agent returned ${response.status}: ${response.statusText}`, + }, + { status: 200 } // Return 200 so the frontend can handle the offline status + ); + } + + const data = await 
response.json(); + + return NextResponse.json({ + online: true, + version: data.version || 'unknown', + uptime: data.uptime || 0, + agentInfo: data, + }); + } catch (fetchError) { + clearTimeout(timeoutId); + + if (fetchError instanceof Error) { + if (fetchError.name === 'AbortError') { + return NextResponse.json( + { online: false, error: 'Connection timeout' }, + { status: 200 } + ); + } + + return NextResponse.json( + { online: false, error: `Connection failed: ${fetchError.message}` }, + { status: 200 } + ); + } + + return NextResponse.json( + { online: false, error: 'Unknown connection error' }, + { status: 200 } + ); + } + } catch (error) { + console.error('Agent status check error:', error); + return NextResponse.json( + { + error: 'Failed to check agent status', + details: error instanceof Error ? error.message : String(error), + }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/dashboard/app/api/location/lookup/route.ts b/dashboard/app/api/location/lookup/route.ts new file mode 100644 index 0000000..bb22bf7 --- /dev/null +++ b/dashboard/app/api/location/lookup/route.ts @@ -0,0 +1,72 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { agentConfig } from '@/lib/agent-config'; + +export const dynamic = 'force-dynamic'; +export const revalidate = 0; + +export async function POST(request: NextRequest) { + try { + const body = await request.json(); + const { ips } = body; + + if (!ips || !Array.isArray(ips) || ips.length === 0) { + return NextResponse.json( + { error: 'Invalid request: ips array is required' }, + { status: 400 } + ); + } + + // Limit to 1000 IPs per request + if (ips.length > 1000) { + return NextResponse.json( + { error: 'Too many IPs (max 1000)' }, + { status: 400 } + ); + } + + const AGENT_API_URL = agentConfig.url; + const AGENT_API_TOKEN = agentConfig.token; + + const headers: HeadersInit = { + 'Content-Type': 'application/json', + }; + + if (AGENT_API_TOKEN) { + headers['Authorization'] = `Bearer ${AGENT_API_TOKEN}`; + } + + const response = await fetch( + `${AGENT_API_URL}/api/location/lookup`, + { + method: 'POST', + headers, + body: JSON.stringify({ ips }), + cache: 'no-store', + } + ); + + if (!response.ok) { + const error = await response.text(); + console.error('Agent location error:', error); + return NextResponse.json( + { error: `Agent error: ${error}` }, + { status: response.status } + ); + } + + const data = await response.json(); + + const res = NextResponse.json(data); + res.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate'); + res.headers.set('Pragma', 'no-cache'); + res.headers.set('Expires', '0'); + + return res; + } catch (error) { + console.error('Location lookup API error:', error); + return NextResponse.json( + { error: 'Failed to lookup locations', details: String(error) }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/dashboard/app/api/location/status/route.ts b/dashboard/app/api/location/status/route.ts new file mode 100644 index 0000000..b2f12bc --- /dev/null +++ b/dashboard/app/api/location/status/route.ts @@ -0,0 +1,66 @@ +import { NextResponse } from 'next/server'; +import { agentConfig } from '@/lib/agent-config'; + +export const dynamic = 'force-dynamic'; +export const revalidate = 0; + +export async function GET() { + try { + const AGENT_API_URL = agentConfig.url; + const AGENT_API_TOKEN = agentConfig.token; + + const headers: HeadersInit = { + 'Content-Type': 'application/json', + }; + + if (AGENT_API_TOKEN) { + headers['Authorization'] = `Bearer 
${AGENT_API_TOKEN}`; + } + + const response = await fetch( + `${AGENT_API_URL}/api/location/status`, + { + headers, + cache: 'no-store', + } + ); + + if (!response.ok) { + // If agent doesn't have location endpoint, return disabled status + if (response.status === 404) { + return NextResponse.json({ + enabled: false, + available: false, + message: 'Location service not available on agent' + }); + } + + const error = await response.text(); + console.error('Agent location status error:', error); + return NextResponse.json( + { error: `Agent error: ${error}` }, + { status: response.status } + ); + } + + const data = await response.json(); + + const res = NextResponse.json(data); + res.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate'); + res.headers.set('Pragma', 'no-cache'); + res.headers.set('Expires', '0'); + + return res; + } catch (error) { + console.error('Location status API error:', error); + return NextResponse.json( + { + enabled: false, + available: false, + error: 'Failed to fetch location status', + details: String(error) + }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/dashboard/app/dashboard/demo/page.tsx b/dashboard/app/dashboard/demo/page.tsx index 5cd6c9b..5314004 100644 --- a/dashboard/app/dashboard/demo/page.tsx +++ b/dashboard/app/dashboard/demo/page.tsx @@ -1,7 +1,8 @@ +// dashboard/app/dashboard/demo/page.tsx 'use client'; import { useEffect, useState } from 'react'; -import Dashboard from '@/components/dashboard/Dashboard'; +import DashboardWithFilters from '@/components/dashboard/DashboardWithFilters'; import Header from '@/components/ui/Header'; import { generateTimeSeriesLogs } from '@/lib/demo'; import { TraefikLog } from '@/lib/types'; @@ -52,7 +53,7 @@ export default function DemoDashboardPage() { demoMode={true} lastUpdate={lastUpdate} /> - + ); } \ No newline at end of file diff --git a/dashboard/app/dashboard/page.tsx b/dashboard/app/dashboard/page.tsx index 21e8d65..553b4ca 100644 --- a/dashboard/app/dashboard/page.tsx +++ b/dashboard/app/dashboard/page.tsx @@ -1,11 +1,11 @@ +// dashboard/app/dashboard/page.tsx 'use client'; import { useEffect, useState, useRef } from 'react'; -import Dashboard from '@/components/dashboard/Dashboard'; +import DashboardWithFilters from '@/components/dashboard/DashboardWithFilters'; import Header from '@/components/ui/Header'; import { TraefikLog } from '@/lib/types'; import { parseTraefikLogs } from '@/lib/traefik-parser'; -import { Activity } from 'lucide-react'; export default function DashboardPage() { const [logs, setLogs] = useState([]); @@ -20,8 +20,10 @@ export default function DashboardPage() { useEffect(() => { const fetchLogs = async () => { try { + const position = positionRef.current ?? 
-1; + const response = await fetch( - `/api/logs/access?period=1h&position=${positionRef.current}` + `/api/logs/access?period=1h&position=${position}` ); if (!response.ok) { @@ -33,7 +35,7 @@ export default function DashboardPage() { if (data.logs && data.logs.length > 0) { const parsedLogs = parseTraefikLogs(data.logs); - setLogs(prevLogs => { + setLogs((prevLogs: TraefikLog[]) => { if (isFirstFetch.current) { isFirstFetch.current = false; return parsedLogs; @@ -42,7 +44,7 @@ export default function DashboardPage() { }); } - if (data.positions && data.positions.length > 0) { + if (data.positions && data.positions.length > 0 && typeof data.positions[0].Position === 'number') { positionRef.current = data.positions[0].Position; } @@ -59,42 +61,49 @@ export default function DashboardPage() { }; fetchLogs(); - const interval = setInterval(fetchLogs, 3000); + const interval = setInterval(fetchLogs, 5000); + return () => clearInterval(interval); }, []); - if (loading && logs.length === 0) { + if (loading) { return ( -
[The JSX in the remaining hunks of dashboard/app/dashboard/page.tsx was stripped during extraction; only text fragments survive.]
[Loading state: the old centered spinner with the text "Loading dashboard..." is replaced by a panel that reads "Connecting to agent...".]
[Error state: shown when `error && !connected` instead of `error && logs.length === 0`; the new panel renders a "Connection Error" heading, the {error} message, and the hint "Make sure the agent is running and accessible". The old panel's conditional hint ({error.includes('404') ? 'The agent is connected but no logs are available yet.' : 'Please check that the agent is running and accessible.'}) is removed.]
@@ -106,8 +115,7 @@ export default function DashboardPage() {
[Hunk body stripped during extraction.]
@@ -127,7 +135,7 @@ export default function DashboardPage() {
[Hunk body stripped during extraction; matching the updated import, the rendered <Dashboard> element is presumably replaced by <DashboardWithFilters>.]
); } \ No newline at end of file diff --git a/dashboard/app/layout.tsx b/dashboard/app/layout.tsx index 7be074a..bf98966 100644 --- a/dashboard/app/layout.tsx +++ b/dashboard/app/layout.tsx @@ -1,7 +1,14 @@ +// dashboard/app/layout.tsx import React from 'react'; -import './globals.css'; // Add this import! +import './globals.css'; +import Providers from '@/components/providers/Providers'; // MODIFIED LINE 4 -export default function RootLayout({ // Also consider renaming to RootLayout +export const metadata = { + title: 'Traefik Log Dashboard', + description: 'Real-time analytics and monitoring for Traefik reverse proxy logs', +}; + +export default function RootLayout({ children, }: { children: React.ReactNode; @@ -9,9 +16,11 @@ export default function RootLayout({ // Also consider renaming to RootLayout return ( -
[Hunk body stripped during extraction: the root layout previously rendered {children} directly inside <body>; it now wraps {children} in the newly imported <Providers> component.]
); diff --git a/dashboard/app/settings/agents/page.tsx b/dashboard/app/settings/agents/page.tsx new file mode 100644 index 0000000..3889304 --- /dev/null +++ b/dashboard/app/settings/agents/page.tsx @@ -0,0 +1,354 @@ +// dashboard/app/settings/agents/page.tsx +'use client'; + +import { useState } from 'react'; +import Link from 'next/link'; +import { useAgents } from '@/lib/contexts/AgentContext'; +import { Agent } from '@/lib/types/agent'; +import { Button } from '@/components/ui/button'; +import { Badge } from '@/components/ui/badge'; +import { + Plus, + Trash2, + Edit, + Server, + MapPin, + RefreshCw, + CheckCircle2, + XCircle, + Circle, + Settings as SettingsIcon, + Activity, + ChevronLeft, +} from 'lucide-react'; +import AgentFormModal from '@/components/AgentFormModal'; +import AgentBulkOperations from '@/components/AgentBulkOperations'; +import AgentHealthDashboard from '@/components/AgentHealthDashboard'; + +type TabType = 'agents' | 'health' | 'bulk'; + +export default function AgentSettingsPage() { + const { agents, selectedAgent, deleteAgent, checkAgentStatus } = useAgents(); + const [activeTab, setActiveTab] = useState('agents'); + const [showAddModal, setShowAddModal] = useState(false); + const [editingAgent, setEditingAgent] = useState(null); + const [checkingStatus, setCheckingStatus] = useState>({}); + + const handleCheckStatus = async (agentId: string) => { + setCheckingStatus(prev => ({ ...prev, [agentId]: true })); + await checkAgentStatus(agentId); + setCheckingStatus(prev => ({ ...prev, [agentId]: false })); + }; + + const handleCheckAllStatus = async () => { + for (const agent of agents) { + await handleCheckStatus(agent.id); + } + }; + + const handleDelete = (agentId: string) => { + if (confirm('Are you sure you want to delete this agent?')) { + deleteAgent(agentId); + } + }; + + const getStatusIcon = (status?: Agent['status']) => { + switch (status) { + case 'online': + return ; + case 'offline': + return ; + case 'checking': + return ; + default: + return ; + } + }; + + const getLocationIcon = (location: Agent['location']) => { + return location === 'on-site' ? ( + + ) : ( + + ); + }; + + const tabs = [ + { id: 'agents' as TabType, label: 'Agents', icon: Server, count: agents.length }, + { id: 'health' as TabType, label: 'Health Monitoring', icon: Activity }, + { id: 'bulk' as TabType, label: 'Bulk Operations', icon: SettingsIcon }, + ]; + + return ( +
[The returned JSX of dashboard/app/settings/agents/page.tsx (new file) was stripped during extraction; only text fragments survive. Recovered structure:]
[Header: back link, "Agent Settings" title, subtitle "Configure and monitor your Traefik log dashboard agents", followed by the tab bar built from the `tabs` array (Agents with count, Health Monitoring, Bulk Operations).]
[Agents tab: quick-stat cards showing {agents.length} "Total Agents", {agents.filter(a => a.status === 'online').length} "Online", and {agents.filter(a => a.status === 'offline').length} "Offline"; an actions bar titled "Your Agents" with controls wired to handleCheckAllStatus and setShowAddModal; an agent list that shows "No Agents Configured" and "Get started by adding your first agent to monitor Traefik logs" when empty, and otherwise renders each agent with its status icon, {agent.name}, a badge ({agent.status === 'online' ? 'Active' : agent.status === 'checking' ? 'Checking' : 'Default Agent'}), the location icon and {agent.location.replace('-', ' ')} label, "Agent # {agent.number}", {agent.url}, and per-agent refresh/edit/delete actions.]
[Setup guide card "Quick Setup Guide" with bullets:
  • Deploy agents on servers where Traefik logs are located
  • Configure unique authentication tokens for secure communication
  • On-site agents: Running on the same network as the dashboard
  • Off-site agents: Running on remote servers or cloud instances
  • Agent numbering is automatically assigned sequentially]
[The 'health' and 'bulk' tabs presumably render the imported AgentHealthDashboard and AgentBulkOperations components (tags stripped); when showAddModal is set, AgentFormModal is shown with agent={editingAgent} and an onClose that clears showAddModal and editingAgent.]
+ ); +} \ No newline at end of file diff --git a/dashboard/app/settings/filters/page.tsx b/dashboard/app/settings/filters/page.tsx new file mode 100644 index 0000000..1f0e31a --- /dev/null +++ b/dashboard/app/settings/filters/page.tsx @@ -0,0 +1,766 @@ +// dashboard/app/settings/filters/page.tsx +'use client'; + +import { useState, ChangeEvent } from 'react'; +import Link from 'next/link'; +import { useFilters } from '@/lib/contexts/FilterContext'; +import { FilterCondition } from '@/lib/types/filter'; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/Card'; +import { Button } from '@/components/ui/button'; +import { Badge } from '@/components/ui/badge'; +import { + ChevronLeft, + Filter, + Plus, + Trash2, + Shield, + Globe, + Activity, + AlertTriangle, + RefreshCw, + Save, + X, +} from 'lucide-react'; + +export default function FilterSettingsPage() { + const { settings, updateSettings, resetSettings, addCustomCondition, removeCustomCondition, updateCustomCondition } = useFilters(); + + const [newIP, setNewIP] = useState(''); + const [newStatusCode, setNewStatusCode] = useState(''); + const [newPath, setNewPath] = useState(''); + const [newCustomHeader, setNewCustomHeader] = useState(''); + const [showCustomConditionForm, setShowCustomConditionForm] = useState(false); + const [saved, setSaved] = useState(false); + + // Custom condition form state + const [customCondition, setCustomCondition] = useState>({ + name: '', + type: 'custom', + field: 'RequestPath', + operator: 'contains', + value: '', + enabled: true, + description: '', + }); + + const handleAddIP = () => { + if (newIP.trim()) { + updateSettings({ + excludedIPs: [...settings.excludedIPs, newIP.trim()], + }); + setNewIP(''); + showSavedIndicator(); + } + }; + + const handleRemoveIP = (ip: string) => { + updateSettings({ + excludedIPs: settings.excludedIPs.filter((i) => i !== ip), + }); + showSavedIndicator(); + }; + + const handleAddStatusCode = () => { + const code = parseInt(newStatusCode); + if (!isNaN(code) && code >= 100 && code < 600) { + updateSettings({ + excludeStatusCodes: [...settings.excludeStatusCodes, code], + }); + setNewStatusCode(''); + showSavedIndicator(); + } + }; + + const handleRemoveStatusCode = (code: number) => { + updateSettings({ + excludeStatusCodes: settings.excludeStatusCodes.filter((c) => c !== code), + }); + showSavedIndicator(); + }; + + const handleAddPath = () => { + if (newPath.trim()) { + updateSettings({ + excludePaths: [...settings.excludePaths, newPath.trim()], + }); + setNewPath(''); + showSavedIndicator(); + } + }; + + const handleRemovePath = (path: string) => { + updateSettings({ + excludePaths: settings.excludePaths.filter((p) => p !== path), + }); + showSavedIndicator(); + }; + + const handleAddCustomHeader = () => { + if (newCustomHeader.trim()) { + updateSettings({ + proxySettings: { + ...settings.proxySettings, + customHeaders: [...settings.proxySettings.customHeaders, newCustomHeader.trim()], + }, + }); + setNewCustomHeader(''); + showSavedIndicator(); + } + }; + + const handleRemoveCustomHeader = (header: string) => { + updateSettings({ + proxySettings: { + ...settings.proxySettings, + customHeaders: settings.proxySettings.customHeaders.filter((h) => h !== header), + }, + }); + showSavedIndicator(); + }; + + const handleAddCustomCondition = () => { + if (customCondition.name && customCondition.field && customCondition.value) { + addCustomCondition({ + id: Date.now().toString(), + name: customCondition.name, + enabled: 
customCondition.enabled || true, + type: customCondition.type || 'custom', + field: customCondition.field, + operator: customCondition.operator || 'contains', + value: customCondition.value, + description: customCondition.description, + } as FilterCondition); + + setCustomCondition({ + name: '', + type: 'custom', + field: 'RequestPath', + operator: 'contains', + value: '', + enabled: true, + description: '', + }); + setShowCustomConditionForm(false); + showSavedIndicator(); + } + }; + + const showSavedIndicator = () => { + setSaved(true); + setTimeout(() => setSaved(false), 2000); + }; + + const handleReset = () => { + if (confirm('Are you sure you want to reset all filter settings to defaults? This cannot be undone.')) { + resetSettings(); + showSavedIndicator(); + } + }; + + return ( +
+ {/* Header */} +
+
+
+
+ +
+

+ + Log Filters & Settings +

+

+ Configure filtering rules to control which logs appear in your dashboard +

+
+
+
+ {saved && ( + + + Saved + + )} + +
+
+
+
+ +
+
+ {/* IP Filtering */} + + + + + IP Address Filtering + + + Exclude specific IPs or IP categories from the dashboard + + + + {/* Toggle Options */} +
+ + + +
+ + {/* Excluded IPs List */} +
+ +
+ ) => setNewIP(e.target.value)} + onKeyPress={(e: React.KeyboardEvent) => e.key === 'Enter' && handleAddIP()} + placeholder="Enter IP address (e.g., 192.168.1.1)" + className="flex-1 px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> + +
+
+ {settings.excludedIPs.length === 0 ? ( +

+ No IPs excluded +

+ ) : ( + settings.excludedIPs.map((ip) => ( +
+ {ip} + +
+ )) + )} +
+
+
+
+ + {/* Proxy & Real IP Settings */} + + + + + Proxy & Real IP Detection + + + Configure how to extract the real client IP from proxy headers + + + +
+ + + + + +
+ + {/* Custom Headers */} +
+ +
+ ) => setNewCustomHeader(e.target.value)} + onKeyPress={(e: React.KeyboardEvent) => e.key === 'Enter' && handleAddCustomHeader()} + placeholder="Header name (e.g., X-Custom-IP)" + className="flex-1 px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> + +
+
+ {settings.proxySettings.customHeaders.map((header) => ( +
+ {header} + +
+ ))} +
+
+
+
+ + {/* Status Code Filtering */} + + + + + Status Code Filtering + + + Exclude specific HTTP status codes from appearing in the dashboard + + + +
+ ) => setNewStatusCode(e.target.value)} + onKeyPress={(e: React.KeyboardEvent) => e.key === 'Enter' && handleAddStatusCode()} + placeholder="Status code (e.g., 404)" + min="100" + max="599" + className="flex-1 px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> + +
+
+ {settings.excludeStatusCodes.length === 0 ? ( +

+ No status codes excluded +

+ ) : ( + settings.excludeStatusCodes.map((code) => ( +
+ {code} + +
+ )) + )} +
+
+
+ + {/* Bot & Path Filtering */} + + + + + Bot & Path Filtering + + + Filter out bots and specific request paths + + + + + +
+ +
+ ) => setNewPath(e.target.value)} + onKeyPress={(e: React.KeyboardEvent) => e.key === 'Enter' && handleAddPath()} + placeholder="Path to exclude (e.g., /health)" + className="flex-1 px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> + +
+
+ {settings.excludePaths.length === 0 ? ( +

+ No paths excluded +

+ ) : ( + settings.excludePaths.map((path) => ( +
+ {path} + +
+ )) + )} +
+
+
+
+ + {/* Custom Conditions */} + + +
+
+ + + Custom Filter Conditions + + + Create custom rules to filter logs based on any field + +
+ +
+
+ + {/* Custom Condition Form */} + {showCustomConditionForm && ( +
+

+ New Custom Condition +

+
+
+ + ) => setCustomCondition({ ...customCondition, name: e.target.value })} + placeholder="e.g., Exclude Health Checks" + className="w-full px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> +
+
+ + +
+
+ + +
+
+ + ) => setCustomCondition({ ...customCondition, value: e.target.value })} + placeholder="e.g., /health" + className="w-full px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> +
+
+ + ) => setCustomCondition({ ...customCondition, description: e.target.value })} + placeholder="Brief description of this filter" + className="w-full px-3 py-2 border border-gray-300 rounded-lg text-sm" + /> +
+
+
+ + +
+
+ )} + + {/* Custom Conditions List */} +
+ {settings.customConditions.length === 0 ? ( +

+ No custom conditions defined +

+ ) : ( + settings.customConditions.map((condition) => ( +
+
+
+
+

+ {condition.name} +

+ + {condition.enabled ? 'Active' : 'Disabled'} + +
+

+ {condition.field} {condition.operator.replace(/_/g, ' ')} "{condition.value}" +

+ {condition.description && ( +

+ {condition.description} +

+ )} +
+
+ + +
+
+
+ )) + )} +
+
+
+
+ + {/* Info Card */} + + +
+ +
+

+ How Filters Work +

+
+
+ • All filter settings are applied at the stream level before logs reach the dashboard
+ • Filters are saved automatically and persist across browser sessions
+ • Proxy headers are checked in order of priority: CF-Connecting-IP → X-Real-IP → X-Forwarded-For → Custom Headers
+ • Custom conditions support regex patterns for advanced filtering
+ • Bot detection uses common user agent patterns (GoogleBot, BingBot, etc.)
+
+
+
+
+
+
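The "How Filters Work" card above describes the intended behaviour in prose; a minimal sketch of what that evaluation could look like on the stream side follows. The `LogEntry` shape, the helper names, and the extra `equals`/`regex` operators are assumptions — only the header priority order and the `contains` operator appear in this diff.

```typescript
// Sketch only: hypothetical log shape and evaluation order, not the shipped implementation.
interface LogEntry {
  headers: Record<string, string | undefined>;
  clientIP: string;
  requestPath: string;
  status: number;
}

// Proxy headers are checked in the priority order listed in the card above.
function resolveClientIP(entry: LogEntry, customHeaders: string[]): string {
  const priority = ['CF-Connecting-IP', 'X-Real-IP', 'X-Forwarded-For', ...customHeaders];
  for (const header of priority) {
    const value = entry.headers[header];
    if (value) {
      // X-Forwarded-For may carry a chain; the first hop is the original client.
      return value.split(',')[0].trim();
    }
  }
  return entry.clientIP;
}

// Custom conditions support substring checks and (per the card) regex patterns.
function matchesCondition(fieldValue: string, operator: string, value: string): boolean {
  switch (operator) {
    case 'contains':
      return fieldValue.includes(value);
    case 'equals':
      return fieldValue === value;
    case 'regex':
      try {
        return new RegExp(value).test(fieldValue);
      } catch {
        return false; // invalid pattern: treat as non-matching rather than throwing
      }
    default:
      return false;
  }
}
```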
+ ); +} \ No newline at end of file diff --git a/dashboard/components/AgentBulkOperations.tsx b/dashboard/components/AgentBulkOperations.tsx new file mode 100644 index 0000000..df6fa2f --- /dev/null +++ b/dashboard/components/AgentBulkOperations.tsx @@ -0,0 +1,396 @@ +'use client'; + +import { useState } from 'react'; +import { useAgents } from '@/lib/contexts/AgentContext'; +import { AgentImportExport } from '@/lib/agent-import-export'; +import { Button } from '@/components/ui/button'; +import { Badge } from '@/components/ui/badge'; +import { + Download, + Upload, + Copy, + Clipboard, + RefreshCw, + CheckCircle2, + AlertCircle, + X, +} from 'lucide-react'; + +interface BulkOperationsProps { + onClose?: () => void; +} + +export default function AgentBulkOperations({ onClose }: BulkOperationsProps) { + const { agents, refreshAgents, checkAgentStatus } = useAgents(); + const [isProcessing, setIsProcessing] = useState(false); + const [result, setResult] = useState<{ + type: 'success' | 'error' | 'info'; + message: string; + details?: string[]; + } | null>(null); + + const handleExport = () => { + try { + AgentImportExport.downloadAgentConfig(); + setResult({ + type: 'success', + message: 'Configuration exported successfully', + }); + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to export configuration', + details: [error instanceof Error ? error.message : String(error)], + }); + } + }; + + const handleCopyToClipboard = async () => { + try { + const success = await AgentImportExport.copyToClipboard(); + if (success) { + setResult({ + type: 'success', + message: 'Configuration copied to clipboard', + }); + } else { + setResult({ + type: 'error', + message: 'Failed to copy to clipboard', + }); + } + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to copy to clipboard', + details: [error instanceof Error ? error.message : String(error)], + }); + } + }; + + const handleImportFile = async (event: React.ChangeEvent) => { + const file = event.target.files?.[0]; + if (!file) return; + + setIsProcessing(true); + setResult(null); + + try { + const validation = await AgentImportExport.validateImportFile(file); + if (!validation.valid) { + setResult({ + type: 'error', + message: 'Invalid configuration file', + details: validation.errors, + }); + setIsProcessing(false); + return; + } + + const importResult = await AgentImportExport.importFromFile(file, { + mergeMode: 'merge', + }); + + if (importResult.success) { + refreshAgents(); + setResult({ + type: 'success', + message: `Imported ${importResult.imported} agent(s)`, + details: importResult.skipped > 0 ? [`Skipped ${importResult.skipped} existing agent(s)`] : undefined, + }); + } else { + setResult({ + type: 'error', + message: 'Import completed with errors', + details: importResult.errors, + }); + } + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to import configuration', + details: [error instanceof Error ? error.message : String(error)], + }); + } finally { + setIsProcessing(false); + event.target.value = ''; + } + }; + + const handleImportFromClipboard = async () => { + setIsProcessing(true); + setResult(null); + + try { + const importResult = await AgentImportExport.importFromClipboard({ + mergeMode: 'merge', + }); + + if (importResult.success) { + refreshAgents(); + setResult({ + type: 'success', + message: `Imported ${importResult.imported} agent(s) from clipboard`, + details: importResult.skipped > 0 ? 
[`Skipped ${importResult.skipped} existing agent(s)`] : undefined, + }); + } else { + setResult({ + type: 'error', + message: 'Import failed', + details: importResult.errors, + }); + } + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to import from clipboard', + details: [error instanceof Error ? error.message : String(error)], + }); + } finally { + setIsProcessing(false); + } + }; + + const handleCheckAllStatus = async () => { + setIsProcessing(true); + setResult(null); + + try { + const results = await Promise.all( + agents.map(agent => checkAgentStatus(agent.id)) + ); + + const onlineCount = results.filter(Boolean).length; + + setResult({ + type: 'info', + message: `Status check complete`, + details: [ + `${onlineCount} of ${agents.length} agent(s) online`, + `${agents.length - onlineCount} offline`, + ], + }); + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to check agent status', + details: [error instanceof Error ? error.message : String(error)], + }); + } finally { + setIsProcessing(false); + } + }; + + const handleCreateBackup = () => { + try { + AgentImportExport.createBackup(); + setResult({ + type: 'success', + message: 'Backup created successfully', + details: ['Backup file downloaded to your computer'], + }); + } catch (error) { + setResult({ + type: 'error', + message: 'Failed to create backup', + details: [error instanceof Error ? error.message : String(error)], + }); + } + }; + + return ( +
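For orientation, the call sites above imply roughly the following surface for `AgentImportExport` (the module itself is not included in this diff); the return shapes and the exact mode identifiers — taken from the Import Modes card rendered below — are assumptions:

```typescript
// Hypothetical type-level summary of the AgentImportExport helpers used above.
export interface ImportResult {
  success: boolean;
  imported: number;
  skipped: number;
  errors: string[];
}

export interface ImportOptions {
  mergeMode: 'merge' | 'replace' | 'skip-existing'; // identifiers assumed from the Import Modes card
}

export declare class AgentImportExport {
  static downloadAgentConfig(): void;                 // triggers a JSON download
  static copyToClipboard(): Promise<boolean>;         // resolves false on failure
  static validateImportFile(file: File): Promise<{ valid: boolean; errors: string[] }>;
  static importFromFile(file: File, options: ImportOptions): Promise<ImportResult>;
  static importFromClipboard(options: ImportOptions): Promise<ImportResult>;
  static createBackup(): void;                        // downloads a backup file
}
```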
+ {/* Header */} +
+
+

+ Bulk Operations +

+

+ Manage multiple agents at once +

+
+ {onClose && ( + + )} +
+ + {/* Result Message */} + {result && ( +
+
+ {result.type === 'success' ? ( + + ) : result.type === 'error' ? ( + + ) : ( + + )} +
+

+ {result.message} +

+ {result.details && result.details.length > 0 && ( +
+ {result.details.map((detail, index) => ( +
+ • {detail} +
+ ))} +
+ )} +
+
+
+ )} + + {/* Operations Grid */} +
+ {/* Export Operations */} +
+

+ + Export Configuration +

+ +
+ + + + + +
+
+ + {/* Import Operations */} +
+

+ + Import Configuration +

+ +
+ + + +
+
+
+ + {/* Status Operations */} +
+

+ + Status Operations +

+ + +
+ + {/* Import Modes Info */} +
+
+ Import Modes +
+
+
+ • Merge: Adds new agents, updates existing ones
+ • Replace: Removes all agents and imports new ones
+ • Skip Existing: Only adds new agents, keeps existing ones unchanged
+

+ Currently using: + + Merge Mode + +

+
+
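As an illustration of the three modes listed in this card, the reconciliation could look like the sketch below. Keying agents by URL and the mode identifiers are assumptions; only the merge/replace/skip-existing semantics come from the card text.

```typescript
// Illustrative only: how imported agents might be reconciled with existing ones.
interface ImportedAgent {
  name: string;
  url: string;
  token: string;
}

type MergeMode = 'merge' | 'replace' | 'skip-existing';

function reconcile(existing: ImportedAgent[], incoming: ImportedAgent[], mode: MergeMode): ImportedAgent[] {
  if (mode === 'replace') {
    // Replace: drop everything and take the imported set as-is.
    return [...incoming];
  }
  const byUrl = new Map<string, ImportedAgent>();
  for (const agent of existing) byUrl.set(agent.url, agent);
  for (const agent of incoming) {
    if (byUrl.has(agent.url)) {
      if (mode === 'merge') {
        // Merge: update the existing entry with imported values.
        byUrl.set(agent.url, { ...byUrl.get(agent.url)!, ...agent });
      }
      // Skip Existing: leave the current entry untouched.
    } else {
      // Both merge and skip-existing add agents that are not present yet.
      byUrl.set(agent.url, agent);
    }
  }
  return Array.from(byUrl.values());
}
```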
+ ); +} \ No newline at end of file diff --git a/dashboard/components/AgentFormModal.tsx b/dashboard/components/AgentFormModal.tsx new file mode 100644 index 0000000..83c6c93 --- /dev/null +++ b/dashboard/components/AgentFormModal.tsx @@ -0,0 +1,281 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { Agent } from '@/lib/types/agent'; +import { useAgents } from '@/lib/contexts/AgentContext'; +import { AgentConfigManager } from '@/lib/agent-config-manager'; +import { Button } from '@/components/ui/button'; +import { X, AlertCircle } from 'lucide-react'; + +interface AgentFormModalProps { + isOpen: boolean; + onClose: () => void; + agent?: Agent | null; +} + +export default function AgentFormModal({ isOpen, onClose, agent }: AgentFormModalProps) { + const { addAgent, updateAgent } = useAgents(); + const [errors, setErrors] = useState([]); + + const [formData, setFormData] = useState({ + name: '', + url: '', + token: '', + location: 'on-site' as Agent['location'], + description: '', + tags: '', + }); + + useEffect(() => { + if (agent) { + setFormData({ + name: agent.name, + url: agent.url, + token: agent.token, + location: agent.location, + description: agent.description || '', + tags: agent.tags?.join(', ') || '', + }); + } else { + setFormData({ + name: '', + url: 'http://traefik-agent:5000', + token: '', + location: 'on-site', + description: '', + tags: '', + }); + } + setErrors([]); + }, [agent, isOpen]); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + setErrors([]); + + const agentData = { + name: formData.name.trim(), + url: formData.url.trim(), + token: formData.token.trim(), + location: formData.location, + description: formData.description.trim() || undefined, + tags: formData.tags + .split(',') + .map(t => t.trim()) + .filter(Boolean), + }; + + const validation = AgentConfigManager.validateAgent(agentData); + if (!validation.valid) { + setErrors(validation.errors); + return; + } + + try { + if (agent) { + updateAgent(agent.id, agentData); + } else { + addAgent(agentData); + } + onClose(); + } catch (error) { + setErrors([error instanceof Error ? error.message : 'Failed to save agent']); + } + }; + + if (!isOpen) return null; + + return ( +
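`AgentConfigManager.validateAgent` is not part of this diff; judging from the required inputs and the help text further down (a name, a URL that includes http:// or https://, and the agent's auth token), its checks probably resemble this sketch — the exact rules and messages are assumptions:

```typescript
// Hypothetical validation mirroring what the form appears to require.
interface AgentInput {
  name: string;
  url: string;
  token: string;
  location: 'on-site' | 'off-site';
  description?: string;
  tags: string[];
}

function validateAgent(agent: AgentInput): { valid: boolean; errors: string[] } {
  const errors: string[] = [];
  if (!agent.name) {
    errors.push('Agent name is required');
  }
  if (!/^https?:\/\//.test(agent.url)) {
    errors.push('Agent URL must include http:// or https://');
  } else {
    try {
      new URL(agent.url); // reject URLs that cannot be parsed at all
    } catch {
      errors.push('Agent URL is not a valid URL');
    }
  }
  if (!agent.token) {
    errors.push('Authentication token is required');
  }
  return { valid: errors.length === 0, errors };
}
```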
+ {/* Backdrop */} +
+ + {/* Modal */} +
+ {/* Header */} +
+

+ {agent ? 'Edit Agent' : 'Add New Agent'} +

+ +
+ + {/* Form */} +
+ {/* Errors */} + {errors.length > 0 && ( +
+
+ +
+

+ Please fix the following errors: +

+
+ {errors.map((error, index) => ( +
+ {error}
+ ))} +
+
+
+
+ )} + + {/* Agent Name */} +
+ + setFormData({ ...formData, name: e.target.value })} + placeholder="Production Server" + className="w-full px-4 py-2 border border-gray-300 rounded-lg bg-white text-gray-900 focus:ring-2 focus:ring-red-500 focus:border-transparent" + required + /> +

+ A friendly name to identify this agent +

+
+ + {/* Agent URL */} +
+ + setFormData({ ...formData, url: e.target.value })} + placeholder="http://traefik-agent:5000" + className="w-full px-4 py-2 border border-gray-300 rounded-lg bg-white text-gray-900 focus:ring-2 focus:ring-red-500 focus:border-transparent font-mono text-sm" + required + /> +

+ The base URL where the agent is running (include http:// or https://) +

+
+ + {/* Authentication Token */} +
+ + setFormData({ ...formData, token: e.target.value })} + placeholder="your-secret-token" + className="w-full px-4 py-2 border border-gray-300 rounded-lg bg-white text-gray-900 focus:ring-2 focus:ring-red-500 focus:border-transparent font-mono text-sm" + required + /> +

+ The TRAEFIK_LOG_DASHBOARD_AUTH_TOKEN configured on the agent +

+
+ + {/* Location */} +
+ +
+ + +
+

+ On-site: Same network · Off-site: Remote server or cloud +

+
+ + {/* Description */} +
+ +