diff --git a/.gitignore b/.gitignore index 9d02db8..238aa62 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ bin/ *.dll *.so *.dylib +metrics-analyzer # Test artifacts coverage.out @@ -27,6 +28,9 @@ go.work.sum *.tmp *.log +# Frontend build output +dist/ + # Demo GIFs (hosted on vhs.charm.sh) demo/*.gif diff --git a/Makefile b/Makefile index ccf92cc..76f40cf 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,22 @@ -.PHONY: build test lint clean validate-rules +.PHONY: build test lint clean validate-rules build-web release-build + +VERSION ?= $(shell cat VERSION 2>/dev/null || echo dev) +BUILD_TIME ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ") +WEB_LDFLAGS := -X 'main.buildVersion=$(VERSION)' -X 'main.buildTime=$(BUILD_TIME)' +RELEASE_OS ?= linux +RELEASE_ARCH ?= amd64 build: go build -o bin/metrics-analyzer ./cmd/metrics-analyzer +build-web: + go build -ldflags "$(WEB_LDFLAGS)" -o bin/web-server ./web/server + +release-build: + mkdir -p dist + GOOS=$(RELEASE_OS) GOARCH=$(RELEASE_ARCH) go build -o dist/metrics-analyzer-$(VERSION)-$(RELEASE_OS)-$(RELEASE_ARCH) ./cmd/metrics-analyzer + GOOS=$(RELEASE_OS) GOARCH=$(RELEASE_ARCH) go build -ldflags "$(WEB_LDFLAGS)" -o dist/web-server-$(VERSION)-$(RELEASE_OS)-$(RELEASE_ARCH) ./web/server + test: go test -v ./... 
diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..4e379d2 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.0.2 diff --git a/cmd/metrics-analyzer/main.go b/cmd/metrics-analyzer/main.go index 8029b65..52d9705 100644 --- a/cmd/metrics-analyzer/main.go +++ b/cmd/metrics-analyzer/main.go @@ -4,12 +4,9 @@ import ( "flag" "fmt" "os" - "path/filepath" "strings" - "github.com/stackrox/sensor-metrics-analyzer/internal/evaluator" - "github.com/stackrox/sensor-metrics-analyzer/internal/loadlevel" - "github.com/stackrox/sensor-metrics-analyzer/internal/parser" + "github.com/stackrox/sensor-metrics-analyzer/internal/analyzer" "github.com/stackrox/sensor-metrics-analyzer/internal/reporter" "github.com/stackrox/sensor-metrics-analyzer/internal/rules" "github.com/stackrox/sensor-metrics-analyzer/internal/tui" @@ -87,61 +84,18 @@ func analyzeCommand() { } } - // Extract cluster name from filename if not provided - if *clusterName == "" { - *clusterName = extractClusterName(metricsFile) - } - - // Load load detection rules - fmt.Fprintf(os.Stderr, "Loading load detection rules from %s...\n", *loadLevelDir) - loadRules, err := rules.LoadLoadDetectionRules(*loadLevelDir) - if err != nil { - fmt.Fprintf(os.Stderr, "Warning: Failed to load load detection rules: %v\n", err) - loadRules = []rules.LoadDetectionRule{} - } - - // Load evaluation rules - fmt.Fprintf(os.Stderr, "Loading rules from %s...\n", *rulesDir) - rulesList, err := rules.LoadRules(*rulesDir) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to load rules: %v\n", err) - os.Exit(1) - } - fmt.Fprintf(os.Stderr, "Loaded %d rules\n", len(rulesList)) - - // Parse metrics - fmt.Fprintf(os.Stderr, "Parsing metrics from %s...\n", metricsFile) - metrics, err := parser.ParseFile(metricsFile) + report, err := analyzer.AnalyzeFile(metricsFile, analyzer.Options{ + RulesDir: *rulesDir, + LoadLevelDir: *loadLevelDir, + ClusterName: *clusterName, + LoadLevelOverride: *loadLevelOverride, + ACSVersionOverride: 
*acsVersionOverride, + Logger: os.Stderr, + }) if err != nil { - fmt.Fprintf(os.Stderr, "Failed to parse metrics: %v\n", err) + fmt.Fprintf(os.Stderr, "Failed to analyze metrics: %v\n", err) os.Exit(1) } - fmt.Fprintf(os.Stderr, "Parsed %d metrics\n", len(metrics)) - - // Detect ACS version - acsVersion := *acsVersionOverride - if acsVersion == "" { - if detected, ok := metrics.DetectACSVersion(); ok { - acsVersion = detected - fmt.Fprintf(os.Stderr, "Detected ACS version: %s\n", acsVersion) - } else { - fmt.Fprintf(os.Stderr, "Warning: Could not detect ACS version\n") - } - } - - // Detect load level - loadDetector := loadlevel.NewDetector(loadRules) - detectedLoadLevel, err := loadlevel.DetectWithOverride(metrics, loadDetector, rules.LoadLevel(*loadLevelOverride)) - if err != nil { - fmt.Fprintf(os.Stderr, "Warning: Load level detection failed: %v\n", err) - detectedLoadLevel = rules.LoadLevelMedium - } - fmt.Fprintf(os.Stderr, "Detected load level: %s\n", detectedLoadLevel) - - // Evaluate all rules - fmt.Fprintf(os.Stderr, "Evaluating rules...\n") - report := evaluator.EvaluateAllRules(rulesList, metrics, detectedLoadLevel, acsVersion) - report.ClusterName = *clusterName // Generate report var outputContent string @@ -165,10 +119,12 @@ func analyzeCommand() { return } case "markdown": - outputContent = reporter.GenerateMarkdown(report, *templatePath) - if outputContent == "" { - fmt.Fprintf(os.Stderr, "Warning: Markdown generation returned empty content\n") + markdown, mdErr := reporter.GenerateMarkdown(report, *templatePath) + if mdErr != nil { + fmt.Fprintf(os.Stderr, "Markdown generation failed: %v\n", mdErr) + os.Exit(1) } + outputContent = markdown default: fmt.Fprintf(os.Stderr, "Unknown format: %s\n", *format) os.Exit(1) @@ -255,13 +211,7 @@ func listRulesCommand() { } func extractClusterName(filename string) string { - base := filepath.Base(filename) - // Remove extension - name := strings.TrimSuffix(base, filepath.Ext(base)) - // Remove common 
prefixes/suffixes - name = strings.TrimSuffix(name, "-sensor-metrics") - name = strings.TrimSuffix(name, "-metrics") - return name + return analyzer.ExtractClusterName(filename) } func printUsage() { diff --git a/go.mod b/go.mod index 46977b0..4f2fb62 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/charmbracelet/lipgloss v1.1.0 github.com/fatih/color v1.18.0 github.com/jedib0t/go-pretty/v6 v6.7.1 + github.com/stretchr/testify v1.11.1 golang.org/x/term v0.29.0 ) @@ -19,6 +20,7 @@ require ( github.com/charmbracelet/x/ansi v0.10.1 // indirect github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -28,8 +30,10 @@ require ( github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/termenv v0.16.0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect golang.org/x/sys v0.36.0 // indirect golang.org/x/text v0.22.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 0f2da70..447a254 100644 --- a/go.sum +++ b/go.sum @@ -48,8 +48,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod 
h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561 h1:MDc5xs78ZrZr3HMQugiXOAkSZtfTpbJLDr/lwfgO53E= @@ -63,5 +63,7 @@ golang.org/x/term v0.29.0 h1:L6pJp37ocefwRRtYPKSWOWzOtWSxVajvz2ldH/xi3iU= golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/analyzer/analyzer.go b/internal/analyzer/analyzer.go new file mode 100644 index 0000000..4aaaa3d --- /dev/null +++ b/internal/analyzer/analyzer.go @@ -0,0 +1,100 @@ +package analyzer + +import ( + "fmt" + "io" + "path/filepath" + "strings" + + "github.com/stackrox/sensor-metrics-analyzer/internal/evaluator" + "github.com/stackrox/sensor-metrics-analyzer/internal/loadlevel" + "github.com/stackrox/sensor-metrics-analyzer/internal/parser" + "github.com/stackrox/sensor-metrics-analyzer/internal/rules" +) + +// Options controls analysis behavior and logging. 
+type Options struct { + RulesDir string + LoadLevelDir string + ClusterName string + LoadLevelOverride string + ACSVersionOverride string + Logger io.Writer +} + +// AnalyzeFile parses metrics and evaluates rules, returning the analysis report. +func AnalyzeFile(metricsFile string, opts Options) (rules.AnalysisReport, error) { + logOut := opts.Logger + if logOut == nil { + logOut = io.Discard + } + + rulesDir := opts.RulesDir + if rulesDir == "" { + return rules.AnalysisReport{}, fmt.Errorf("rules directory is required") + } + + loadLevelDir := opts.LoadLevelDir + if loadLevelDir == "" { + loadLevelDir = filepath.Join(rulesDir, "load-level") + } + + clusterName := opts.ClusterName + if clusterName == "" { + clusterName = ExtractClusterName(metricsFile) + } + + fmt.Fprintf(logOut, "Loading load detection rules from %s...\n", loadLevelDir) + loadRules, err := rules.LoadLoadDetectionRules(loadLevelDir) + if err != nil { + fmt.Fprintf(logOut, "Warning: Failed to load load detection rules: %v\n", err) + loadRules = []rules.LoadDetectionRule{} + } + + fmt.Fprintf(logOut, "Loading rules from %s...\n", rulesDir) + rulesList, err := rules.LoadRules(rulesDir) + if err != nil { + return rules.AnalysisReport{}, fmt.Errorf("failed to load rules: %w", err) + } + fmt.Fprintf(logOut, "Loaded %d rules\n", len(rulesList)) + + fmt.Fprintf(logOut, "Parsing metrics from %s...\n", metricsFile) + metrics, err := parser.ParseFile(metricsFile) + if err != nil { + return rules.AnalysisReport{}, fmt.Errorf("failed to parse metrics: %w", err) + } + fmt.Fprintf(logOut, "Parsed %d metrics\n", len(metrics)) + + acsVersion := opts.ACSVersionOverride + if acsVersion == "" { + if detected, ok := metrics.DetectACSVersion(); ok { + acsVersion = detected + fmt.Fprintf(logOut, "Detected ACS version: %s\n", acsVersion) + } else { + fmt.Fprintf(logOut, "Warning: Could not detect ACS version\n") + } + } + + loadDetector := loadlevel.NewDetector(loadRules) + detectedLoadLevel, err := 
loadlevel.DetectWithOverride(metrics, loadDetector, rules.LoadLevel(opts.LoadLevelOverride)) + if err != nil { + fmt.Fprintf(logOut, "Warning: Load level detection failed: %v\n", err) + detectedLoadLevel = rules.LoadLevelMedium + } + fmt.Fprintf(logOut, "Detected load level: %s\n", detectedLoadLevel) + + fmt.Fprintf(logOut, "Evaluating rules...\n") + report := evaluator.EvaluateAllRules(rulesList, metrics, detectedLoadLevel, acsVersion) + report.ClusterName = clusterName + + return report, nil +} + +// ExtractClusterName derives a cluster name from a file name. +func ExtractClusterName(filename string) string { + base := filepath.Base(filename) + name := strings.TrimSuffix(base, filepath.Ext(base)) + name = strings.TrimSuffix(name, "-sensor-metrics") + name = strings.TrimSuffix(name, "-metrics") + return name +} diff --git a/internal/analyzer/analyzer_test.go b/internal/analyzer/analyzer_test.go new file mode 100644 index 0000000..d57834d --- /dev/null +++ b/internal/analyzer/analyzer_test.go @@ -0,0 +1,37 @@ +package analyzer + +import ( + "bytes" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAnalyzeFile(t *testing.T) { + t.Parallel() + + _, thisFile, _, ok := runtime.Caller(0) + if !ok { + t.Fatal("AnalyzeFile() failed to resolve test file path") + } + repoRoot := filepath.Dir(filepath.Dir(filepath.Dir(thisFile))) + metricsFile := filepath.Join(repoRoot, "testdata", "fixtures", "sample_metrics.txt") + rulesDir := filepath.Join(repoRoot, "automated-rules") + + var logs bytes.Buffer + report, err := AnalyzeFile(metricsFile, Options{ + RulesDir: rulesDir, + Logger: &logs, + }) + assert.NoError(t, err) + + assert.NotEmpty(t, report.ClusterName, "AnalyzeFile() cluster name is empty") + assert.False(t, report.Timestamp.IsZero(), "AnalyzeFile() timestamp is zero") + assert.NotEmpty(t, report.LoadLevel, "AnalyzeFile() load level is empty") + assert.NotEmpty(t, report.Results, "AnalyzeFile() returned no results") + 
assert.Equal(t, report.Summary.TotalAnalyzed, len(report.Results), "AnalyzeFile() summary mismatch") + statusTotal := report.Summary.RedCount + report.Summary.YellowCount + report.Summary.GreenCount + assert.LessOrEqual(t, statusTotal, report.Summary.TotalAnalyzed, "AnalyzeFile() summary counts exceed total") +} diff --git a/internal/reporter/markdown.go b/internal/reporter/markdown.go index 0c1e7dd..ea990d2 100644 --- a/internal/reporter/markdown.go +++ b/internal/reporter/markdown.go @@ -7,96 +7,21 @@ import ( "github.com/stackrox/sensor-metrics-analyzer/internal/rules" ) -// GenerateMarkdown creates a markdown report from analysis results -func GenerateMarkdown(report rules.AnalysisReport, templatePath string) string { - // Try to use template if available - if templatePath != "" { - result, err := GenerateMarkdownFromTemplate(report, templatePath) - if err == nil && result != "" { - return result - } - // Fall back to default if template fails (don't log error, just fall back) - } - - // Default markdown generation (fallback) - return generateMarkdownDefault(report) -} - -// generateMarkdownDefault generates markdown without template (fallback) -func generateMarkdownDefault(report rules.AnalysisReport) string { - var result string - - // Header - result += "# Automated Metrics Analysis Report\n\n" - result += "**Cluster:** " + report.ClusterName + "\n" - result += "**ACS Version:** " + report.ACSVersion + "\n" - result += "**Load Level:** " + string(report.LoadLevel) + "\n" - result += "**Generated:** " + report.Timestamp.Format("2006-01-02 15:04:05") + "\n\n" - - // Summary - result += "## Summary\n\n" - result += "- 🔴 **RED:** " + formatInt(report.Summary.RedCount) + " metrics\n" - result += "- 🟡 **YELLOW:** " + formatInt(report.Summary.YellowCount) + " metrics\n" - result += "- 🟢 **GREEN:** " + formatInt(report.Summary.GreenCount) + " metrics\n\n" - - // Critical Issues (RED metrics) - redResults := filterByStatus(report.Results, rules.StatusRed) - if 
len(redResults) > 0 { - result += "## 🔴 Critical Issues\n\n" - for _, r := range redResults { - result += "### " + r.RuleName + "\n\n" - result += "**Status:** RED\n" - result += "**Message:** " + r.Message + "\n" - if len(r.Details) > 0 { - result += "**Details:**\n" - for _, detail := range r.Details { - result += "- " + detail + "\n" - } - } - if r.PotentialActionUser != "" { - result += "**Potential action:** " + r.PotentialActionUser + "\n" - } - if r.PotentialActionDeveloper != "" { - result += "**Potential action (developer):** " + r.PotentialActionDeveloper + "\n" - } - result += "\n" - } +// GenerateMarkdown creates a markdown report from analysis results. +// The markdown template is the single source of truth; if it is missing +// or fails to render, return an error. +func GenerateMarkdown(report rules.AnalysisReport, templatePath string) (string, error) { + if templatePath == "" { + return "", fmt.Errorf("markdown template path is empty") } - - // Warnings (YELLOW metrics) - yellowResults := filterByStatus(report.Results, rules.StatusYellow) - if len(yellowResults) > 0 { - result += "## 🟡 Warnings\n\n" - for _, r := range yellowResults { - result += "### " + r.RuleName + "\n\n" - result += "**Status:** YELLOW\n" - result += "**Message:** " + r.Message + "\n" - if len(r.Details) > 0 { - result += "**Details:**\n" - for _, detail := range r.Details { - result += "- " + detail + "\n" - } - } - if r.PotentialActionUser != "" { - result += "**Potential action:** " + r.PotentialActionUser + "\n" - } - if r.PotentialActionDeveloper != "" { - result += "**Potential action (developer):** " + r.PotentialActionDeveloper + "\n" - } - result += "\n" - } + result, err := GenerateMarkdownFromTemplate(report, templatePath) + if err != nil { + return "", err } - - // Healthy Metrics (GREEN) - greenResults := filterByStatus(report.Results, rules.StatusGreen) - if len(greenResults) > 0 { - result += "## 🟢 Healthy Metrics\n\n" - for _, r := range greenResults { - result += 
"- **" + r.RuleName + ":** " + r.Message + "\n" - } + if result == "" { + return "", fmt.Errorf("markdown template returned empty content") } - - return result + return result, nil } func filterByStatus(results []rules.EvaluationResult, status rules.Status) []rules.EvaluationResult { @@ -114,8 +39,3 @@ func filterByStatus(results []rules.EvaluationResult, status rules.Status) []rul return filtered } - -func formatInt(i int) string { - // Use fmt.Sprintf for simplicity - return fmt.Sprintf("%d", i) -} diff --git a/templates/markdown.tmpl b/templates/markdown.tmpl index a909ae2..7353844 100644 --- a/templates/markdown.tmpl +++ b/templates/markdown.tmpl @@ -15,15 +15,31 @@ ## 🔴 Critical Issues -{{ range .RedResults }} -### {{.RuleName}} -**Status:** RED -**Message:** {{.Message}} -{{ if .PotentialActionUser }} -**Potential action:** {{.PotentialActionUser}} +{{ range $i, $r := .RedResults }} +{{ if gt $i 0 }} +--- {{ end }} -{{ if .PotentialActionDeveloper }} -**Potential action (developer):** {{.PotentialActionDeveloper}} +### {{ $r.RuleName }} +#### Status +RED + +#### Message +{{ $r.Message }} +{{ if gt (len $r.Details) 0 }} +#### Details +{{ range $r.Details }} +- {{ . }} +{{ end }} +{{ end }} +{{ if $r.PotentialActionUser }} +#### Potential action +{{ $r.PotentialActionUser }} +{{ end }} +{{ if $r.PotentialActionDeveloper }} +#### Potential action (developer) +{{ $r.PotentialActionDeveloper }} +{{ end }} + {{ end }} {{ end }} @@ -32,15 +48,31 @@ ## 🟡 Warnings -{{ range .YellowResults }} -### {{.RuleName}} -**Status:** YELLOW -**Message:** {{.Message}} -{{ if .PotentialActionUser }} -**Potential action:** {{.PotentialActionUser}} +{{ range $i, $r := .YellowResults }} +{{ if gt $i 0 }} +--- {{ end }} -{{ if .PotentialActionDeveloper }} -**Potential action (developer):** {{.PotentialActionDeveloper}} +### {{ $r.RuleName }} +#### Status +YELLOW + +#### Message +{{ $r.Message }} +{{ if gt (len $r.Details) 0 }} +#### Details +{{ range $r.Details }} +- {{ . 
}} +{{ end }} +{{ end }} +{{ if $r.PotentialActionUser }} +#### Potential action +{{ $r.PotentialActionUser }} +{{ end }} +{{ if $r.PotentialActionDeveloper }} +#### Potential action (developer) +{{ $r.PotentialActionDeveloper }} +{{ end }} + {{ end }} {{ end }} @@ -52,3 +84,4 @@ - **{{.RuleName}}:** {{.Message}} {{ end }} +{{ end }} diff --git a/web/DEPLOYMENT.md b/web/DEPLOYMENT.md new file mode 100644 index 0000000..c19fa13 --- /dev/null +++ b/web/DEPLOYMENT.md @@ -0,0 +1,207 @@ +# Deployment Guide + +This guide covers deploying the Sensor Metrics Analyzer Web service on a Linux server. + +## Prerequisites + +- Linux server with systemd +- Nginx installed +- Go 1.21+ (for building, or use pre-built binaries) +- Root or sudo access + +## Step 1: Prepare the Application + +1. Clone or copy the repository to the server: + ```bash + cd /opt + git clone sensor-metrics-analyzer + # Or copy the files to /opt/sensor-metrics-analyzer + ``` + +2. Download the precompiled binary from GitHub Releases: + ```bash + cd /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go + curl -L -o bin/web-server https://github.com/stackrox/sensor-metrics-analyzer/releases/latest/download/web-server-linux-amd64 + chmod +x bin/web-server + ``` + +3. Verify the binary exists: + ```bash + ls -lh /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/ + # Should show: web-server + ``` + +## Step 2: Create System User + +Create a dedicated user for running the service: + +```bash +sudo useradd -r -s /bin/false -d /opt/sensor-metrics-analyzer sensor-metrics +sudo chown -R sensor-metrics:sensor-metrics /opt/sensor-metrics-analyzer +``` + +## Step 3: Configure Systemd Service + +1. Copy the service file: + ```bash + sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/sensor-metrics-web.service \ + /etc/systemd/system/ + ``` + +2. 
Edit the service file to match your paths: + ```bash + sudo nano /etc/systemd/system/sensor-metrics-web.service + ``` + + Update these paths if different: + - `WorkingDirectory`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/server` + - `ExecStart`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server` + - `RULES_DIR`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/automated-rules` + - `LOAD_LEVEL_DIR`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/automated-rules/load-level` + - `TEMPLATE_PATH`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/templates/markdown.tmpl` + +3. Reload systemd and start the service: + ```bash + sudo systemctl daemon-reload + sudo systemctl enable sensor-metrics-web + sudo systemctl start sensor-metrics-web + ``` + +4. Verify the service is running: + ```bash + sudo systemctl status sensor-metrics-web + ``` + +5. Check logs: + ```bash + sudo journalctl -u sensor-metrics-web -f + ``` + +## Step 4: Configure Nginx + +1. Copy the nginx configuration: + ```bash + sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/nginx.conf \ + /etc/nginx/sites-available/sensor-metrics-web + ``` + +2. Edit the configuration: + ```bash + sudo nano /etc/nginx/sites-available/sensor-metrics-web + ``` + + Update: + - `root`: `/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/static` + - `server_name`: Your domain name (or leave as `_` for default) + +3. Enable the site and disable the default nginx site: + ```bash + sudo rm /etc/nginx/sites-enabled/default + sudo ln -s /etc/nginx/sites-available/sensor-metrics-web \ + /etc/nginx/sites-enabled/ + ``` + +4. Test nginx configuration: + ```bash + sudo nginx -t + ``` + +5. Reload nginx: + ```bash + sudo systemctl reload nginx + ``` + +## Step 5: Verify Deployment + +1. Test the backend health endpoint directly: + ```bash + curl http://localhost:8080/health + ``` + +2. 
Test the nginx health endpoint: + ```bash + curl http://localhost/health + # Or: curl http://your-domain/health + ``` + +3. Test the API directly: + ```bash + curl -X POST http://localhost/api/analyze/both \ + -F "file=@/path/to/test-metrics.prom" + ``` + +4. Access the web interface: + Open `http://your-server-ip` or `http://your-domain` in a browser + + Note: The backend on `:8080` only serves API endpoints, so `/` will return 404. + +## Step 6: Firewall Configuration + +If using a firewall, allow HTTP traffic: + +```bash +# For UFW +sudo ufw allow 80/tcp + +# For firewalld +sudo firewall-cmd --permanent --add-service=http +sudo firewall-cmd --reload +``` + +## Troubleshooting + +### Service won't start + +1. Check service status: + ```bash + sudo systemctl status sensor-metrics-web + ``` + +2. Check logs: + ```bash + sudo journalctl -u sensor-metrics-web -n 50 + ``` + +3. Verify binary exists and is executable: + ```bash + ls -l /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server + ``` + +4. Test running manually: + ```bash + sudo -u sensor-metrics /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server \ + --rules /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/automated-rules + ``` + +### Nginx 502 Bad Gateway + +1. Verify backend is running: + ```bash + curl http://localhost:8080/health + ``` + +2. Check nginx error logs: + ```bash + sudo tail -f /var/log/nginx/error.log + ``` + +3. Verify proxy_pass URL in nginx config matches backend listen address + +### File upload fails + +1. Check nginx `client_max_body_size` setting +2. Verify file size is within limits (default: 50MB) +3. Check backend logs for detailed errors + +## Security Hardening + +1. **Use HTTPS**: Set up SSL/TLS certificates (Let's Encrypt recommended) +2. **Restrict access**: Use firewall rules to limit access to specific IPs if needed +3. **Regular updates**: Keep the application and system updated +4. 
**Monitor logs**: Set up log monitoring for suspicious activity + +## Next Steps + +- See [UPDATE.md](./UPDATE.md) for update procedures +- Configure log rotation if needed +- Set up monitoring/alerting for the service diff --git a/web/README.md b/web/README.md new file mode 100644 index 0000000..4e402d2 --- /dev/null +++ b/web/README.md @@ -0,0 +1,141 @@ +# Sensor Metrics Analyzer Web + +Web interface for the Sensor Metrics Analyzer, allowing users to upload Prometheus metrics files and view analysis reports in both console and markdown formats. + +## Architecture + +- **Backend**: Go HTTP server running as a systemd service +- **Frontend**: Static HTML/JavaScript served by Nginx +- **API**: RESTful endpoint `/api/analyze/both` that accepts file uploads and returns JSON with both console and markdown outputs + +## Components + +- `server/` - Go HTTP server source code +- `static/` - Frontend HTML/JS/CSS files +- `nginx.conf` - Nginx configuration template +- `sensor-metrics-web.service` - Systemd service unit file + +## Quick Start + +### Prerequisites + +- Nginx installed and running +- Linux system with systemd + +### Download Precompiled Binaries + +We publish precompiled binaries in GitHub Releases. Download the latest `web-server` binary for your platform and place it in `sensor-metrics-analyzer-go/bin/`. + +Example: +```bash +curl -L -o bin/web-server https://github.com/stackrox/sensor-metrics-analyzer/releases/latest/download/web-server-linux-amd64 +chmod +x bin/web-server +``` + +### Deployment + +See [DEPLOYMENT.md](./DEPLOYMENT.md) for detailed deployment instructions. + +### Updating + +See [UPDATE.md](./UPDATE.md) for update procedures. + +## Development + +### Running Locally + +1. Start the backend server: + ```bash + cd sensor-metrics-analyzer-go/web/server + go run main.go \ + --listen :8080 \ + --rules ../automated-rules \ + --load-level-dir ../automated-rules/load-level + ``` + +2. 
Serve the frontend (in another terminal): + ```bash + cd sensor-metrics-analyzer-go/web/static + python3 -m http.server 8000 + ``` + +3. Access the web interface at `http://localhost:8000` + + Note: For local development, you may need to update the API endpoint in `index.html` to point to `http://localhost:8080/api/analyze/both` or use a local proxy. + +### Testing + +Test the API endpoint directly: +```bash +curl -X POST http://localhost:8080/api/analyze/both \ + -F "file=@/path/to/metrics.prom" +``` + +## Configuration + +The web server can be configured via command-line flags or environment variables: + +- `--listen` / `LISTEN_ADDR`: Listen address (default: `:8080`) +- `--rules` / `RULES_DIR`: Rules directory (default: `./automated-rules`) +- `--load-level-dir` / `LOAD_LEVEL_DIR`: Load level rules directory (default: `./automated-rules/load-level`) +- `--template` / `TEMPLATE_PATH`: Path to markdown template (default: `./templates/markdown.tmpl`) +- `--max-size` / `MAX_FILE_SIZE`: Maximum upload size in bytes (default: 50MB) +- `--timeout` / `REQUEST_TIMEOUT`: Request timeout duration (default: 60s) + +## API Endpoints + +### POST /api/analyze/both + +Upload a metrics file and receive both console and markdown outputs. + +**Request:** +- Method: `POST` +- Content-Type: `multipart/form-data` +- Body: Form field `file` containing the metrics file + +**Response:** +```json +{ + "console": "...", + "markdown": "...", + "error": "" // Optional, present if there were errors +} +``` + +### GET /health + +Health check endpoint. 
+ +**Response:** +```json +{ + "status": "ok" +} +``` + +## Troubleshooting + +### Server won't start + +- Verify the rules directory exists and contains valid TOML files +- Check systemd logs: `journalctl -u sensor-metrics-web -f` + +### File upload fails + +- Check nginx `client_max_body_size` setting +- Verify the file size is within limits +- Check server logs for detailed error messages + +### Analysis returns errors + +- Ensure the uploaded file is a valid Prometheus metrics file +- Check that rules are properly configured +- Review server logs for analyzer output + +## Security Considerations + +- The service runs as a dedicated user (`sensor-metrics`) +- Temporary files are automatically cleaned up after processing +- File size limits prevent resource exhaustion +- Request timeouts prevent long-running requests +- No persistent storage of uploaded files or reports diff --git a/web/UPDATE.md b/web/UPDATE.md new file mode 100644 index 0000000..5f05695 --- /dev/null +++ b/web/UPDATE.md @@ -0,0 +1,208 @@ +# Update Guide + +This guide covers updating the Sensor Metrics Analyzer Web service. + +## Update Procedure + +### Step 1: Stop the Service + +```bash +sudo systemctl stop sensor-metrics-web +``` + +### Step 2: Backup Current Version (Optional but Recommended) + +```bash +# Backup binaries +sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server \ + /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server.backup +``` + +### Step 3: Update the Application + +**Option A: Git Pull (if using git)** + +```bash +cd /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go +git pull origin main # or your branch name +``` + +**Option B: Manual Copy** + +Copy the new files to the server, preserving the directory structure. 
+ +### Step 4: Download Updated Binary + +```bash +cd /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go +curl -L -o bin/web-server https://github.com/stackrox/sensor-metrics-analyzer/releases/latest/download/web-server-linux-amd64 +chmod +x bin/web-server +``` + +### Step 5: Update Configuration Files (if needed) + +Check if any configuration files have changed: + +```bash +# Compare service file +diff /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/sensor-metrics-web.service \ + /etc/systemd/system/sensor-metrics-web.service + +# Compare nginx config +diff /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/nginx.conf \ + /etc/nginx/sites-available/sensor-metrics-web +``` + +If there are differences, update the files in `/etc/`: + +```bash +# Update systemd service (review changes first!) +sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/sensor-metrics-web.service \ + /etc/systemd/system/ +sudo systemctl daemon-reload + +# Update nginx config (review changes first!)
+sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/nginx.conf \ + /etc/nginx/sites-available/sensor-metrics-web +sudo nginx -t # Test configuration +sudo systemctl reload nginx +``` + +### Step 6: Update Frontend Files + +```bash +sudo cp -r /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/static/* \ + /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/web/static/ +``` + +### Step 7: Verify Binaries + +```bash +# Check binaries exist and are executable +ls -lh /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/ + +# Test web server binary (should show usage) +/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server --help +``` + +### Step 8: Start the Service + +```bash +sudo systemctl start sensor-metrics-web +sudo systemctl status sensor-metrics-web +``` + +### Step 9: Verify Health + +```bash +# Check health endpoint +curl http://localhost:8080/health + +# Check service logs +sudo journalctl -u sensor-metrics-web -f +``` + +### Step 10: Test the Web Interface + +1. Open the web interface in a browser +2. Upload a test metrics file +3. Verify both console and markdown outputs are generated correctly + +## Rollback Procedure + +If something goes wrong, rollback to the previous version: + +```bash +# Stop service +sudo systemctl stop sensor-metrics-web + +# Restore binaries +sudo cp /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server.backup \ + /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server + +# Start service +sudo systemctl start sensor-metrics-web +``` + +## Automated Update Script + +You can create a simple update script: + +```bash +#!/bin/bash +# /opt/sensor-metrics-analyzer/update.sh + +set -e + +SERVICE_NAME="sensor-metrics-web" +APP_DIR="/opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go" +BIN_DIR="$APP_DIR/bin" + +echo "Stopping service..." +sudo systemctl stop $SERVICE_NAME + +echo "Backing up binaries..." 
+sudo cp $BIN_DIR/web-server $BIN_DIR/web-server.backup.$(date +%Y%m%d_%H%M%S) + +echo "Updating application..." +cd $APP_DIR +# git pull # Uncomment if using git + +echo "Downloading binary..." +curl -L -o $BIN_DIR/web-server https://github.com/stackrox/sensor-metrics-analyzer/releases/latest/download/web-server-linux-amd64 +chmod +x $BIN_DIR/web-server + +echo "Starting service..." +sudo systemctl start $SERVICE_NAME + +echo "Waiting for service to start..." +sleep 2 + +echo "Checking service status..." +sudo systemctl status $SERVICE_NAME --no-pager + +echo "Testing health endpoint..." +curl -s http://localhost:8080/health || echo "Health check failed!" + +echo "Update complete!" +``` + +Make it executable: +```bash +chmod +x /opt/sensor-metrics-analyzer/update.sh +``` + +## Update Checklist + +- [ ] Stop the service +- [ ] Backup current binaries +- [ ] Update application files +- [ ] Rebuild binaries +- [ ] Update configuration files (if changed) +- [ ] Update frontend files +- [ ] Verify binaries +- [ ] Start the service +- [ ] Verify health endpoint +- [ ] Test web interface +- [ ] Monitor logs for errors + +## Troubleshooting Updates + +### Service fails to start after update + +1. Check logs: `sudo journalctl -u sensor-metrics-web -n 50` +2. Verify binary exists: `ls -l /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server` +3. Test binary manually: `sudo -u sensor-metrics /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/web-server --help` +4. 
Rollback if needed + +### Binary not found errors + +- Verify the build completed successfully +- Check file permissions: `sudo chown sensor-metrics:sensor-metrics /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/*` +- Ensure binaries are executable: `sudo chmod +x /opt/sensor-metrics-analyzer/sensor-metrics-analyzer-go/bin/*` + +### Configuration errors + +- Review configuration file changes before applying +- Test nginx config: `sudo nginx -t` +- Reload systemd after service file changes: `sudo systemctl daemon-reload` diff --git a/web/nginx.conf b/web/nginx.conf new file mode 100644 index 0000000..0aedd86 --- /dev/null +++ b/web/nginx.conf @@ -0,0 +1,46 @@ +# Nginx configuration for Sensor Metrics Analyzer Web +# Place this file at: /etc/nginx/sites-available/sensor-metrics-web +# Then create symlink: ln -s /etc/nginx/sites-available/sensor-metrics-web /etc/nginx/sites-enabled/ +# Reload nginx: sudo systemctl reload nginx + +server { + listen 80; + server_name _; # Replace with your domain name if needed + + # Maximum upload size (adjust as needed) + client_max_body_size 50m; + + # Frontend static files + root /path/to/sensor-metrics-analyzer-go/web/static; + index index.html; + + # Serve static files + location / { + try_files $uri $uri/ /index.html; + } + + # API proxy to Go backend + location /api/ { + proxy_pass http://localhost:8080; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_cache_bypass $http_upgrade; + + # Increase timeouts for large file processing + proxy_read_timeout 120s; + proxy_connect_timeout 120s; + proxy_send_timeout 120s; + } + + # Health check endpoint + location /health { + proxy_pass http://localhost:8080/health; + proxy_http_version 1.1; + proxy_set_header Host $host; + } +} 
diff --git a/web/sensor-metrics-web.service b/web/sensor-metrics-web.service new file mode 100644 index 0000000..5b68a3b --- /dev/null +++ b/web/sensor-metrics-web.service @@ -0,0 +1,36 @@ +[Unit] +Description=Sensor Metrics Analyzer Web Service +After=network.target + +[Service] +Type=simple +User=sensor-metrics +Group=sensor-metrics +WorkingDirectory=/opt/sensor-metrics-analyzer/web/server +ExecStart=/opt/sensor-metrics-analyzer/bin/web-server + +# Environment variables (adjust paths as needed) +Environment="LISTEN_ADDR=:8080" +Environment="RULES_DIR=/opt/sensor-metrics-analyzer/automated-rules" +Environment="LOAD_LEVEL_DIR=/opt/sensor-metrics-analyzer/automated-rules/load-level" +Environment="TEMPLATE_PATH=/opt/sensor-metrics-analyzer/templates/markdown.tmpl" +Environment="MAX_FILE_SIZE=52428800" + +# Security settings +NoNewPrivileges=true +PrivateTmp=true +ProtectSystem=strict +ProtectHome=true +ReadWritePaths=/tmp + +# Restart policy +Restart=on-failure +RestartSec=5s + +# Logging +StandardOutput=journal +StandardError=journal +SyslogIdentifier=sensor-metrics-web + +[Install] +WantedBy=multi-user.target diff --git a/web/server/main.go b/web/server/main.go new file mode 100644 index 0000000..0a04e7a --- /dev/null +++ b/web/server/main.go @@ -0,0 +1,225 @@ +package main + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "io" + "log" + "net/http" + "os" + "time" + + "github.com/stackrox/sensor-metrics-analyzer/internal/analyzer" + "github.com/stackrox/sensor-metrics-analyzer/internal/reporter" +) + +const ( + defaultListenAddr = ":8080" + defaultMaxFileSize = 50 * 1024 * 1024 // 50MB + defaultRequestTimeout = 60 * time.Second + defaultRulesDir = "./automated-rules" + defaultLoadLevelDir = "./automated-rules/load-level" +) + +type Config struct { + ListenAddr string + MaxFileSize int64 + RequestTimeout time.Duration + RulesDir string + LoadLevelDir string + TemplatePath string +} + +type AnalyzeResponse struct { + Markdown string `json:"markdown"` + 
Console string `json:"console"` + Error string `json:"error,omitempty"` +} + +type VersionResponse struct { + Version string `json:"version"` + LastUpdate string `json:"lastUpdate"` +} + +var ( + buildVersion = "dev" + buildTime = "" +) + +func main() { + cfg := parseFlags() + + log.Printf("Starting server on %s", cfg.ListenAddr) + log.Printf("Rules directory: %s", cfg.RulesDir) + log.Printf("Load level directory: %s", cfg.LoadLevelDir) + log.Printf("Max file size: %d bytes", cfg.MaxFileSize) + + http.HandleFunc("/api/analyze/both", handleAnalyzeBoth(cfg)) + http.HandleFunc("/health", handleHealth) + http.HandleFunc("/version", handleVersion()) + + if err := http.ListenAndServe(cfg.ListenAddr, nil); err != nil { + log.Fatalf("Server failed: %v", err) + } +} + +func parseFlags() *Config { + cfg := &Config{ + ListenAddr: defaultListenAddr, + MaxFileSize: defaultMaxFileSize, + RequestTimeout: defaultRequestTimeout, + RulesDir: defaultRulesDir, + LoadLevelDir: defaultLoadLevelDir, + TemplatePath: "./templates/markdown.tmpl", + } + + flag.StringVar(&cfg.ListenAddr, "listen", defaultListenAddr, "Listen address") + flag.Int64Var(&cfg.MaxFileSize, "max-size", defaultMaxFileSize, "Max upload file size (bytes)") + flag.DurationVar(&cfg.RequestTimeout, "timeout", defaultRequestTimeout, "Request timeout") + flag.StringVar(&cfg.RulesDir, "rules", defaultRulesDir, "Rules directory") + flag.StringVar(&cfg.LoadLevelDir, "load-level-dir", defaultLoadLevelDir, "Load level rules directory") + flag.StringVar(&cfg.TemplatePath, "template", cfg.TemplatePath, "Path to markdown template") + + flag.Parse() + + // Override with environment variables if set + if envAddr := os.Getenv("LISTEN_ADDR"); envAddr != "" { + cfg.ListenAddr = envAddr + } + if envSize := os.Getenv("MAX_FILE_SIZE"); envSize != "" { + var size int64 + if _, err := fmt.Sscanf(envSize, "%d", &size); err == nil { + cfg.MaxFileSize = size + } + } + if envRules := os.Getenv("RULES_DIR"); envRules != "" { + cfg.RulesDir = 
envRules + } + if envLoadLevel := os.Getenv("LOAD_LEVEL_DIR"); envLoadLevel != "" { + cfg.LoadLevelDir = envLoadLevel + } + if envTimeout := os.Getenv("REQUEST_TIMEOUT"); envTimeout != "" { + if parsed, err := time.ParseDuration(envTimeout); err == nil { + cfg.RequestTimeout = parsed + } + } + if envTemplate := os.Getenv("TEMPLATE_PATH"); envTemplate != "" { + cfg.TemplatePath = envTemplate + } + + return cfg +} + +func handleHealth(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{"status": "ok"}) +} + +func handleVersion() http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + version := "Unknown" + lastUpdate := "Unknown" + + if buildVersion != "" { + version = buildVersion + } + if buildTime != "" { + lastUpdate = buildTime + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(VersionResponse{ + Version: version, + LastUpdate: lastUpdate, + }) + } +} + +func handleAnalyzeBoth(cfg *Config) http.HandlerFunc { + return func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + http.Error(w, "Method not allowed", http.StatusMethodNotAllowed) + return + } + + ctx, cancel := context.WithTimeout(r.Context(), cfg.RequestTimeout) + defer cancel() + if err := ctx.Err(); err != nil { + respondError(w, http.StatusRequestTimeout, "Request timed out") + return + } + + // Set max file size + r.Body = http.MaxBytesReader(w, r.Body, cfg.MaxFileSize) + + // Parse multipart form + if err := r.ParseMultipartForm(cfg.MaxFileSize); err != nil { + respondError(w, http.StatusBadRequest, fmt.Sprintf("Failed to parse form: %v", err)) + return + } + + // Get uploaded file + file, header, err := r.FormFile("file") + if err != nil { + respondError(w, http.StatusBadRequest, fmt.Sprintf("No file uploaded: %v", err)) + return + } + defer file.Close() + + // Create temporary file + tmpFile, err := os.CreateTemp("", 
"metrics-*.prom") + if err != nil { + respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to create temp file: %v", err)) + return + } + defer os.Remove(tmpFile.Name()) + defer tmpFile.Close() + + // Copy uploaded file to temp file + if _, err := io.Copy(tmpFile, file); err != nil { + respondError(w, http.StatusInternalServerError, fmt.Sprintf("Failed to save file: %v", err)) + return + } + tmpFile.Close() + + log.Printf("Processing file: %s (%d bytes)", header.Filename, header.Size) + + response := AnalyzeResponse{} + report, err := analyzer.AnalyzeFile(tmpFile.Name(), analyzer.Options{ + RulesDir: cfg.RulesDir, + LoadLevelDir: cfg.LoadLevelDir, + ClusterName: analyzer.ExtractClusterName(header.Filename), + Logger: io.Discard, + }) + if err := ctx.Err(); err != nil { + respondError(w, http.StatusRequestTimeout, "Request timed out") + return + } + if err != nil { + response.Error = fmt.Sprintf("Analysis failed: %v", err) + } else { + response.Console = reporter.GenerateConsole(report) + markdown, mdErr := reporter.GenerateMarkdown(report, cfg.TemplatePath) + if mdErr != nil { + response.Error = fmt.Sprintf("Markdown generation failed: %v", mdErr) + } else { + response.Markdown = markdown + } + } + + // Return response + w.Header().Set("Content-Type", "application/json") + if response.Error != "" && response.Console == "" && response.Markdown == "" { + w.WriteHeader(http.StatusInternalServerError) + } + json.NewEncoder(w).Encode(response) + } +} + +func respondError(w http.ResponseWriter, status int, message string) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + json.NewEncoder(w).Encode(AnalyzeResponse{Error: message}) +} diff --git a/web/static/index.html b/web/static/index.html new file mode 100644 index 0000000..d67ad88 --- /dev/null +++ b/web/static/index.html @@ -0,0 +1,477 @@ + + + + + + Sensor Metrics Analyzer + + + +
+

Sensor Metrics Analyzer

+

Upload a Prometheus metrics file to analyze sensor metrics

+
+
Version: Unknown
+
Last update: Unknown
+
GitHub Releases
+
+
+ Privacy: Uploaded files are analyzed and not retained. Temporary files are deleted immediately after processing, and metric data is not stored. +
+
+ Disclaimer: This project is AI-generated and only a small fraction of the code and metric rules were verified by a human. Analysis results may be inaccurate and, in extreme cases, totally wrong. +
+ +
+ + +
+ +
+ + + + + +
+
+ + +
+
+

+            
+
+
+

+            
+
+
+ + + + + diff --git a/web/static/vendor/marked.min.js b/web/static/vendor/marked.min.js new file mode 100644 index 0000000..b4e0d73 --- /dev/null +++ b/web/static/vendor/marked.min.js @@ -0,0 +1,69 @@ +/** + * marked v15.0.12 - a markdown parser + * Copyright (c) 2011-2025, Christopher Jeffrey. (MIT Licensed) + * https://github.com/markedjs/marked + */ + +/** + * DO NOT EDIT THIS FILE + * The code in this file is generated from files in ./src/ + */ +(function(g,f){if(typeof exports=="object"&&typeof module<"u"){module.exports=f()}else if("function"==typeof define && define.amd){define("marked",f)}else {g["marked"]=f()}}(typeof globalThis < "u" ? globalThis : typeof self < "u" ? self : this,function(){var exports={};var __exports=exports;var module={exports}; +"use strict";var H=Object.defineProperty;var be=Object.getOwnPropertyDescriptor;var Te=Object.getOwnPropertyNames;var we=Object.prototype.hasOwnProperty;var ye=(l,e)=>{for(var t in e)H(l,t,{get:e[t],enumerable:!0})},Re=(l,e,t,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of Te(e))!we.call(l,s)&&s!==t&&H(l,s,{get:()=>e[s],enumerable:!(n=be(e,s))||n.enumerable});return l};var Se=l=>Re(H({},"__esModule",{value:!0}),l);var kt={};ye(kt,{Hooks:()=>L,Lexer:()=>x,Marked:()=>E,Parser:()=>b,Renderer:()=>$,TextRenderer:()=>_,Tokenizer:()=>S,defaults:()=>w,getDefaults:()=>z,lexer:()=>ht,marked:()=>k,options:()=>it,parse:()=>pt,parseInline:()=>ct,parser:()=>ut,setOptions:()=>ot,use:()=>lt,walkTokens:()=>at});module.exports=Se(kt);function z(){return{async:!1,breaks:!1,extensions:null,gfm:!0,hooks:null,pedantic:!1,renderer:null,silent:!1,tokenizer:null,walkTokens:null}}var w=z();function N(l){w=l}var I={exec:()=>null};function h(l,e=""){let t=typeof l=="string"?l:l.source,n={replace:(s,i)=>{let r=typeof i=="string"?i:i.source;return r=r.replace(m.caret,"$1"),t=t.replace(s,r),n},getRegex:()=>new RegExp(t,e)};return n}var m={codeRemoveIndent:/^(?: {1,4}| 
{0,3}\t)/gm,outputLinkReplace:/\\([\[\]])/g,indentCodeCompensation:/^(\s+)(?:```)/,beginningSpace:/^\s+/,endingHash:/#$/,startingSpaceChar:/^ /,endingSpaceChar:/ $/,nonSpaceChar:/[^ ]/,newLineCharGlobal:/\n/g,tabCharGlobal:/\t/g,multipleSpaceGlobal:/\s+/g,blankLine:/^[ \t]*$/,doubleBlankLine:/\n[ \t]*\n[ \t]*$/,blockquoteStart:/^ {0,3}>/,blockquoteSetextReplace:/\n {0,3}((?:=+|-+) *)(?=\n|$)/g,blockquoteSetextReplace2:/^ {0,3}>[ \t]?/gm,listReplaceTabs:/^\t+/,listReplaceNesting:/^ {1,4}(?=( {4})*[^ ])/g,listIsTask:/^\[[ xX]\] /,listReplaceTask:/^\[[ xX]\] +/,anyLine:/\n.*\n/,hrefBrackets:/^<(.*)>$/,tableDelimiter:/[:|]/,tableAlignChars:/^\||\| *$/g,tableRowBlankLine:/\n[ \t]*$/,tableAlignRight:/^ *-+: *$/,tableAlignCenter:/^ *:-+: *$/,tableAlignLeft:/^ *:-+ *$/,startATag:/^/i,startPreScriptTag:/^<(pre|code|kbd|script)(\s|>)/i,endPreScriptTag:/^<\/(pre|code|kbd|script)(\s|>)/i,startAngleBracket:/^$/,pedanticHrefTitle:/^([^'"]*[^\s])\s+(['"])(.*)\2/,unicodeAlphaNumeric:/[\p{L}\p{N}]/u,escapeTest:/[&<>"']/,escapeReplace:/[&<>"']/g,escapeTestNoEncode:/[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/,escapeReplaceNoEncode:/[<>"']|&(?!(#\d{1,7}|#[Xx][a-fA-F0-9]{1,6}|\w+);)/g,unescapeTest:/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/ig,caret:/(^|[^\[])\^/g,percentDecode:/%25/g,findPipe:/\|/g,splitPipe:/ \|/,slashPipe:/\\\|/g,carriageReturn:/\r\n|\r/g,spaceLine:/^ +$/gm,notSpaceStart:/^\S*/,endingNewline:/\n$/,listItemRegex:l=>new RegExp(`^( {0,3}${l})((?:[ ][^\\n]*)?(?:\\n|$))`),nextBulletRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}(?:[*+-]|\\d{1,9}[.)])((?:[ ][^\\n]*)?(?:\\n|$))`),hrRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}((?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$)`),fencesBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}(?:\`\`\`|~~~)`),headingBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}#`),htmlBeginRegex:l=>new RegExp(`^ {0,${Math.min(3,l-1)}}<(?:[a-z].*>|!--)`,"i")},$e=/^(?:[ \t]*(?:\n|$))+/,_e=/^((?: {4}| {0,3}\t)[^\n]+(?:\n(?:[ 
\t]*(?:\n|$))*)?)+/,Le=/^ {0,3}(`{3,}(?=[^`\n]*(?:\n|$))|~{3,})([^\n]*)(?:\n|$)(?:|([\s\S]*?)(?:\n|$))(?: {0,3}\1[~`]* *(?=\n|$)|$)/,O=/^ {0,3}((?:-[\t ]*){3,}|(?:_[ \t]*){3,}|(?:\*[ \t]*){3,})(?:\n+|$)/,ze=/^ {0,3}(#{1,6})(?=\s|$)(.*)(?:\n+|$)/,F=/(?:[*+-]|\d{1,9}[.)])/,ie=/^(?!bull |blockCode|fences|blockquote|heading|html|table)((?:.|\n(?!\s*?\n|bull |blockCode|fences|blockquote|heading|html|table))+?)\n {0,3}(=+|-+) *(?:\n+|$)/,oe=h(ie).replace(/bull/g,F).replace(/blockCode/g,/(?: {4}| {0,3}\t)/).replace(/fences/g,/ {0,3}(?:`{3,}|~{3,})/).replace(/blockquote/g,/ {0,3}>/).replace(/heading/g,/ {0,3}#{1,6}/).replace(/html/g,/ {0,3}<[^\n>]+>\n/).replace(/\|table/g,"").getRegex(),Me=h(ie).replace(/bull/g,F).replace(/blockCode/g,/(?: {4}| {0,3}\t)/).replace(/fences/g,/ {0,3}(?:`{3,}|~{3,})/).replace(/blockquote/g,/ {0,3}>/).replace(/heading/g,/ {0,3}#{1,6}/).replace(/html/g,/ {0,3}<[^\n>]+>\n/).replace(/table/g,/ {0,3}\|?(?:[:\- ]*\|)+[\:\- ]*\n/).getRegex(),Q=/^([^\n]+(?:\n(?!hr|heading|lheading|blockquote|fences|list|html|table| +\n)[^\n]+)*)/,Pe=/^[^\n]+/,U=/(?!\s*\])(?:\\.|[^\[\]\\])+/,Ae=h(/^ {0,3}\[(label)\]: *(?:\n[ \t]*)?([^<\s][^\s]*|<.*?>)(?:(?: +(?:\n[ \t]*)?| *\n[ \t]*)(title))? 
*(?:\n+|$)/).replace("label",U).replace("title",/(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/).getRegex(),Ee=h(/^( {0,3}bull)([ \t][^\n]+?)?(?:\n|$)/).replace(/bull/g,F).getRegex(),v="address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option|p|param|search|section|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul",K=/|$))/,Ce=h("^ {0,3}(?:<(script|pre|style|textarea)[\\s>][\\s\\S]*?(?:[^\\n]*\\n+|$)|comment[^\\n]*(\\n+|$)|<\\?[\\s\\S]*?(?:\\?>\\n*|$)|\\n*|$)|\\n*|$)|)[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$)|<(?!script|pre|style|textarea)([a-z][\\w-]*)(?:attribute)*? */?>(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$)|(?=[ \\t]*(?:\\n|$))[\\s\\S]*?(?:(?:\\n[ ]*)+\\n|$))","i").replace("comment",K).replace("tag",v).replace("attribute",/ +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/).getRegex(),le=h(Q).replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("|table","").replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex(),Ie=h(/^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/).replace("paragraph",le).getRegex(),X={blockquote:Ie,code:_e,def:Ae,fences:Le,heading:ze,hr:O,html:Ce,lheading:oe,list:Ee,newline:$e,paragraph:le,table:I,text:Pe},re=h("^ *([^\\n ].*)\\n {0,3}((?:\\| *)?:?-+:? *(?:\\| *:?-+:? *)*(?:\\| *)?)(?:\\n((?:(?! 
*\\n|hr|heading|blockquote|code|fences|list|html).*(?:\\n|$))*)\\n*|$)").replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("blockquote"," {0,3}>").replace("code","(?: {4}| {0,3} )[^\\n]").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex(),Oe={...X,lheading:Me,table:re,paragraph:h(Q).replace("hr",O).replace("heading"," {0,3}#{1,6}(?:\\s|$)").replace("|lheading","").replace("table",re).replace("blockquote"," {0,3}>").replace("fences"," {0,3}(?:`{3,}(?=[^`\\n]*\\n)|~{3,})[^\\n]*\\n").replace("list"," {0,3}(?:[*+-]|1[.)]) ").replace("html",")|<(?:script|pre|style|textarea|!--)").replace("tag",v).getRegex()},Be={...X,html:h(`^ *(?:comment *(?:\\n|\\s*$)|<(tag)[\\s\\S]+? *(?:\\n{2,}|\\s*$)|\\s]*)*?/?> *(?:\\n{2,}|\\s*$))`).replace("comment",K).replace(/tag/g,"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b").getRegex(),def:/^ *\[([^\]]+)\]: *]+)>?(?: +(["(][^\n]+[")]))? 
*(?:\n+|$)/,heading:/^(#{1,6})(.*)(?:\n+|$)/,fences:I,lheading:/^(.+?)\n {0,3}(=+|-+) *(?:\n+|$)/,paragraph:h(Q).replace("hr",O).replace("heading",` *#{1,6} *[^ +]`).replace("lheading",oe).replace("|table","").replace("blockquote"," {0,3}>").replace("|fences","").replace("|list","").replace("|html","").replace("|tag","").getRegex()},qe=/^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,ve=/^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,ae=/^( {2,}|\\)\n(?!\s*$)/,De=/^(`+|[^`])(?:(?= {2,}\n)|[\s\S]*?(?:(?=[\\]*?>/g,ue=/^(?:\*+(?:((?!\*)punct)|[^\s*]))|^_+(?:((?!_)punct)|([^\s_]))/,je=h(ue,"u").replace(/punct/g,D).getRegex(),Fe=h(ue,"u").replace(/punct/g,pe).getRegex(),he="^[^_*]*?__[^_*]*?\\*[^_*]*?(?=__)|[^*]+(?=[^*])|(?!\\*)punct(\\*+)(?=[\\s]|$)|notPunctSpace(\\*+)(?!\\*)(?=punctSpace|$)|(?!\\*)punctSpace(\\*+)(?=notPunctSpace)|[\\s](\\*+)(?!\\*)(?=punct)|(?!\\*)punct(\\*+)(?!\\*)(?=punct)|notPunctSpace(\\*+)(?=notPunctSpace)",Qe=h(he,"gu").replace(/notPunctSpace/g,ce).replace(/punctSpace/g,W).replace(/punct/g,D).getRegex(),Ue=h(he,"gu").replace(/notPunctSpace/g,He).replace(/punctSpace/g,Ge).replace(/punct/g,pe).getRegex(),Ke=h("^[^_*]*?\\*\\*[^_*]*?_[^_*]*?(?=\\*\\*)|[^_]+(?=[^_])|(?!_)punct(_+)(?=[\\s]|$)|notPunctSpace(_+)(?!_)(?=punctSpace|$)|(?!_)punctSpace(_+)(?=notPunctSpace)|[\\s](_+)(?!_)(?=punct)|(?!_)punct(_+)(?!_)(?=punct)","gu").replace(/notPunctSpace/g,ce).replace(/punctSpace/g,W).replace(/punct/g,D).getRegex(),Xe=h(/\\(punct)/,"gu").replace(/punct/g,D).getRegex(),We=h(/^<(scheme:[^\s\x00-\x1f<>]*|email)>/).replace("scheme",/[a-zA-Z][a-zA-Z0-9+.-]{1,31}/).replace("email",/[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/).getRegex(),Je=h(K).replace("(?:-->|$)","-->").getRegex(),Ve=h("^comment|^|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^|^").replace("comment",Je).replace("attribute",/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?
/).getRegex(),q=/(?:\[(?:\\.|[^\[\]\\])*\]|\\.|`[^`]*`|[^\[\]\\`])*?/,Ye=h(/^!?\[(label)\]\(\s*(href)(?:(?:[ \t]*(?:\n[ \t]*)?)(title))?\s*\)/).replace("label",q).replace("href",/<(?:\\.|[^\n<>\\])+>|[^ \t\n\x00-\x1f]*/).replace("title",/"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/).getRegex(),ke=h(/^!?\[(label)\]\[(ref)\]/).replace("label",q).replace("ref",U).getRegex(),ge=h(/^!?\[(ref)\](?:\[\])?/).replace("ref",U).getRegex(),et=h("reflink|nolink(?!\\()","g").replace("reflink",ke).replace("nolink",ge).getRegex(),J={_backpedal:I,anyPunctuation:Xe,autolink:We,blockSkip:Ne,br:ae,code:ve,del:I,emStrongLDelim:je,emStrongRDelimAst:Qe,emStrongRDelimUnd:Ke,escape:qe,link:Ye,nolink:ge,punctuation:Ze,reflink:ke,reflinkSearch:et,tag:Ve,text:De,url:I},tt={...J,link:h(/^!?\[(label)\]\((.*?)\)/).replace("label",q).getRegex(),reflink:h(/^!?\[(label)\]\s*\[([^\]]*)\]/).replace("label",q).getRegex()},j={...J,emStrongRDelimAst:Ue,emStrongLDelim:Fe,url:h(/^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,"i").replace("email",/[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/).getRegex(),_backpedal:/(?:[^?!.,:;*_'"~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_'"~)]+(?!$))+/,del:/^(~~?)(?=[^\s~])((?:\\.|[^\\])*?(?:\\.|[^\s~\\]))\1(?=[^~]|$)/,text:/^([`~]+|[^`~])(?:(?= {2,}\n)|(?=[a-zA-Z0-9.!#$%&'*+\/=?_`{\|}~-]+@)|[\s\S]*?(?:(?=[\\":">",'"':""","'":"'"},fe=l=>st[l];function R(l,e){if(e){if(m.escapeTest.test(l))return l.replace(m.escapeReplace,fe)}else if(m.escapeTestNoEncode.test(l))return l.replace(m.escapeReplaceNoEncode,fe);return l}function V(l){try{l=encodeURI(l).replace(m.percentDecode,"%")}catch{return null}return l}function Y(l,e){let t=l.replace(m.findPipe,(i,r,o)=>{let a=!1,c=r;for(;--c>=0&&o[c]==="\\";)a=!a;return a?"|":" |"}),n=t.split(m.splitPipe),s=0;if(n[0].trim()||n.shift(),n.length>0&&!n.at(-1)?.trim()&&n.pop(),e)if(n.length>e)n.splice(e);else for(;n.length0?-2:-1}function me(l,e,t,n,s){let 
i=e.href,r=e.title||null,o=l[1].replace(s.other.outputLinkReplace,"$1");n.state.inLink=!0;let a={type:l[0].charAt(0)==="!"?"image":"link",raw:t,href:i,title:r,text:o,tokens:n.inlineTokens(o)};return n.state.inLink=!1,a}function rt(l,e,t){let n=l.match(t.other.indentCodeCompensation);if(n===null)return e;let s=n[1];return e.split(` +`).map(i=>{let r=i.match(t.other.beginningSpace);if(r===null)return i;let[o]=r;return o.length>=s.length?i.slice(s.length):i}).join(` +`)}var S=class{options;rules;lexer;constructor(e){this.options=e||w}space(e){let t=this.rules.block.newline.exec(e);if(t&&t[0].length>0)return{type:"space",raw:t[0]}}code(e){let t=this.rules.block.code.exec(e);if(t){let n=t[0].replace(this.rules.other.codeRemoveIndent,"");return{type:"code",raw:t[0],codeBlockStyle:"indented",text:this.options.pedantic?n:A(n,` +`)}}}fences(e){let t=this.rules.block.fences.exec(e);if(t){let n=t[0],s=rt(n,t[3]||"",this.rules);return{type:"code",raw:n,lang:t[2]?t[2].trim().replace(this.rules.inline.anyPunctuation,"$1"):t[2],text:s}}}heading(e){let t=this.rules.block.heading.exec(e);if(t){let n=t[2].trim();if(this.rules.other.endingHash.test(n)){let s=A(n,"#");(this.options.pedantic||!s||this.rules.other.endingSpaceChar.test(s))&&(n=s.trim())}return{type:"heading",raw:t[0],depth:t[1].length,text:n,tokens:this.lexer.inline(n)}}}hr(e){let t=this.rules.block.hr.exec(e);if(t)return{type:"hr",raw:A(t[0],` +`)}}blockquote(e){let t=this.rules.block.blockquote.exec(e);if(t){let n=A(t[0],` +`).split(` +`),s="",i="",r=[];for(;n.length>0;){let o=!1,a=[],c;for(c=0;c1,i={type:"list",raw:"",ordered:s,start:s?+n.slice(0,-1):"",loose:!1,items:[]};n=s?`\\d{1,9}\\${n.slice(-1)}`:`\\${n}`,this.options.pedantic&&(n=s?n:"[*+-]");let r=this.rules.other.listItemRegex(n),o=!1;for(;e;){let c=!1,p="",u="";if(!(t=r.exec(e))||this.rules.block.hr.test(e))break;p=t[0],e=e.substring(p.length);let d=t[2].split(` +`,1)[0].replace(this.rules.other.listReplaceTabs,Z=>" ".repeat(3*Z.length)),g=e.split(` 
+`,1)[0],T=!d.trim(),f=0;if(this.options.pedantic?(f=2,u=d.trimStart()):T?f=t[1].length+1:(f=t[2].search(this.rules.other.nonSpaceChar),f=f>4?1:f,u=d.slice(f),f+=t[1].length),T&&this.rules.other.blankLine.test(g)&&(p+=g+` +`,e=e.substring(g.length+1),c=!0),!c){let Z=this.rules.other.nextBulletRegex(f),te=this.rules.other.hrRegex(f),ne=this.rules.other.fencesBeginRegex(f),se=this.rules.other.headingBeginRegex(f),xe=this.rules.other.htmlBeginRegex(f);for(;e;){let G=e.split(` +`,1)[0],C;if(g=G,this.options.pedantic?(g=g.replace(this.rules.other.listReplaceNesting," "),C=g):C=g.replace(this.rules.other.tabCharGlobal," "),ne.test(g)||se.test(g)||xe.test(g)||Z.test(g)||te.test(g))break;if(C.search(this.rules.other.nonSpaceChar)>=f||!g.trim())u+=` +`+C.slice(f);else{if(T||d.replace(this.rules.other.tabCharGlobal," ").search(this.rules.other.nonSpaceChar)>=4||ne.test(d)||se.test(d)||te.test(d))break;u+=` +`+g}!T&&!g.trim()&&(T=!0),p+=G+` +`,e=e.substring(G.length+1),d=C.slice(f)}}i.loose||(o?i.loose=!0:this.rules.other.doubleBlankLine.test(p)&&(o=!0));let y=null,ee;this.options.gfm&&(y=this.rules.other.listIsTask.exec(u),y&&(ee=y[0]!=="[ ] ",u=u.replace(this.rules.other.listReplaceTask,""))),i.items.push({type:"list_item",raw:p,task:!!y,checked:ee,loose:!1,text:u,tokens:[]}),i.raw+=p}let a=i.items.at(-1);if(a)a.raw=a.raw.trimEnd(),a.text=a.text.trimEnd();else return;i.raw=i.raw.trimEnd();for(let c=0;cd.type==="space"),u=p.length>0&&p.some(d=>this.rules.other.anyLine.test(d.raw));i.loose=u}if(i.loose)for(let c=0;c({text:a,tokens:this.lexer.inline(a),header:!1,align:r.align[c]})));return r}}lheading(e){let t=this.rules.block.lheading.exec(e);if(t)return{type:"heading",raw:t[0],depth:t[2].charAt(0)==="="?1:2,text:t[1],tokens:this.lexer.inline(t[1])}}paragraph(e){let t=this.rules.block.paragraph.exec(e);if(t){let n=t[1].charAt(t[1].length-1)===` +`?t[1].slice(0,-1):t[1];return{type:"paragraph",raw:t[0],text:n,tokens:this.lexer.inline(n)}}}text(e){let 
t=this.rules.block.text.exec(e);if(t)return{type:"text",raw:t[0],text:t[0],tokens:this.lexer.inline(t[0])}}escape(e){let t=this.rules.inline.escape.exec(e);if(t)return{type:"escape",raw:t[0],text:t[1]}}tag(e){let t=this.rules.inline.tag.exec(e);if(t)return!this.lexer.state.inLink&&this.rules.other.startATag.test(t[0])?this.lexer.state.inLink=!0:this.lexer.state.inLink&&this.rules.other.endATag.test(t[0])&&(this.lexer.state.inLink=!1),!this.lexer.state.inRawBlock&&this.rules.other.startPreScriptTag.test(t[0])?this.lexer.state.inRawBlock=!0:this.lexer.state.inRawBlock&&this.rules.other.endPreScriptTag.test(t[0])&&(this.lexer.state.inRawBlock=!1),{type:"html",raw:t[0],inLink:this.lexer.state.inLink,inRawBlock:this.lexer.state.inRawBlock,block:!1,text:t[0]}}link(e){let t=this.rules.inline.link.exec(e);if(t){let n=t[2].trim();if(!this.options.pedantic&&this.rules.other.startAngleBracket.test(n)){if(!this.rules.other.endAngleBracket.test(n))return;let r=A(n.slice(0,-1),"\\");if((n.length-r.length)%2===0)return}else{let r=de(t[2],"()");if(r===-2)return;if(r>-1){let a=(t[0].indexOf("!")===0?5:4)+t[1].length+r;t[2]=t[2].substring(0,r),t[0]=t[0].substring(0,a).trim(),t[3]=""}}let s=t[2],i="";if(this.options.pedantic){let r=this.rules.other.pedanticHrefTitle.exec(s);r&&(s=r[1],i=r[3])}else i=t[3]?t[3].slice(1,-1):"";return s=s.trim(),this.rules.other.startAngleBracket.test(s)&&(this.options.pedantic&&!this.rules.other.endAngleBracket.test(n)?s=s.slice(1):s=s.slice(1,-1)),me(t,{href:s&&s.replace(this.rules.inline.anyPunctuation,"$1"),title:i&&i.replace(this.rules.inline.anyPunctuation,"$1")},t[0],this.lexer,this.rules)}}reflink(e,t){let n;if((n=this.rules.inline.reflink.exec(e))||(n=this.rules.inline.nolink.exec(e))){let s=(n[2]||n[1]).replace(this.rules.other.multipleSpaceGlobal," "),i=t[s.toLowerCase()];if(!i){let r=n[0].charAt(0);return{type:"text",raw:r,text:r}}return me(n,i,n[0],this.lexer,this.rules)}}emStrong(e,t,n=""){let 
s=this.rules.inline.emStrongLDelim.exec(e);if(!s||s[3]&&n.match(this.rules.other.unicodeAlphaNumeric))return;if(!(s[1]||s[2]||"")||!n||this.rules.inline.punctuation.exec(n)){let r=[...s[0]].length-1,o,a,c=r,p=0,u=s[0][0]==="*"?this.rules.inline.emStrongRDelimAst:this.rules.inline.emStrongRDelimUnd;for(u.lastIndex=0,t=t.slice(-1*e.length+r);(s=u.exec(t))!=null;){if(o=s[1]||s[2]||s[3]||s[4]||s[5]||s[6],!o)continue;if(a=[...o].length,s[3]||s[4]){c+=a;continue}else if((s[5]||s[6])&&r%3&&!((r+a)%3)){p+=a;continue}if(c-=a,c>0)continue;a=Math.min(a,a+c+p);let d=[...s[0]][0].length,g=e.slice(0,r+s.index+d+a);if(Math.min(r,a)%2){let f=g.slice(1,-1);return{type:"em",raw:g,text:f,tokens:this.lexer.inlineTokens(f)}}let T=g.slice(2,-2);return{type:"strong",raw:g,text:T,tokens:this.lexer.inlineTokens(T)}}}}codespan(e){let t=this.rules.inline.code.exec(e);if(t){let n=t[2].replace(this.rules.other.newLineCharGlobal," "),s=this.rules.other.nonSpaceChar.test(n),i=this.rules.other.startingSpaceChar.test(n)&&this.rules.other.endingSpaceChar.test(n);return s&&i&&(n=n.substring(1,n.length-1)),{type:"codespan",raw:t[0],text:n}}}br(e){let t=this.rules.inline.br.exec(e);if(t)return{type:"br",raw:t[0]}}del(e){let t=this.rules.inline.del.exec(e);if(t)return{type:"del",raw:t[0],text:t[2],tokens:this.lexer.inlineTokens(t[2])}}autolink(e){let t=this.rules.inline.autolink.exec(e);if(t){let n,s;return t[2]==="@"?(n=t[1],s="mailto:"+n):(n=t[1],s=n),{type:"link",raw:t[0],text:n,href:s,tokens:[{type:"text",raw:n,text:n}]}}}url(e){let t;if(t=this.rules.inline.url.exec(e)){let n,s;if(t[2]==="@")n=t[0],s="mailto:"+n;else{let i;do i=t[0],t[0]=this.rules.inline._backpedal.exec(t[0])?.[0]??"";while(i!==t[0]);n=t[0],t[1]==="www."?s="http://"+t[0]:s=t[0]}return{type:"link",raw:t[0],text:n,href:s,tokens:[{type:"text",raw:n,text:n}]}}}inlineText(e){let t=this.rules.inline.text.exec(e);if(t){let n=this.lexer.state.inRawBlock;return{type:"text",raw:t[0],text:t[0],escaped:n}}}};var x=class 
l{tokens;options;state;tokenizer;inlineQueue;constructor(e){this.tokens=[],this.tokens.links=Object.create(null),this.options=e||w,this.options.tokenizer=this.options.tokenizer||new S,this.tokenizer=this.options.tokenizer,this.tokenizer.options=this.options,this.tokenizer.lexer=this,this.inlineQueue=[],this.state={inLink:!1,inRawBlock:!1,top:!0};let t={other:m,block:B.normal,inline:P.normal};this.options.pedantic?(t.block=B.pedantic,t.inline=P.pedantic):this.options.gfm&&(t.block=B.gfm,this.options.breaks?t.inline=P.breaks:t.inline=P.gfm),this.tokenizer.rules=t}static get rules(){return{block:B,inline:P}}static lex(e,t){return new l(t).lex(e)}static lexInline(e,t){return new l(t).inlineTokens(e)}lex(e){e=e.replace(m.carriageReturn,` +`),this.blockTokens(e,this.tokens);for(let t=0;t(s=r.call({lexer:this},e,t))?(e=e.substring(s.raw.length),t.push(s),!0):!1))continue;if(s=this.tokenizer.space(e)){e=e.substring(s.raw.length);let r=t.at(-1);s.raw.length===1&&r!==void 0?r.raw+=` +`:t.push(s);continue}if(s=this.tokenizer.code(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="paragraph"||r?.type==="text"?(r.raw+=` +`+s.raw,r.text+=` +`+s.text,this.inlineQueue.at(-1).src=r.text):t.push(s);continue}if(s=this.tokenizer.fences(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.heading(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.hr(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.blockquote(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.list(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.html(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.def(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="paragraph"||r?.type==="text"?(r.raw+=` +`+s.raw,r.text+=` 
+`+s.raw,this.inlineQueue.at(-1).src=r.text):this.tokens.links[s.tag]||(this.tokens.links[s.tag]={href:s.href,title:s.title});continue}if(s=this.tokenizer.table(e)){e=e.substring(s.raw.length),t.push(s);continue}if(s=this.tokenizer.lheading(e)){e=e.substring(s.raw.length),t.push(s);continue}let i=e;if(this.options.extensions?.startBlock){let r=1/0,o=e.slice(1),a;this.options.extensions.startBlock.forEach(c=>{a=c.call({lexer:this},o),typeof a=="number"&&a>=0&&(r=Math.min(r,a))}),r<1/0&&r>=0&&(i=e.substring(0,r+1))}if(this.state.top&&(s=this.tokenizer.paragraph(i))){let r=t.at(-1);n&&r?.type==="paragraph"?(r.raw+=` +`+s.raw,r.text+=` +`+s.text,this.inlineQueue.pop(),this.inlineQueue.at(-1).src=r.text):t.push(s),n=i.length!==e.length,e=e.substring(s.raw.length);continue}if(s=this.tokenizer.text(e)){e=e.substring(s.raw.length);let r=t.at(-1);r?.type==="text"?(r.raw+=` +`+s.raw,r.text+=` +`+s.text,this.inlineQueue.pop(),this.inlineQueue.at(-1).src=r.text):t.push(s);continue}if(e){let r="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(r);break}else throw new Error(r)}}return this.state.top=!0,t}inline(e,t=[]){return this.inlineQueue.push({src:e,tokens:t}),t}inlineTokens(e,t=[]){let n=e,s=null;if(this.tokens.links){let o=Object.keys(this.tokens.links);if(o.length>0)for(;(s=this.tokenizer.rules.inline.reflinkSearch.exec(n))!=null;)o.includes(s[0].slice(s[0].lastIndexOf("[")+1,-1))&&(n=n.slice(0,s.index)+"["+"a".repeat(s[0].length-2)+"]"+n.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex))}for(;(s=this.tokenizer.rules.inline.anyPunctuation.exec(n))!=null;)n=n.slice(0,s.index)+"++"+n.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);for(;(s=this.tokenizer.rules.inline.blockSkip.exec(n))!=null;)n=n.slice(0,s.index)+"["+"a".repeat(s[0].length-2)+"]"+n.slice(this.tokenizer.rules.inline.blockSkip.lastIndex);let i=!1,r="";for(;e;){i||(r=""),i=!1;let 
o;if(this.options.extensions?.inline?.some(c=>(o=c.call({lexer:this},e,t))?(e=e.substring(o.raw.length),t.push(o),!0):!1))continue;if(o=this.tokenizer.escape(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.tag(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.link(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.reflink(e,this.tokens.links)){e=e.substring(o.raw.length);let c=t.at(-1);o.type==="text"&&c?.type==="text"?(c.raw+=o.raw,c.text+=o.text):t.push(o);continue}if(o=this.tokenizer.emStrong(e,n,r)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.codespan(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.br(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.del(e)){e=e.substring(o.raw.length),t.push(o);continue}if(o=this.tokenizer.autolink(e)){e=e.substring(o.raw.length),t.push(o);continue}if(!this.state.inLink&&(o=this.tokenizer.url(e))){e=e.substring(o.raw.length),t.push(o);continue}let a=e;if(this.options.extensions?.startInline){let c=1/0,p=e.slice(1),u;this.options.extensions.startInline.forEach(d=>{u=d.call({lexer:this},p),typeof u=="number"&&u>=0&&(c=Math.min(c,u))}),c<1/0&&c>=0&&(a=e.substring(0,c+1))}if(o=this.tokenizer.inlineText(a)){e=e.substring(o.raw.length),o.raw.slice(-1)!=="_"&&(r=o.raw.slice(-1)),i=!0;let c=t.at(-1);c?.type==="text"?(c.raw+=o.raw,c.text+=o.text):t.push(o);continue}if(e){let c="Infinite loop on byte: "+e.charCodeAt(0);if(this.options.silent){console.error(c);break}else throw new Error(c)}}return t}};var $=class{options;parser;constructor(e){this.options=e||w}space(e){return""}code({text:e,lang:t,escaped:n}){let s=(t||"").match(m.notSpaceStart)?.[0],i=e.replace(m.endingNewline,"")+` +`;return s?'
'+(n?i:R(i,!0))+`
+`:"
"+(n?i:R(i,!0))+`
+`}blockquote({tokens:e}){return`
+${this.parser.parse(e)}
+`}html({text:e}){return e}heading({tokens:e,depth:t}){return`${this.parser.parseInline(e)} +`}hr(e){return`
+`}list(e){let t=e.ordered,n=e.start,s="";for(let o=0;o +`+s+" +`}listitem(e){let t="";if(e.task){let n=this.checkbox({checked:!!e.checked});e.loose?e.tokens[0]?.type==="paragraph"?(e.tokens[0].text=n+" "+e.tokens[0].text,e.tokens[0].tokens&&e.tokens[0].tokens.length>0&&e.tokens[0].tokens[0].type==="text"&&(e.tokens[0].tokens[0].text=n+" "+R(e.tokens[0].tokens[0].text),e.tokens[0].tokens[0].escaped=!0)):e.tokens.unshift({type:"text",raw:n+" ",text:n+" ",escaped:!0}):t+=n+" "}return t+=this.parser.parse(e.tokens,!!e.loose),`
  • ${t}
  • +`}checkbox({checked:e}){return"'}paragraph({tokens:e}){return`

    ${this.parser.parseInline(e)}

    +`}table(e){let t="",n="";for(let i=0;i${s}`),` + +`+t+` +`+s+`
    +`}tablerow({text:e}){return` +${e} +`}tablecell(e){let t=this.parser.parseInline(e.tokens),n=e.header?"th":"td";return(e.align?`<${n} align="${e.align}">`:`<${n}>`)+t+` +`}strong({tokens:e}){return`${this.parser.parseInline(e)}`}em({tokens:e}){return`${this.parser.parseInline(e)}`}codespan({text:e}){return`${R(e,!0)}`}br(e){return"
    "}del({tokens:e}){return`${this.parser.parseInline(e)}`}link({href:e,title:t,tokens:n}){let s=this.parser.parseInline(n),i=V(e);if(i===null)return s;e=i;let r='
    ",r}image({href:e,title:t,text:n,tokens:s}){s&&(n=this.parser.parseInline(s,this.parser.textRenderer));let i=V(e);if(i===null)return R(n);e=i;let r=`${n}{let o=i[r].flat(1/0);n=n.concat(this.walkTokens(o,t))}):i.tokens&&(n=n.concat(this.walkTokens(i.tokens,t)))}}return n}use(...e){let t=this.defaults.extensions||{renderers:{},childTokens:{}};return e.forEach(n=>{let s={...n};if(s.async=this.defaults.async||s.async||!1,n.extensions&&(n.extensions.forEach(i=>{if(!i.name)throw new Error("extension name required");if("renderer"in i){let r=t.renderers[i.name];r?t.renderers[i.name]=function(...o){let a=i.renderer.apply(this,o);return a===!1&&(a=r.apply(this,o)),a}:t.renderers[i.name]=i.renderer}if("tokenizer"in i){if(!i.level||i.level!=="block"&&i.level!=="inline")throw new Error("extension level must be 'block' or 'inline'");let r=t[i.level];r?r.unshift(i.tokenizer):t[i.level]=[i.tokenizer],i.start&&(i.level==="block"?t.startBlock?t.startBlock.push(i.start):t.startBlock=[i.start]:i.level==="inline"&&(t.startInline?t.startInline.push(i.start):t.startInline=[i.start]))}"childTokens"in i&&i.childTokens&&(t.childTokens[i.name]=i.childTokens)}),s.extensions=t),n.renderer){let i=this.defaults.renderer||new $(this.defaults);for(let r in n.renderer){if(!(r in i))throw new Error(`renderer '${r}' does not exist`);if(["options","parser"].includes(r))continue;let o=r,a=n.renderer[o],c=i[o];i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u||""}}s.renderer=i}if(n.tokenizer){let i=this.defaults.tokenizer||new S(this.defaults);for(let r in n.tokenizer){if(!(r in i))throw new Error(`tokenizer '${r}' does not exist`);if(["options","rules","lexer"].includes(r))continue;let o=r,a=n.tokenizer[o],c=i[o];i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u}}s.tokenizer=i}if(n.hooks){let i=this.defaults.hooks||new L;for(let r in n.hooks){if(!(r in i))throw new Error(`hook '${r}' does not exist`);if(["options","block"].includes(r))continue;let 
o=r,a=n.hooks[o],c=i[o];L.passThroughHooks.has(r)?i[o]=p=>{if(this.defaults.async)return Promise.resolve(a.call(i,p)).then(d=>c.call(i,d));let u=a.call(i,p);return c.call(i,u)}:i[o]=(...p)=>{let u=a.apply(i,p);return u===!1&&(u=c.apply(i,p)),u}}s.hooks=i}if(n.walkTokens){let i=this.defaults.walkTokens,r=n.walkTokens;s.walkTokens=function(o){let a=[];return a.push(r.call(this,o)),i&&(a=a.concat(i.call(this,o))),a}}this.defaults={...this.defaults,...s}}),this}setOptions(e){return this.defaults={...this.defaults,...e},this}lexer(e,t){return x.lex(e,t??this.defaults)}parser(e,t){return b.parse(e,t??this.defaults)}parseMarkdown(e){return(n,s)=>{let i={...s},r={...this.defaults,...i},o=this.onError(!!r.silent,!!r.async);if(this.defaults.async===!0&&i.async===!1)return o(new Error("marked(): The async option was set to true by an extension. Remove async: false from the parse options object to return a Promise."));if(typeof n>"u"||n===null)return o(new Error("marked(): input parameter is undefined or null"));if(typeof n!="string")return o(new Error("marked(): input parameter is of type "+Object.prototype.toString.call(n)+", string expected"));r.hooks&&(r.hooks.options=r,r.hooks.block=e);let a=r.hooks?r.hooks.provideLexer():e?x.lex:x.lexInline,c=r.hooks?r.hooks.provideParser():e?b.parse:b.parseInline;if(r.async)return Promise.resolve(r.hooks?r.hooks.preprocess(n):n).then(p=>a(p,r)).then(p=>r.hooks?r.hooks.processAllTokens(p):p).then(p=>r.walkTokens?Promise.all(this.walkTokens(p,r.walkTokens)).then(()=>p):p).then(p=>c(p,r)).then(p=>r.hooks?r.hooks.postprocess(p):p).catch(o);try{r.hooks&&(n=r.hooks.preprocess(n));let p=a(n,r);r.hooks&&(p=r.hooks.processAllTokens(p)),r.walkTokens&&this.walkTokens(p,r.walkTokens);let u=c(p,r);return r.hooks&&(u=r.hooks.postprocess(u)),u}catch(p){return o(p)}}}onError(e,t){return n=>{if(n.message+=` +Please report this to https://github.com/markedjs/marked.`,e){let s="

    An error occurred:

    "+R(n.message+"",!0)+"
    ";return t?Promise.resolve(s):s}if(t)return Promise.reject(n);throw n}}};var M=new E;function k(l,e){return M.parse(l,e)}k.options=k.setOptions=function(l){return M.setOptions(l),k.defaults=M.defaults,N(k.defaults),k};k.getDefaults=z;k.defaults=w;k.use=function(...l){return M.use(...l),k.defaults=M.defaults,N(k.defaults),k};k.walkTokens=function(l,e){return M.walkTokens(l,e)};k.parseInline=M.parseInline;k.Parser=b;k.parser=b.parse;k.Renderer=$;k.TextRenderer=_;k.Lexer=x;k.lexer=x.lex;k.Tokenizer=S;k.Hooks=L;k.parse=k;var it=k.options,ot=k.setOptions,lt=k.use,at=k.walkTokens,ct=k.parseInline,pt=k,ut=b.parse,ht=x.lex; + +if(__exports != exports)module.exports = exports;return module.exports}));