diff --git a/api-server/LICENSE b/api-server/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/api-server/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/api-server/README.md b/api-server/README.md
new file mode 100644
index 00000000..2c4e4362
--- /dev/null
+++ b/api-server/README.md
@@ -0,0 +1,237 @@
+# Ilab API Server
+
+## Overview
+
+This is the Ilab API Server, a temporary set of APIs for developing apps and services against [InstructLab](https://github.com/instructlab/). It provides endpoints for model management, data generation, training, job tracking and job logging.
+
+## Quickstart
+
+On a node with CUDA/GPUs and `ilab` in the $PATH, run:
+
+```bash
+go mod download
+go build
+./api-server --taxonomy-path /var/home/cloud-user/.local/share/instructlab/taxonomy/ --cuda --rhelai --vllm
+```
+
+### Prerequisites
+
+- Ensure that the required directories (`base-dir` and `taxonomy-path`) exist and are accessible, and that Go is installed and available in the $PATH.
+
+### Install Dependencies
+
+To install the necessary dependencies, run:
+
+```bash
+go mod download
+```
+
+### Run the Server
+
+#### For macOS with Metal (MPS):
+
+```bash
+go run main.go --base-dir /path/to/base-dir --taxonomy-path /path/to/taxonomy --osx
+```
+
+#### For CUDA-enabled environments:
+
+```bash
+go run main.go --base-dir /path/to/base-dir --taxonomy-path /path/to/taxonomy --cuda
+```
+
+#### For a RHEL AI machine:
+
+- If you're operating on a Red Hat Enterprise Linux AI (RHEL AI) machine and the ilab binary is already available in your $PATH, you don't need to specify `--base-dir`. Additionally, enable CUDA support with `--cuda`.
+
+```bash
+go run main.go --taxonomy-path ~/.local/share/instructlab/taxonomy/ --rhelai --cuda
+```
+
+The `--rhelai` flag indicates that the ilab binary is available in the system's $PATH and does not require a virtual environment.
+When using `--rhelai`, the `--base-dir` flag is not required since the ilab installation is in a known location, at least for now.
+
+### Example command with paths:
+
+Here's an example command for running the server on a macOS machine with Metal support:
+
+```bash
+go run main.go --base-dir /Users/user/code/instructlab --taxonomy-path ~/.local/share/instructlab/taxonomy/ --osx
+```
+
+## API Doc
+
+### Models
+
+#### Get Models
+**Endpoint**: `GET /models`
+Fetches the list of available models.
+
+- **Response**:
+  ```json
+  [
+    {
+      "name": "model-name",
+      "last_modified": "timestamp",
+      "size": "size-string"
+    }
+  ]
+  ```
+
+### Data
+
+#### Get Data
+**Endpoint**: `GET /data`
+Fetches the list of datasets.
+
+- **Response**:
+  ```json
+  [
+    {
+      "dataset": "dataset-name",
+      "created_at": "timestamp",
+      "file_size": "size-string"
+    }
+  ]
+  ```
+
+#### Generate Data
+**Endpoint**: `POST /data/generate`
+Starts a data generation job.
+
+- **Request**: None
+- **Response**:
+  ```json
+  {
+    "job_id": "generated-job-id"
+  }
+  ```
+
+### Jobs
+
+#### List Jobs
+**Endpoint**: `GET /jobs`
+Fetches the list of all jobs.
+ +- **Response**: + ```json + [ + { + "job_id": "job-id", + "status": "running/finished/failed", + "cmd": "command", + "branch": "branch-name", + "start_time": "timestamp", + "end_time": "timestamp" + } + ] + ``` + +#### Job Status +**Endpoint**: `GET /jobs/{job_id}/status` +Fetches the status of a specific job. + +- **Response**: + ```json + { + "job_id": "job-id", + "status": "running/finished/failed", + "branch": "branch-name", + "command": "command" + } + ``` + +#### Job Logs +**Endpoint**: `GET /jobs/{job_id}/logs` +Fetches the logs of a specific job. + +- **Response**: Text logs of the job. + +### Training + +#### Start Training +**Endpoint**: `POST /model/train` +Starts a training job. + +- **Request**: + ```json + { + "modelName": "name-of-the-model", + "branchName": "name-of-the-branch" + } + ``` + + **Note**: The `modelName` can be provided **with or without** the `models/` prefix. Examples: + + - Without prefix: `"granite-7b-lab-Q4_K_M.gguf"` + - With prefix: `"models/granite-7b-starter"` + + The server will handle the prefix to construct the correct model path. + +- **Response**: + ```json + { + "job_id": "training-job-id" + } + ``` + +### Pipeline + +#### Generate and Train Pipeline +**Endpoint**: `POST /pipeline/generate-train` +Combines data generation and training into a single pipeline job. + +- **Request**: + ```json + { + "modelName": "name-of-the-model", + "branchName": "name-of-the-branch" + } + ``` + + **Note**: Similar to the training endpoint, `modelName` can be with or without the `models/` prefix. + +- **Response**: + ```json + { + "pipeline_job_id": "pipeline-job-id" + } + ``` + +### Model Serving + +#### Serve Latest Checkpoint +**Endpoint**: `POST /model/serve-latest` +Serves the latest model checkpoint on port `8001`. + +- **Response**: + ```json + { + "status": "model process started", + "job_id": "serve-job-id" + } + ``` + +#### Serve Base Model +**Endpoint**: `POST /model/serve-base` +Serves the base model on port `8000`. + +- **Response**: + ```json + { + "status": "model process started", + "job_id": "serve-job-id" + } + ``` + +## Handling Model Names with or without `models/` Prefix + +The server is designed to handle `modelName` inputs **both with and without** the `models/` prefix to prevent path duplication. 
Here’s how it works: + +- **Without Prefix**: + - **Input**: `"granite-7b-lab-Q4_K_M.gguf"` + - **Constructed Path**: `~/.cache/instructlab/models/granite-7b-lab-Q4_K_M.gguf` + +- **With Prefix**: + - **Input**: `"models/granite-7b-starter"` + - **Constructed Path**: `~/.cache/instructlab/models/granite-7b-starter` diff --git a/api-server/go.mod b/api-server/go.mod new file mode 100644 index 00000000..370b0592 --- /dev/null +++ b/api-server/go.mod @@ -0,0 +1,14 @@ +module ilab-api-router + +go 1.21.6 + +require ( + github.com/google/uuid v1.6.0 + github.com/gorilla/mux v1.8.1 + github.com/spf13/cobra v1.8.1 +) + +require ( + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect +) diff --git a/api-server/go.sum b/api-server/go.sum new file mode 100644 index 00000000..74d83707 --- /dev/null +++ b/api-server/go.sum @@ -0,0 +1,14 @@ +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/api-server/main.go b/api-server/main.go new file mode 100644 index 00000000..e3c2e58a --- /dev/null +++ b/api-server/main.go @@ -0,0 +1,1767 @@ +package main + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "log" + "net/http" + "os" + "os/exec" + "path/filepath" + "strings" + "sync" + "syscall" + "time" + + "github.com/gorilla/mux" + "github.com/spf13/cobra" +) + +type Model struct { + Name string `json:"name"` + LastModified string `json:"last_modified"` + Size string `json:"size"` +} + +type Data struct { + Dataset string `json:"dataset"` + CreatedAt string `json:"created_at"` + FileSize string `json:"file_size"` +} + +type Job struct { + JobID string `json:"job_id"` + Cmd string `json:"cmd"` + Args []string `json:"args"` + Status string `json:"status"` // "running", "finished", "failed" + PID int `json:"pid"` + LogFile string `json:"log_file"` + StartTime time.Time `json:"start_time"` + EndTime *time.Time `json:"end_time,omitempty"` + Branch string `json:"branch"` + Lock sync.Mutex `json:"-"` +} + +// ModelCache encapsulates the cached models and related metadata. 
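+// The cache is populated at startup and refreshed by a background goroutine
+// roughly every 20 minutes (see initializeModelCache and refreshModelCache).
+// Mutex guards concurrent access from HTTP handlers and the refresh goroutine.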
+type ModelCache struct { + Models []Model + Time time.Time + Mutex sync.Mutex +} + +type QnaEvalRequest struct { + ModelPath string `json:"model_path"` + YamlFile string `json:"yaml_file"` +} + +type VllmContainerResponse struct { + Containers []VllmContainer `json:"containers"` +} + +type UnloadModelRequest struct { + ModelName string `json:"model_name"` // Expected values: "pre-train" or "post-train" +} + +var ( + baseDir string + taxonomyPath string + rhelai bool + ilabCmd string + isOSX bool + isCuda bool + useVllm bool + pipelineType string + jobs = make(map[string]*Job) + jobsLock = sync.Mutex{} + modelLock = sync.Mutex{} + modelProcessBase *exec.Cmd // Process for base model + modelProcessLatest *exec.Cmd // Process for latest model + baseModel = "instructlab/granite-7b-lab" + servedModelJobIDs = make(map[string]string) // Maps "pre-train"/"post-train" => jobID + // Cache variables + modelCache = ModelCache{} +) + +const jobsFile = "jobs.json" + +func main() { + rootCmd := &cobra.Command{ + Use: "ilab-server", + Short: "ILab Server Application", + Run: runServer, + } + + rootCmd.Flags().BoolVar(&rhelai, "rhelai", false, "Use ilab binary from PATH instead of Python virtual environment") + rootCmd.Flags().StringVar(&baseDir, "base-dir", "", "Base directory for ilab operations (required if --rhelai is not set)") + rootCmd.Flags().StringVar(&taxonomyPath, "taxonomy-path", "", "Path to the taxonomy repository for Git operations (required)") + rootCmd.Flags().BoolVar(&isOSX, "osx", false, "Enable OSX-specific settings (default: false)") + rootCmd.Flags().BoolVar(&isCuda, "cuda", false, "Enable Cuda (default: false)") + rootCmd.Flags().BoolVar(&useVllm, "vllm", false, "Enable VLLM model serving using podman containers") + rootCmd.Flags().StringVar(&pipelineType, "pipeline", "", "Pipeline type (simple, accelerated, full)") + + // Mark flags as required based on --rhelai + rootCmd.PreRunE = func(cmd *cobra.Command, args []string) error { + if !rhelai && baseDir == "" { + return fmt.Errorf("--base-dir is required unless --rhelai is set") + } + if taxonomyPath == "" { + return fmt.Errorf("--taxonomy-path is required") + } + + // Validate or set pipelineType based on --rhelai + if !rhelai { + if pipelineType == "" { + return fmt.Errorf("--pipeline is required unless --rhelai is set") + } + switch pipelineType { + case "simple", "full", "accelerated": + // Valid pipeline types + default: + return fmt.Errorf("--pipeline must be 'simple', 'accelerated' or 'full'; got '%s'", pipelineType) + } + } else { + // When --rhelai is set and --pipeline is not provided, set a default pipelineType + if pipelineType == "" { + pipelineType = "accelerated" // Default pipeline when --rhelai is enabled + log.Println("--rhelai is set; defaulting --pipeline to 'accelerated'") + } else { + // If pipelineType is provided, validate it + switch pipelineType { + case "simple", "full", "accelerated": + // Valid pipeline types + default: + return fmt.Errorf("--pipeline must be 'simple', 'accelerated' or 'full'; got '%s'", pipelineType) + } + } + } + + return nil + } + + if err := rootCmd.Execute(); err != nil { + log.Fatalf("Error executing command: %v", err) + } +} + +func runServer(cmd *cobra.Command, args []string) { + // Determine ilab command path + if rhelai { + // Use ilab from PATH + ilabPath, err := exec.LookPath("ilab") + if err != nil { + log.Fatalf("ilab binary not found in PATH. 
Please ensure ilab is installed and in your PATH.") + } + ilabCmd = ilabPath + } else { + // Use ilab from virtual environment + ilabCmd = filepath.Join(baseDir, "venv", "bin", "ilab") + if _, err := os.Stat(ilabCmd); os.IsNotExist(err) { + log.Fatalf("ilab binary not found at %s. Please ensure the virtual environment is set up correctly.", ilabCmd) + } + } + + log.Printf("Using ilab command: %s", ilabCmd) + + // Validate mandatory arguments if not using rhelai + if !rhelai { + if _, err := os.Stat(baseDir); os.IsNotExist(err) { + log.Fatalf("Base directory does not exist: %s", baseDir) + } + } + + if _, err := os.Stat(taxonomyPath); os.IsNotExist(err) { + log.Fatalf("Taxonomy path does not exist: %s", taxonomyPath) + } + + log.Printf("Running with baseDir=%s, taxonomyPath=%s, isOSX=%v, isCuda=%v, useVllm=%v, pipeline=%s", + baseDir, taxonomyPath, isOSX, isCuda, useVllm, pipelineType) + log.Printf("Current working directory: %s", mustGetCwd()) + + // Load existing jobs from file + loadJobs() + + // Check statuses of running jobs from previous sessions + checkRunningJobs() + + // Initialize the model cache + initializeModelCache() + + // Create the logs directory if it doesn't exist + err := os.MkdirAll("logs", os.ModePerm) + if err != nil { + log.Fatalf("Failed to create logs directory: %v", err) + } + + // Setup HTTP routes + r := mux.NewRouter() + r.HandleFunc("/models", getModels).Methods("GET") + r.HandleFunc("/data", getData).Methods("GET") + r.HandleFunc("/data/generate", generateData).Methods("POST") + r.HandleFunc("/model/train", trainModel).Methods("POST") + r.HandleFunc("/jobs/{job_id}/status", getJobStatus).Methods("GET") + r.HandleFunc("/jobs/{job_id}/logs", getJobLogs).Methods("GET") + r.HandleFunc("/jobs", listJobs).Methods("GET") + r.HandleFunc("/pipeline/generate-train", generateTrainPipeline).Methods("POST") + r.HandleFunc("/model/serve-latest", serveLatestCheckpoint).Methods("POST") + r.HandleFunc("/model/serve-base", serveBaseModel).Methods("POST") + r.HandleFunc("/qna-eval", runQnaEval).Methods("POST") + r.HandleFunc("/checkpoints", listCheckpoints).Methods("GET") + r.HandleFunc("/vllm-containers", listVllmContainersHandler).Methods("GET") + r.HandleFunc("/vllm-unload", unloadVllmContainerHandler).Methods("POST") + r.HandleFunc("/vllm-status", getVllmStatusHandler).Methods("GET") + r.HandleFunc("/gpu-free", getGpuFreeHandler).Methods("GET") + + // Start the server with logging + log.Printf("Server starting on port 8080... (Taxonomy path: %s)", taxonomyPath) + if err := http.ListenAndServe("0.0.0.0:8080", r); err != nil { + log.Fatalf("Server failed to start: %v", err) + } +} + +// sanitizeModelName checks if the modelName starts with "model/" and replaces it with "models/". +func sanitizeModelName(modelName string) string { + if strings.HasPrefix(modelName, "model/") { + return strings.Replace(modelName, "model/", "models/", 1) + } + return modelName +} + +// mustGetCwd returns the current working directory or "unknown" if it fails. 
+func mustGetCwd() string { + cwd, err := os.Getwd() + if err != nil { + return "unknown" + } + return cwd +} + +// Load jobs from the jobs.json file +func loadJobs() { + jobsLock.Lock() + defer jobsLock.Unlock() + + if _, err := os.Stat(jobsFile); os.IsNotExist(err) { + // No jobs file exists + return + } + + data, err := ioutil.ReadFile(jobsFile) + if err != nil { + log.Printf("Error reading jobs file: %v", err) + return + } + + err = json.Unmarshal(data, &jobs) + if err != nil { + log.Printf("Error unmarshalling jobs data: %v", err) + return + } + + log.Printf("Loaded %d jobs from %s", len(jobs), jobsFile) +} + +// Save jobs to the jobs.json file +func saveJobs() { + jobsLock.Lock() + defer jobsLock.Unlock() + + data, err := json.MarshalIndent(jobs, "", " ") + if err != nil { + log.Printf("Error marshalling jobs data: %v", err) + return + } + + err = ioutil.WriteFile(jobsFile, data, 0644) + if err != nil { + log.Printf("Error writing jobs file: %v", err) + } +} + +// Check the status of running jobs after server restart +func checkRunningJobs() { + jobsLock.Lock() + changed := false + for _, job := range jobs { + if job.Status == "running" { + // Check if the process is still running + processRunning := isProcessRunning(job.PID) + if !processRunning { + job.Status = "failed" + changed = true + log.Printf("Job %s marked as failed (process not running)", job.JobID) + } + } + } + jobsLock.Unlock() + + if changed { + saveJobs() + } +} + +// Check if a process with the given PID is running +func isProcessRunning(pid int) bool { + process, err := os.FindProcess(pid) + if err != nil { + return false + } + err = process.Signal(syscall.Signal(0)) + return err == nil +} + +// getIlabCommand returns the ilab command based on the --rhelai flag +func getIlabCommand() string { + return ilabCmd +} + +// getBaseCacheDir returns the base cache directory path: ~/.cache/instructlab/ +func getBaseCacheDir() (string, error) { + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to get user home directory: %v", err) + } + return filepath.Join(homeDir, ".cache", "instructlab"), nil +} + +// Helper function to get the latest dataset file +func getLatestDatasetFile() (string, error) { + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to get user home directory: %v", err) + } + datasetDir := filepath.Join(homeDir, ".local", "share", "instructlab", "datasets") + files, err := ioutil.ReadDir(datasetDir) + if err != nil { + return "", fmt.Errorf("failed to read dataset directory: %v", err) + } + + var latestFile os.FileInfo + for _, file := range files { + if strings.HasPrefix(file.Name(), "knowledge_train_msgs_") && strings.HasSuffix(file.Name(), ".jsonl") { + if latestFile == nil || file.ModTime().After(latestFile.ModTime()) { + latestFile = file + } + } + } + + if latestFile == nil { + return "", fmt.Errorf("no dataset file found with the prefix 'knowledge_train_msgs_'") + } + return filepath.Join(datasetDir, latestFile.Name()), nil +} + +// Initialize the model cache on server startup and start periodic refresh +func initializeModelCache() { + // Initial cache refresh + refreshModelCache() + + // Start a goroutine to refresh the cache every 20 minutes + go func() { + for { + time.Sleep(20 * time.Minute) + refreshModelCache() + } + }() +} + +// Refresh the model cache if it's older than 20 minutes +// TODO: needs to be more realtime, ilab command delays on RHEL make it problematic so it needs to be cached +// until the delay is resolved in rhelai 
podman caching +func refreshModelCache() { + modelCache.Mutex.Lock() + defer modelCache.Mutex.Unlock() + + // Check if cache is valid + if time.Since(modelCache.Time) < 20*time.Minute && len(modelCache.Models) > 0 { + log.Println("Model cache is still valid; no refresh needed.") + return + } + + log.Println("Refreshing model cache...") + output, err := runIlabCommand("model", "list") + if err != nil { + log.Printf("Error refreshing model cache: %v", err) + return + } + + models, err := parseModelList(output) + if err != nil { + log.Printf("Error parsing model list during cache refresh: %v", err) + return + } + + modelCache.Models = models + modelCache.Time = time.Now() + log.Printf("Model cache refreshed at %v with %d models.", modelCache.Time, len(modelCache.Models)) +} + +// GetModels is the HTTP handler for the /models endpoint. +// It serves cached model data, refreshing the cache if necessary. +func getModels(w http.ResponseWriter, r *http.Request) { + log.Println("GET /models called") + + // Lock the cache for reading + modelCache.Mutex.Lock() + cachedTime := modelCache.Time + cachedModels := make([]Model, len(modelCache.Models)) + copy(cachedModels, modelCache.Models) + modelCache.Mutex.Unlock() + + // Check if cache is valid + if len(cachedModels) > 0 && time.Since(cachedTime) < 20*time.Minute { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(cachedModels); err != nil { + log.Printf("Error encoding cached models: %v", err) + http.Error(w, "Failed to encode models", http.StatusInternalServerError) + return + } + log.Println("GET /models returned cached models.") + return + } + + // If cache is empty or stale, refresh the cache + log.Println("Cache is empty or stale. Refreshing model cache, blocking until complete ~15s...") + refreshModelCache() + + // After refresh, attempt to serve the cache + modelCache.Mutex.Lock() + cachedTime = modelCache.Time + cachedModels = make([]Model, len(modelCache.Models)) + copy(cachedModels, modelCache.Models) + modelCache.Mutex.Unlock() + + if len(cachedModels) > 0 { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(cachedModels); err != nil { + log.Printf("Error encoding refreshed models: %v", err) + http.Error(w, "Failed to encode models", http.StatusInternalServerError) + return + } + log.Println("GET /models returned refreshed models.") + } else { + http.Error(w, "Failed to retrieve models", http.StatusInternalServerError) + log.Println("GET /models failed to retrieve models.") + } +} + +// runIlabCommand executes the ilab command with the provided arguments. +func runIlabCommand(args ...string) (string, error) { + cmdPath := getIlabCommand() + cmd := exec.Command(cmdPath, args...) + if !rhelai { + cmd.Dir = baseDir + } + out, err := cmd.CombinedOutput() + return string(out), err +} + +// parseModelList parses the output of the "ilab model list" command into a slice of Model structs. 
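+// The parser expects a pipe-delimited table from `ilab model list`, roughly of the
+// form below (illustrative only; real column widths and values will differ):
+//
+//	+----------------------------+---------------------+---------+
+//	| Model Name                 | Last Modified       | Size    |
+//	+----------------------------+---------------------+---------+
+//	| granite-7b-lab-Q4_K_M.gguf | 2024-01-01 00:00:00 | 4.1 GB  |
+//	+----------------------------+---------------------+---------+
+//
+// Border rows ("+...") and the header row are skipped; every other row must split
+// into exactly three "|"-separated fields (name, last modified, size).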
+func parseModelList(output string) ([]Model, error) { + var models []Model + lines := strings.Split(output, "\n") + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "+") || strings.HasPrefix(line, "| Model Name") || line == "" { + continue + } + if strings.HasPrefix(line, "|") { + line = strings.Trim(line, "|") + fields := strings.Split(line, "|") + if len(fields) != 3 { + continue + } + model := Model{ + Name: strings.TrimSpace(fields[0]), + LastModified: strings.TrimSpace(fields[1]), + Size: strings.TrimSpace(fields[2]), + } + models = append(models, model) + } + } + return models, nil +} + +// getData is the HTTP handler for the /data endpoint. +func getData(w http.ResponseWriter, r *http.Request) { + log.Println("GET /data called") + output, err := runIlabCommand("data", "list") + if err != nil { + log.Printf("Error running 'ilab data list': %v", err) + http.Error(w, string(output), http.StatusInternalServerError) + return + } + dataList, err := parseDataList(output) + if err != nil { + log.Printf("Error parsing data list: %v", err) + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(dataList) + log.Println("GET /data successful") +} + +// parseDataList parses the output of the "ilab data list" command into a slice of Data structs. +func parseDataList(output string) ([]Data, error) { + var dataList []Data + lines := strings.Split(output, "\n") + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.HasPrefix(line, "+") || strings.HasPrefix(line, "| Dataset") || line == "" { + continue + } + if strings.HasPrefix(line, "|") { + line = strings.Trim(line, "|") + fields := strings.Split(line, "|") + if len(fields) != 3 { + continue + } + data := Data{ + Dataset: strings.TrimSpace(fields[0]), + CreatedAt: strings.TrimSpace(fields[1]), + FileSize: strings.TrimSpace(fields[2]), + } + dataList = append(dataList, data) + } + } + return dataList, nil +} + +// generateData is the HTTP handler for the /data/generate endpoint. +func generateData(w http.ResponseWriter, r *http.Request) { + log.Println("POST /data/generate called") + jobID, err := startGenerateJob() + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{"job_id": jobID}) + log.Printf("POST /data/generate successful, job_id: %s", jobID) +} + +// startGenerateJob starts the data generation job and returns the job ID. +func startGenerateJob() (string, error) { + ilabPath := getIlabCommand() + + //cmdArgs := []string{"data", "generate", "--pipeline", pipelineType} + // TODO: for now, focus on accelerated pipeline. + // Should GPUs be variable or just the default? + cmdArgs := []string{"data", "generate"} + + cmd := exec.Command(ilabPath, cmdArgs...) 
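+    // Outside of RHEL AI, run the command from baseDir, where the ilab virtual environment lives.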
+ + if !rhelai { + cmd.Dir = baseDir + } + + jobID := fmt.Sprintf("g-%d", time.Now().UnixNano()) + logFilePath := filepath.Join("logs", fmt.Sprintf("%s.log", jobID)) + log.Printf("Starting generateData job: %s, logs: %s", jobID, logFilePath) + logFile, err := os.Create(logFilePath) + if err != nil { + log.Printf("Error creating log file: %v", err) + return "", fmt.Errorf("Failed to create log file") + } + + cmd.Stdout = logFile + cmd.Stderr = logFile + + log.Printf("Running command: %s %v", ilabPath, cmdArgs) + if err := cmd.Start(); err != nil { + log.Printf("Error starting data generation command: %v", err) + logFile.Close() + return "", err + } + + job := &Job{ + JobID: jobID, + Cmd: ilabPath, + Args: cmdArgs, + Status: "running", + PID: cmd.Process.Pid, + LogFile: logFilePath, + StartTime: time.Now(), + } + + jobsLock.Lock() + jobs[jobID] = job + jobsLock.Unlock() + + saveJobs() + + go func() { + err := cmd.Wait() + logFile.Close() + + job.Lock.Lock() + defer job.Lock.Unlock() + + if err != nil { + job.Status = "failed" + log.Printf("Job %s failed with error: %v", job.JobID, err) + } else { + if cmd.ProcessState.Success() { + job.Status = "finished" + log.Printf("Job %s finished successfully", job.JobID) + } else { + job.Status = "failed" + log.Printf("Job %s failed", job.JobID) + } + } + + now := time.Now() + job.EndTime = &now + saveJobs() + }() + + return jobID, nil +} + +// trainModel is the HTTP handler for the /model/train endpoint. +func trainModel(w http.ResponseWriter, r *http.Request) { + log.Println("POST /model/train called") + + var reqBody struct { + ModelName string `json:"modelName"` + BranchName string `json:"branchName"` + Epochs *int `json:"epochs,omitempty"` // Optional + } + + // Parse the request body + if err := json.NewDecoder(r.Body).Decode(&reqBody); err != nil { + log.Printf("Error parsing request body: %v", err) + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + log.Printf("Received train request with modelName: '%s', branchName: '%s', epochs: '%v'", + reqBody.ModelName, reqBody.BranchName, reqBody.Epochs) + + // Ensure required fields are provided + if reqBody.ModelName == "" || reqBody.BranchName == "" { + log.Println("Missing required parameters: modelName or branchName") + http.Error(w, "Missing required parameters: modelName or branchName", http.StatusBadRequest) + return + } + + // If epochs is provided, ensure it's a positive integer + if reqBody.Epochs != nil && *reqBody.Epochs <= 0 { + log.Println("Invalid 'epochs' parameter: must be a positive integer") + http.Error(w, "'epochs' must be a positive integer", http.StatusBadRequest) + return + } + + // Sanitize the modelName (still important in some cases) + sanitizedModelName := sanitizeModelName(reqBody.ModelName) + log.Printf("Sanitized modelName: '%s'", sanitizedModelName) + + // Perform Git checkout + gitCheckoutCmd := exec.Command("git", "checkout", reqBody.BranchName) + gitCheckoutCmd.Dir = taxonomyPath + gitOutput, err := gitCheckoutCmd.CombinedOutput() + + log.Printf("Git checkout output: %s", string(gitOutput)) + + if err != nil { + log.Printf("Error checking out branch '%s': %v", reqBody.BranchName, err) + http.Error(w, fmt.Sprintf("Failed to checkout branch '%s': %s", reqBody.BranchName, string(gitOutput)), http.StatusInternalServerError) + return + } + + log.Printf("Successfully checked out branch: '%s'", reqBody.BranchName) + + // Start the training job, passing the sanitized model name, branch name, and epochs + jobID, err := 
startTrainJob(sanitizedModelName, reqBody.BranchName, reqBody.Epochs) + if err != nil { + log.Printf("Error starting train job: %v", err) + http.Error(w, "Failed to start train job", http.StatusInternalServerError) + return + } + + log.Printf("Train job started successfully with job_id: '%s'", jobID) + + // Return the job ID in the response + response := map[string]string{ + "job_id": jobID, + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Error encoding response: %v", err) + http.Error(w, "Failed to send response", http.StatusInternalServerError) + return + } + + log.Println("POST /model/train response sent successfully") +} + +// listVllmContainersHandler handles the GET /vllm-containers endpoint. +func listVllmContainersHandler(w http.ResponseWriter, r *http.Request) { + log.Println("GET /vllm-containers called") + + containers, err := ListVllmContainers() + if err != nil { + log.Printf("Error listing vllm containers: %v", err) + http.Error(w, "Failed to list vllm containers", http.StatusInternalServerError) + return + } + + response := VllmContainerResponse{ + Containers: containers, + } + + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Error encoding vllm containers response: %v", err) + http.Error(w, "Failed to encode response", http.StatusInternalServerError) + return + } + + log.Printf("GET /vllm-containers returned %d containers", len(containers)) +} + +// unloadVllmContainerHandler handles the POST /vllm-unload endpoint. +func unloadVllmContainerHandler(w http.ResponseWriter, r *http.Request) { + log.Println("POST /vllm-unload called") + + var req UnloadModelRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding unload model request: %v", err) + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Validate the model name + modelName := strings.TrimSpace(req.ModelName) + if modelName != "pre-train" && modelName != "post-train" { + log.Printf("Invalid model_name provided: %s", modelName) + http.Error(w, "Invalid model_name. Must be 'pre-train' or 'post-train'", http.StatusBadRequest) + return + } + + // Attempt to stop the vllm container + err := StopVllmContainer(modelName) + if err != nil { + log.Printf("Error unloading model '%s': %v", modelName, err) + http.Error(w, fmt.Sprintf("Failed to unload model '%s': %v", modelName, err), http.StatusInternalServerError) + return + } + + // Respond with success + response := map[string]string{ + "status": "success", + "message": fmt.Sprintf("Model '%s' unloaded successfully", modelName), + "modelName": modelName, + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) + + log.Printf("POST /vllm-unload successfully unloaded model '%s'", modelName) +} + +func getVllmStatusHandler(w http.ResponseWriter, r *http.Request) { + // e.g. 
GET /vllm-status?model_name=pre-train
+    modelName := r.URL.Query().Get("model_name")
+    if modelName != "pre-train" && modelName != "post-train" {
+        http.Error(w, "Invalid model_name (must be 'pre-train' or 'post-train')", http.StatusBadRequest)
+        return
+    }
+
+    // 1) Check if a container with that served-model-name is running
+    containers, err := ListVllmContainers()
+    if err != nil {
+        log.Printf("Error listing vllm containers: %v", err)
+        http.Error(w, "Failed to list vllm containers", http.StatusInternalServerError)
+        return
+    }
+
+    var containerRunning bool
+    for _, c := range containers {
+        if c.ServedModelName == modelName {
+            containerRunning = true
+            break
+        }
+    }
+
+    // If the container is not running, report status "stopped"
+    if !containerRunning {
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "stopped"})
+        return
+    }
+
+    // Container is running; locate the job logs for that container.
+    jobsLock.Lock()
+    jobID, ok := servedModelJobIDs[modelName]
+    jobsLock.Unlock()
+    if !ok {
+        // If no job ID is stored for this model, report "loading"
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "loading"})
+        return
+    }
+
+    // Retrieve the job to find the log file
+    jobsLock.Lock()
+    job, exists := jobs[jobID]
+    jobsLock.Unlock()
+    if !exists {
+        // If the job record no longer exists, report "loading"
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "loading"})
+        return
+    }
+
+    // Attempt to see if the log contains "Uvicorn running on"
+    if job.LogFile == "" {
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "loading"})
+        return
+    }
+
+    logBytes, err := ioutil.ReadFile(job.LogFile)
+    if err != nil {
+        // If there's an error reading logs, treat it as "loading"
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "loading"})
+        return
+    }
+
+    logContent := string(logBytes)
+    if strings.Contains(logContent, "Uvicorn running on") {
+        // if found => "running"
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "running"})
+    } else {
+        // not found => "loading"
+        w.Header().Set("Content-Type", "application/json")
+        json.NewEncoder(w).Encode(map[string]string{"status": "loading"})
+    }
+}
+
+// getGpuFreeHandler checks how many GPUs are "free" based on nvidia-smi output.
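+// A GPU is counted as free when `nvidia-smi --query-gpu=memory.used --format=csv,noheader`
+// reports roughly 1 MiB in use for it; the handler returns both the free and total GPU counts.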
+func getGpuFreeHandler(w http.ResponseWriter, r *http.Request) { + log.Println("GET /gpu-free called") + + cmd := exec.Command("nvidia-smi", "--query-gpu=memory.used", "--format=csv,noheader") + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + + err := cmd.Run() + if err != nil { + log.Printf("Error running nvidia-smi: %v, stderr: %s", err, stderr.String()) + w.Header().Set("Content-Type", "application/json") + // Return zero for both free_gpus & total_gpus on error + json.NewEncoder(w).Encode(map[string]int{"free_gpus": 0, "total_gpus": 0}) + return + } + + lines := strings.Split(strings.TrimSpace(out.String()), "\n") + freeCount := 0 + + // Track total GPUs + totalCount := 0 + + for _, line := range lines { + line = strings.TrimSpace(line) + if line == "" { + continue + } + // Increment total GPUs for each non-empty line + totalCount++ + + // If it's "1 MiB" we consider that GPU free + if strings.HasPrefix(line, "1 ") { + freeCount++ + } + } + + // Return both free_gpus and total_gpus + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]int{ + "free_gpus": freeCount, + "total_gpus": totalCount, + }) + + log.Printf("GET /gpu-free => free_gpus=%d, total_gpus=%d", freeCount, totalCount) +} + +// startTrainJob starts a training job with the given parameters. +func startTrainJob(modelName, branchName string, epochs *int) (string, error) { + log.Printf("Starting training job for model: '%s', branch: '%s'", modelName, branchName) + + // Generate a unique job ID + jobID := fmt.Sprintf("t-%d", time.Now().UnixNano()) + logFilePath := filepath.Join("logs", fmt.Sprintf("%s.log", jobID)) + + // Get the full model path. This ensures ~/.cache/instructlab/models/... is always used. + fullModelPath, err := getFullModelPath(modelName) + if err != nil { + return "", fmt.Errorf("failed to get full model path: %v", err) + } + log.Printf("Resolved fullModelPath: '%s'", fullModelPath) + + // Ensure the model directory exists (e.g., ~/.cache/instructlab/models/...) + modelDir := filepath.Dir(fullModelPath) + if err := os.MkdirAll(modelDir, os.ModePerm); err != nil { + return "", fmt.Errorf("failed to create model directory '%s': %v", modelDir, err) + } + + ilabPath := getIlabCommand() + + // Initialize command arguments + cmdArgs := []string{ + "model", "train", + } + + // Conditionally add the --pipeline argument only if not rhelai and pipelineType is set + if !rhelai && pipelineType != "" { + cmdArgs = append(cmdArgs, "--pipeline", pipelineType) + } + + // Always include --model-path + cmdArgs = append(cmdArgs, fmt.Sprintf("--model-path=%s", fullModelPath)) + + // Append device flags based on configuration + if isOSX { + cmdArgs = append(cmdArgs, "--device=mps") + } + if isCuda { + cmdArgs = append(cmdArgs, "--device=cuda") + } + + // Conditionally add the --num-epochs flag if epochs is specified + if epochs != nil { + cmdArgs = append(cmdArgs, fmt.Sprintf("--num-epochs=%d", *epochs)) + log.Printf("Number of epochs specified: %d", *epochs) + } else { + log.Println("No epochs specified; using default number of epochs.") + } + + // ------------------------------------------------------------------------- + // SPECIAL LOGIC for pipelineType == "simple" (when not rhelai). 
+ // 1) Copy the latest "knowledge_train_msgs_*.jsonl" => train_gen.jsonl + // 2) Copy the latest "test_ggml-model-*.jsonl" => test_gen.jsonl + // 3) Pass only the dataset directory to --data-path + // ------------------------------------------------------------------------- + if pipelineType == "simple" && !rhelai { + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to get user home directory: %v", err) + } + datasetDir := filepath.Join(homeDir, ".local", "share", "instructlab", "datasets") + + // 1) Find the latest "knowledge_train_msgs_*.jsonl" + latestTrainFile, err := findLatestFileWithPrefix(datasetDir, "knowledge_train_msgs_") + if err != nil { + return "", fmt.Errorf("failed to find knowledge_train_msgs_*.jsonl file: %v", err) + } + // Copy it to train_gen.jsonl + trainGenPath := filepath.Join(datasetDir, "train_gen.jsonl") + if err := overwriteCopy(latestTrainFile, trainGenPath); err != nil { + return "", fmt.Errorf("failed to copy %s to %s: %v", latestTrainFile, trainGenPath, err) + } + + // 2) Find the latest "test_ggml-model-*.jsonl" + latestTestFile, err := findLatestFileWithPrefix(datasetDir, "test_ggml-model") + if err != nil { + return "", fmt.Errorf("failed to find test_ggml-model*.jsonl file: %v", err) + } + // Copy it to test_gen.jsonl + testGenPath := filepath.Join(datasetDir, "test_gen.jsonl") + if err := overwriteCopy(latestTestFile, testGenPath); err != nil { + return "", fmt.Errorf("failed to copy %s to %s: %v", latestTestFile, testGenPath, err) + } + + // Finally, pass only the dataset directory to --data-path + cmdArgs = []string{ + "model", "train", + "--pipeline", pipelineType, + fmt.Sprintf("--data-path=%s", datasetDir), + fmt.Sprintf("--model-path=%s", fullModelPath), + } + if isOSX { + cmdArgs = append(cmdArgs, "--device=mps") + } + if isCuda { + cmdArgs = append(cmdArgs, "--device=cuda") + } + + // Re-apply the epoch flag if epochs were specified + if epochs != nil { + cmdArgs = append(cmdArgs, fmt.Sprintf("--num-epochs=%d", *epochs)) + log.Printf("Number of epochs specified for simple pipeline: %d", *epochs) + } else { + log.Println("No epochs specified for simple pipeline; using default number of epochs.") + } + } + + // Handle rhelai-specific logic + if rhelai { + latestDataset, err := getLatestDatasetFile() + if err != nil { + return "", fmt.Errorf("failed to get latest dataset file: %v", err) + } + cmdArgs = []string{ + "model", "train", + fmt.Sprintf("--data-path=%s", latestDataset), + "--max-batch-len=5000", + "--gpus=4", + "--device=cuda", + "--save-samples=1000", + fmt.Sprintf("--model-path=%s", fullModelPath), + "--pipeline", pipelineType, // Include the pipelineType set in PreRunE + } + if epochs != nil { + cmdArgs = append(cmdArgs, fmt.Sprintf("--num-epochs=%d", *epochs)) + log.Printf("Number of epochs specified for rhelai pipeline: %d", *epochs) + } else { + log.Println("No epochs specified for rhelai pipeline; using default number of epochs.") + } + } + + log.Printf("[ILAB TRAIN COMMAND] %s %v", ilabPath, cmdArgs) + + // Create the exec.Command + cmd := exec.Command(ilabPath, cmdArgs...) 
+ if !rhelai { + cmd.Dir = baseDir + } + + logFile, err := os.Create(logFilePath) + if err != nil { + return "", fmt.Errorf("failed to create log file '%s': %v", logFilePath, err) + } + defer logFile.Close() + + // Redirect command output to log file + cmd.Stdout = logFile + cmd.Stderr = logFile + + // Start the command + if err := cmd.Start(); err != nil { + return "", fmt.Errorf("error starting training command: %v", err) + } + log.Printf("Training process started with PID: %d", cmd.Process.Pid) + + // Save job details + job := &Job{ + JobID: jobID, + Cmd: ilabPath, + Args: cmdArgs, + Status: "running", + PID: cmd.Process.Pid, + LogFile: logFilePath, + Branch: branchName, + StartTime: time.Now(), + } + + jobsLock.Lock() + jobs[jobID] = job + jobsLock.Unlock() + saveJobs() + + // Wait for process completion in a goroutine + go func() { + err := cmd.Wait() + logFile.Close() + + job.Lock.Lock() + defer job.Lock.Unlock() + + if err != nil { + job.Status = "failed" + log.Printf("Training job '%s' failed: %v", job.JobID, err) + } else if cmd.ProcessState.Success() { + job.Status = "finished" + log.Printf("Training job '%s' finished successfully", job.JobID) + } else { + job.Status = "failed" + log.Printf("Training job '%s' failed (unknown reason)", job.JobID) + } + + now := time.Now() + job.EndTime = &now + saveJobs() + }() + + return jobID, nil +} + +// getJobStatus is the HTTP handler for the /jobs/{job_id}/status endpoint. +func getJobStatus(w http.ResponseWriter, r *http.Request) { + vars := mux.Vars(r) + jobID := vars["job_id"] + log.Printf("GET /jobs/%s/status called", jobID) + jobsLock.Lock() + job, exists := jobs[jobID] + jobsLock.Unlock() + if !exists { + log.Printf("Job %s not found", jobID) + http.Error(w, "Job not found", http.StatusNotFound) + return + } + job.Lock.Lock() + status := job.Status + job.Lock.Unlock() + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]interface{}{ + "job_id": job.JobID, + "status": job.Status, + "branch": job.Branch, + "command": job.Cmd, + }) + log.Printf("GET /jobs/%s/status successful, status: %s", jobID, status) +} + +// listJobs is the HTTP handler for the /jobs endpoint. +func listJobs(w http.ResponseWriter, r *http.Request) { + log.Println("GET /jobs called") + jobsLock.Lock() + defer jobsLock.Unlock() + var jobList []Job + for _, job := range jobs { + job.Lock.Lock() + jobCopy := *job + job.Lock.Unlock() + jobList = append(jobList, jobCopy) + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(jobList) +} + +// getJobLogs is the HTTP handler for the /jobs/{job_id}/logs endpoint. 
+func getJobLogs(w http.ResponseWriter, r *http.Request) { + vars := mux.Vars(r) + jobID := vars["job_id"] + log.Printf("GET /jobs/%s/logs called", jobID) + + jobsLock.Lock() + job, exists := jobs[jobID] + jobsLock.Unlock() + + if !exists { + log.Printf("Job %s not found", jobID) + http.Error(w, "Job not found", http.StatusNotFound) + return + } + + if _, err := os.Stat(job.LogFile); os.IsNotExist(err) { + log.Printf("Log file for job %s not found", jobID) + http.Error(w, "Log file not found", http.StatusNotFound) + return + } + + logContent, err := ioutil.ReadFile(job.LogFile) + if err != nil { + log.Printf("Error reading log file for job %s: %v", jobID, err) + http.Error(w, "Failed to read log file", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "text/plain") + w.Write(logContent) + log.Printf("GET /jobs/%s/logs successful", jobID) +} + +// generateTrainPipeline is the HTTP handler for the /pipeline/generate-train endpoint. +func generateTrainPipeline(w http.ResponseWriter, r *http.Request) { + log.Println("POST /pipeline/generate-train called") + var reqBody struct { + ModelName string `json:"modelName"` + BranchName string `json:"branchName"` + Epochs *int `json:"epochs,omitempty"` + } + if err := json.NewDecoder(r.Body).Decode(&reqBody); err != nil { + log.Printf("Error parsing request body: %v", err) + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Ensure required fields are provided + if reqBody.ModelName == "" || reqBody.BranchName == "" { + log.Println("Missing required parameters: modelName or branchName") + http.Error(w, "Missing required parameters: modelName or branchName", http.StatusBadRequest) + return + } + + // Sanitize the modelName + sanitizedModelName := sanitizeModelName(reqBody.ModelName) + log.Printf("Sanitized modelName for pipeline: '%s'", sanitizedModelName) + + // Create a unique pipeline job ID + pipelineJobID := fmt.Sprintf("p-%d", time.Now().UnixNano()) + log.Printf("Starting pipeline job with ID: %s", pipelineJobID) + + // Save the pipeline job as a placeholder + job := &Job{ + JobID: pipelineJobID, + Cmd: "pipeline-generate-train", + Args: []string{sanitizedModelName, reqBody.BranchName}, + Status: "running", + PID: 0, + LogFile: fmt.Sprintf("logs/%s.log", pipelineJobID), + Branch: reqBody.BranchName, + StartTime: time.Now(), + } + + jobsLock.Lock() + jobs[pipelineJobID] = job + jobsLock.Unlock() + + saveJobs() + + // Start the pipeline in a separate goroutine + go runPipelineJob(job, sanitizedModelName, reqBody.BranchName, reqBody.Epochs) + + // Respond immediately with the pipeline job ID + response := map[string]string{ + "pipeline_job_id": pipelineJobID, + } + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(response); err != nil { + log.Printf("Error encoding response: %v", err) + http.Error(w, "Failed to send response", http.StatusInternalServerError) + return + } + + log.Printf("POST /pipeline/generate-train response sent successfully with job_id: %s", pipelineJobID) +} + +// listCheckpoints is the HTTP handler for the /checkpoints endpoint. +// It lists all directories within the default checkpoints directory. 
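+// The default location is ~/.local/share/instructlab/checkpoints/hf_format; only
+// sub-directories (checkpoint folders) are returned, regular files are ignored.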
+func listCheckpoints(w http.ResponseWriter, r *http.Request) { + log.Println("GET /checkpoints called") + + // Get the user's home directory + homeDir, err := os.UserHomeDir() + if err != nil { + log.Printf("Error getting user home directory: %v", err) + http.Error(w, "Failed to get user home directory", http.StatusInternalServerError) + return + } + + // Define the default checkpoints directory + checkpointsDir := filepath.Join(homeDir, ".local", "share", "instructlab", "checkpoints", "hf_format") + + // Check if the checkpoints directory exists + if _, err := os.Stat(checkpointsDir); os.IsNotExist(err) { + log.Printf("Checkpoints directory does not exist: %s", checkpointsDir) + http.Error(w, "Checkpoints directory does not exist", http.StatusNotFound) + return + } + + // Read the contents of the checkpoints directory + entries, err := ioutil.ReadDir(checkpointsDir) + if err != nil { + log.Printf("Error reading checkpoints directory: %v", err) + http.Error(w, "Failed to read checkpoints directory", http.StatusInternalServerError) + return + } + + // Filter out files, retaining only directories + var directories []string + for _, entry := range entries { + if entry.IsDir() { + directories = append(directories, entry.Name()) + } + } + + // Return the list of directories as JSON + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(directories); err != nil { + log.Printf("Error encoding directories to JSON: %v", err) + http.Error(w, "Failed to encode directories", http.StatusInternalServerError) + return + } + + log.Printf("GET /checkpoints successful, %d directories returned", len(directories)) +} + +// serveModel starts serving a model on the specified port. +func serveModel(modelPath, port string, w http.ResponseWriter) { + modelLock.Lock() + defer modelLock.Unlock() + + log.Printf("serveModel called with modelPath=%s, port=%s", modelPath, port) + + // Determine which model we are serving based on port + var targetProcess **exec.Cmd + if port == "8000" { + targetProcess = &modelProcessBase + } else if port == "8001" { + targetProcess = &modelProcessLatest + } else { + http.Error(w, "Invalid port specified", http.StatusBadRequest) + return + } + + // Check model file existence + if _, err := os.Stat(modelPath); os.IsNotExist(err) { + log.Printf("Model path does not exist: %s", modelPath) + http.Error(w, fmt.Sprintf("Model path does not exist: %s", modelPath), http.StatusNotFound) + return + } + log.Printf("Model file found at: %s", modelPath) + + // Kill only the process corresponding to this port + if *targetProcess != nil && (*targetProcess).Process != nil { + log.Printf("Stopping existing model process on port %s...", port) + if err := (*targetProcess).Process.Kill(); err != nil { + log.Printf("Failed to kill existing model process on port %s: %v", port, err) + http.Error(w, "Failed to stop existing model process", http.StatusInternalServerError) + return + } + *targetProcess = nil + } + + var cmdArgs []string + cmdArgs = []string{ + "serve", "model", + "--model", modelPath, + "--host", "0.0.0.0", + "--port", port, + } + + cmdPath := getIlabCommand() + cmd := exec.Command(cmdPath, cmdArgs...) 
+ if !rhelai { + cmd.Dir = baseDir + } + + jobID := fmt.Sprintf("ml-%d", time.Now().UnixNano()) + logFilePath := filepath.Join("logs", fmt.Sprintf("%s.log", jobID)) + log.Printf("Model serve logs: %s", logFilePath) + logFile, err := os.Create(logFilePath) + if err != nil { + log.Printf("Error creating model run log file: %v", err) + http.Error(w, "Failed to create log file", http.StatusInternalServerError) + return + } + + cmd.Stdout = logFile + cmd.Stderr = logFile + + log.Println("Attempting to start model process...") + if err := cmd.Start(); err != nil { + log.Printf("Error starting model process: %v", err) + logFile.Close() + http.Error(w, "Failed to start model process", http.StatusInternalServerError) + return + } + + *targetProcess = cmd + log.Printf("Model process started with PID %d on port %s", cmd.Process.Pid, port) + + // Save job details + job := &Job{ + JobID: jobID, + Cmd: cmdPath, + Args: cmdArgs, + Status: "running", + PID: cmd.Process.Pid, + LogFile: logFilePath, + StartTime: time.Now(), + } + log.Printf("Model serve job details: %+v", job) + + jobsLock.Lock() + jobs[jobID] = job + jobsLock.Unlock() + saveJobs() + + // Monitor the model process + go func() { + log.Printf("Waiting for model process to finish (job_id: %s, port: %s)", jobID, port) + err := cmd.Wait() + logFile.Sync() + logFile.Close() + + job.Lock.Lock() + defer job.Lock.Unlock() + + if err != nil { + job.Status = "failed" + log.Printf("Model run job '%s' on port %s failed: %v", jobID, port, err) + } else if cmd.ProcessState.Success() { + job.Status = "finished" + log.Printf("Model run job '%s' on port %s finished successfully", jobID, port) + } else { + job.Status = "failed" + log.Printf("Model run job '%s' on port %s failed (unknown reason)", jobID, port) + } + + now := time.Now() + job.EndTime = &now + saveJobs() + + // If the process ends, clear the reference + modelLock.Lock() + defer modelLock.Unlock() + if port == "8000" { + if modelProcessBase == cmd { + modelProcessBase = nil + } + } else if port == "8001" { + if modelProcessLatest == cmd { + modelProcessLatest = nil + } + } + }() + + log.Printf("Model serve started successfully on port %s, returning job_id: %s", port, jobID) + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{"status": "model process started", "job_id": jobID}) +} + +// serveLatestCheckpoint serves the latest checkpoint model on port 8001. 
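A hypothetical client-side sketch of that flow: POST to /model/serve-latest, pick the job_id out of the JSON response, then read the serve log back through the /jobs/{id}/logs handler above. The base URL is an assumption; only the route names and response keys appear in this patch.

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Assumed base URL; only the routes below are defined in this patch.
	resp, err := http.Post("http://localhost:8080/model/serve-latest", "application/json", nil)
	if err != nil {
		log.Fatalf("serve-latest request failed: %v", err)
	}
	defer resp.Body.Close()

	// Response shape: {"status": "...", "job_id": "..."}
	var started map[string]string
	if err := json.NewDecoder(resp.Body).Decode(&started); err != nil {
		log.Fatalf("decoding response failed: %v", err)
	}
	fmt.Println("serve job:", started["job_id"])

	// Read back whatever the serve job has written to its log file so far.
	logsResp, err := http.Get("http://localhost:8080/jobs/" + started["job_id"] + "/logs")
	if err != nil {
		log.Fatalf("log request failed: %v", err)
	}
	defer logsResp.Body.Close()
	logs, err := io.ReadAll(logsResp.Body)
	if err != nil {
		log.Fatalf("reading logs failed: %v", err)
	}
	fmt.Println(string(logs))
}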
+func serveLatestCheckpoint(w http.ResponseWriter, r *http.Request) { + log.Println("POST /model/serve-latest called, loading the latest checkpoint") + + homeDir, err := os.UserHomeDir() + if err != nil { + log.Printf("Error getting user home directory: %v", err) + http.Error(w, "Failed to get home directory", http.StatusInternalServerError) + return + } + + if useVllm { + // Spawn podman container for latest checkpoint + latestModelPath := filepath.Join(homeDir, ".local", "share", "instructlab", "checkpoints", "hf_format") + log.Printf("Serving latest model using vllm at %s on port 8001", latestModelPath) + runVllmContainer( + fmt.Sprintf("%s/%s", latestModelPath, "samples_1192378"), + "8001", + "post-train", + 1, // GPU device index + "/var/home/cloud-user", + "/var/home/cloud-user", + w, + ) + } else { + // Default serving behavior + latestModelPath := filepath.Join(homeDir, ".local", "share", "instructlab", "checkpoints", "ggml-model-f16.gguf") + log.Printf("Serving latest model at %s on port 8001", latestModelPath) + serveModel(latestModelPath, "8001", w) + } +} + +// serveBaseModel serves the "base" model on port 8000. +func serveBaseModel(w http.ResponseWriter, r *http.Request) { + log.Println("POST /model/serve-base called") + + homeDir, err := os.UserHomeDir() + if err != nil { + log.Printf("Error getting user home directory: %v", err) + http.Error(w, "Failed to get home directory", http.StatusInternalServerError) + return + } + + if useVllm { + // Spawn podman container for base model + baseModelPath := filepath.Join(homeDir, ".cache", "instructlab", "models", "granite-8b-starter-v1") + log.Printf("Serving base model using vllm at %s on port 8000", baseModelPath) + runVllmContainer( + baseModelPath, + "8000", + "pre-train", + 0, // GPU device index + "/var/home/cloud-user", + "/var/home/cloud-user", + w, + ) + } else { + // Default serving behavior + baseModelPath := filepath.Join(homeDir, ".cache", "instructlab", "models", "granite-7b-lab-Q4_K_M.gguf") + log.Printf("Serving base model at %s on port 8000", baseModelPath) + serveModel(baseModelPath, "8000", w) + } +} + +// runVllmContainer spawns a podman container running vllm +func runVllmContainer(modelPath, port, servedModelName string, gpuIndex int, hostVolume, containerVolume string, w http.ResponseWriter) { + cmdArgs := []string{ + "run", "--rm", + fmt.Sprintf("--device=nvidia.com/gpu=%d", gpuIndex), + fmt.Sprintf("-e=NVIDIA_VISIBLE_DEVICES=%d", gpuIndex), + "-v", "/usr/local/cuda-12.4/lib64:/usr/local/cuda-12.4/lib64", + "-v", fmt.Sprintf("%s:%s", hostVolume, containerVolume), + "-p", fmt.Sprintf("%s:%s", port, port), + "--ipc=host", + "vllm/vllm-openai:latest", + "--host", "0.0.0.0", + "--port", port, + "--model", modelPath, + "--load-format", "safetensors", + "--config-format", "hf", + "--trust-remote-code", + "--device", "cuda", + "--served-model-name", servedModelName, + } + + fullCmd := fmt.Sprintf("podman %s", strings.Join(cmdArgs, " ")) + log.Printf("Executing Podman command: %s", fullCmd) + + jobID := fmt.Sprintf("v-%d", time.Now().UnixNano()) + logFilePath := filepath.Join("logs", fmt.Sprintf("%s.log", jobID)) + log.Printf("Starting vllm-openai container with job_id: %s, logs: %s", jobID, logFilePath) + + cmd := exec.Command("podman", cmdArgs...) 
+ + // Redirect command output to log file + logFile, err := os.Create(logFilePath) + if err != nil { + log.Printf("Error creating log file for vllm job %s: %v", jobID, err) + http.Error(w, "Failed to create log file for vllm job", http.StatusInternalServerError) + return + } + cmd.Stdout = logFile + cmd.Stderr = logFile + + if err := cmd.Start(); err != nil { + log.Printf("Error starting podman container for vllm job %s: %v", jobID, err) + logFile.Close() + http.Error(w, "Failed to start vllm container", http.StatusInternalServerError) + return + } + + log.Printf("Vllm container started with PID %d for job_id: %s", cmd.Process.Pid, jobID) + + // Save job details + job := &Job{ + JobID: jobID, + Cmd: "podman", + Args: cmdArgs, + Status: "running", + PID: cmd.Process.Pid, + LogFile: logFilePath, + StartTime: time.Now(), + } + + jobsLock.Lock() + jobs[jobID] = job + jobsLock.Unlock() + saveJobs() + + servedModelJobIDs[servedModelName] = jobID + + go func() { + err := cmd.Wait() + logFile.Sync() + logFile.Close() + + job.Lock.Lock() + defer job.Lock.Unlock() + + if err != nil { + job.Status = "failed" + log.Printf("Vllm job '%s' failed: %v", job.JobID, err) + } else if cmd.ProcessState.Success() { + job.Status = "finished" + log.Printf("Vllm job '%s' finished successfully", job.JobID) + } else { + job.Status = "failed" + log.Printf("Vllm job '%s' failed (unknown reason)", job.JobID) + } + + now := time.Now() + job.EndTime = &now + saveJobs() + }() + + // Respond with the job ID + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "status": "vllm container started", + "job_id": jobID, + }) + log.Printf("POST /model/serve-%s response sent successfully with job_id: %s", servedModelName, jobID) +} + +// runPipelineJob executes the pipeline steps: Git checkout, data generation, and training. 
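The implementation below drives the generate and train steps with two identical polling loops. A minimal, self-contained sketch of that pattern is shown here; waitForStatus and its parameters are hypothetical names, not part of this patch.

package main

import (
	"fmt"
	"time"
)

// waitForStatus polls lookup every interval until the job reports "finished"
// (true) or "failed"/missing (false); this mirrors the wait loops below.
func waitForStatus(lookup func() (string, bool), interval time.Duration) bool {
	for {
		time.Sleep(interval)
		status, ok := lookup()
		if !ok || status == "failed" {
			return false
		}
		if status == "finished" {
			return true
		}
	}
}

func main() {
	// Fake lookup that "finishes" after two seconds, just to exercise the loop.
	start := time.Now()
	done := waitForStatus(func() (string, bool) {
		if time.Since(start) > 2*time.Second {
			return "finished", true
		}
		return "running", true
	}, 500*time.Millisecond)
	fmt.Println("job finished:", done)
}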
+func runPipelineJob(job *Job, modelName, branchName string, epochs *int) { + logFile, err := os.Create(job.LogFile) + if err != nil { + log.Printf("Error creating pipeline log file for job %s: %v", job.JobID, err) + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + defer logFile.Close() + + logger := log.New(logFile, "", log.LstdFlags) + + logger.Printf("Starting pipeline job: %s, model: %s, branch: %s, epochs: %v", job.JobID, modelName, branchName, epochs) + + // Perform Git checkout + gitCheckoutCmd := exec.Command("git", "checkout", branchName) + gitCheckoutCmd.Dir = taxonomyPath + gitOutput, gitErr := gitCheckoutCmd.CombinedOutput() + logger.Printf("Git checkout output: %s", string(gitOutput)) + if gitErr != nil { + logger.Printf("Failed to checkout branch '%s': %v", branchName, gitErr) + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + + // Start data generation step + logger.Println("Starting data generation step...") + genJobID, genErr := startGenerateJob() + if genErr != nil { + logger.Printf("Data generation step failed: %v", genErr) + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + logger.Printf("Data generation step started successfully with job_id: '%s'", genJobID) + + // Wait for data generation to finish + for { + time.Sleep(5 * time.Second) + jobsLock.Lock() + genJob, exists := jobs[genJobID] + jobsLock.Unlock() + + if !exists || genJob.Status == "failed" { + logger.Println("Data generation step failed.") + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + + if genJob.Status == "finished" { + logger.Println("Data generation step completed successfully.") + break + } + } + + // Start training step + logger.Println("Starting training step...") + trainJobID, trainErr := startTrainJob(modelName, branchName, epochs) + if trainErr != nil { + logger.Printf("Training step failed: %v", trainErr) + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + logger.Printf("Training step started successfully with job_id: '%s'", trainJobID) + + // Wait for training to finish + for { + time.Sleep(5 * time.Second) + jobsLock.Lock() + tJob, tExists := jobs[trainJobID] + jobsLock.Unlock() + + if !tExists || tJob.Status == "failed" { + logger.Println("Training step failed.") + jobsLock.Lock() + job.Status = "failed" + jobsLock.Unlock() + saveJobs() + return + } + + if tJob.Status == "finished" { + logger.Println("Training step completed successfully.") + break + } + } + + // Pipeline completed successfully + jobsLock.Lock() + job.Status = "finished" + jobsLock.Unlock() + saveJobs() + logger.Println("Pipeline job completed successfully.") +} + +func runQnaEval(w http.ResponseWriter, r *http.Request) { + log.Println("POST /qna-eval called") + + // Decode the JSON request body + var req QnaEvalRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + log.Printf("Error decoding request body: %v", err) + http.Error(w, "Invalid request body", http.StatusBadRequest) + return + } + + // Validate that model_path exists + if _, err := os.Stat(req.ModelPath); os.IsNotExist(err) { + log.Printf("Model path does not exist: %s", req.ModelPath) + http.Error(w, fmt.Sprintf("Model path does not exist: %s", req.ModelPath), http.StatusBadRequest) + return + } + + // Validate that yaml_file exists + if _, err := os.Stat(req.YamlFile); os.IsNotExist(err) { + log.Printf("YAML file does not exist: %s", req.YamlFile) + 
http.Error(w, fmt.Sprintf("YAML file does not exist: %s", req.YamlFile), http.StatusBadRequest) + return + } + + homeDir, err := os.UserHomeDir() + if err != nil { + log.Printf("Failed to get user's home directory: %v", err) + http.Error(w, "Internal server error", http.StatusInternalServerError) + return + } + + // Construct the Podman command + cmd := exec.Command("podman", "run", "--rm", + "--device", "nvidia.com/gpu=all", + "-v", fmt.Sprintf("%s:%s", homeDir, homeDir), + "quay.io/bsalisbu/qna-eval", + "--model_path", req.ModelPath, + "--yaml_file", req.YamlFile, + ) + + // Capture stdout and stderr + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + log.Printf("Executing Podman command: %v", cmd.Args) + + err = cmd.Run() + + if err != nil { + log.Printf("Podman command failed: %v", err) + log.Printf("Error Output: %s", stderr.String()) + + // Respond with error logs + response := map[string]string{ + "error": stderr.String(), + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusInternalServerError) + json.NewEncoder(w).Encode(response) + return + } + + // Command was successful, return the output + response := map[string]string{ + "result": stdout.String(), + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(response) + log.Println("POST /qna-eval completed successfully") +} diff --git a/api-server/podman.go b/api-server/podman.go new file mode 100644 index 00000000..3c7a61d5 --- /dev/null +++ b/api-server/podman.go @@ -0,0 +1,184 @@ +// podman.go + +package main + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "log" + "os/exec" + "strings" +) + +// VllmContainer are details of a vllm container. +type VllmContainer struct { + ContainerID string `json:"container_id"` + Image string `json:"image"` + Command string `json:"command"` + CreatedAt string `json:"created_at"` + Status string `json:"status"` + Ports string `json:"ports"` + Names string `json:"names"` + ServedModelName string `json:"served_model_name"` + ModelPath string `json:"model_path"` +} + +// ListVllmContainers retrieves all running vllm containers and extracts the --served-model-name and --model values. 
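For reference, the pipe-delimited format string passed to `podman ps` yields one line per container, which the function below splits into exactly seven fields. A fabricated sample line and the split it relies on:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Fabricated sample in the {{.ID}}|{{.Image}}|{{.Command}}|{{.CreatedAt}}|{{.Status}}|{{.Ports}}|{{.Names}} format.
	line := "1a2b3c4d5e6f|vllm/vllm-openai:latest|--host 0.0.0.0 --po...|2 hours ago|Up 2 hours|0.0.0.0:8000->8000/tcp|vllm-base"
	parts := strings.Split(line, "|")
	fmt.Println(len(parts))            // 7
	fmt.Println(parts[0], parts[6])    // container ID and name
}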
+func ListVllmContainers() ([]VllmContainer, error) { + + format := "{{.ID}}|{{.Image}}|{{.Command}}|{{.CreatedAt}}|{{.Status}}|{{.Ports}}|{{.Names}}" + + // Execute 'podman ps' with the specified format + cmd := exec.Command("podman", "ps", "--filter", "ancestor=vllm/vllm-openai:latest", "--format", format) + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + + err := cmd.Run() + if err != nil { + return nil, fmt.Errorf("error running podman ps: %v, stderr: %s", err, stderr.String()) + } + + lines := strings.Split(strings.TrimSpace(out.String()), "\n") + var containers []VllmContainer + + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue // Skip empty lines + } + + // Split the line into parts based on the delimiter '|' + parts := strings.Split(line, "|") + if len(parts) != 7 { + log.Printf("Skipping malformed podman ps line: %s", line) + continue + } + + containerID := strings.TrimSpace(parts[0]) + image := strings.TrimSpace(parts[1]) + command := strings.TrimSpace(parts[2]) + createdAt := strings.TrimSpace(parts[3]) + status := strings.TrimSpace(parts[4]) + ports := strings.TrimSpace(parts[5]) + names := strings.TrimSpace(parts[6]) + + // Inspect the container to get the full command and extract args + servedModelName, modelPath, err := ExtractVllmArgs(containerID) + if err != nil { + log.Printf("Error extracting vllm args for container %s: %v", containerID, err) + continue + } + + container := VllmContainer{ + ContainerID: containerID, + Image: image, + Command: command, + CreatedAt: createdAt, + Status: status, + Ports: ports, + Names: names, + ServedModelName: servedModelName, + ModelPath: modelPath, + } + + containers = append(containers, container) + } + + return containers, nil +} + +// ExtractVllmArgs inspects a container and extracts --served-model-name and --model values. +func ExtractVllmArgs(containerID string) (string, string, error) { + // Execute 'podman inspect' with a JSON format to get the full command + inspectCmd := exec.Command("podman", "inspect", "--format", "{{json .Config.Cmd}}", containerID) + var inspectOut bytes.Buffer + var inspectErr bytes.Buffer + inspectCmd.Stdout = &inspectOut + inspectCmd.Stderr = &inspectErr + + err := inspectCmd.Run() + if err != nil { + return "", "", fmt.Errorf("error inspecting container %s: %v, stderr: %s", containerID, err, inspectErr.String()) + } + + // The command is a JSON array, e.g., ["--host", "0.0.0.0", "--port", "8000", "--model", "/path/to/model", "--served-model-name", "pre-train"] + var cmdArgs []string + if err := json.Unmarshal(inspectOut.Bytes(), &cmdArgs); err != nil { + return "", "", fmt.Errorf("error unmarshalling command args for container %s: %v", containerID, err) + } + + servedModelName, modelPath, err := parseVllmArgs(cmdArgs) + if err != nil { + return "", "", fmt.Errorf("error parsing vllm args for container %s: %v", containerID, err) + } + + return servedModelName, modelPath, nil +} + +// parseVllmArgs parses the command-line arguments to extract --served-model-name and --model values. 
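The argument vector handed to this parser is whatever `podman inspect` reports as the container command (see ExtractVllmArgs above). A fabricated example of that JSON payload and the values the parser is expected to pull out of it:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Fabricated .Config.Cmd payload; real values depend on how the container was started.
	raw := []byte(`["--host","0.0.0.0","--port","8000","--model","/models/granite","--served-model-name","pre-train"]`)
	var cmdArgs []string
	if err := json.Unmarshal(raw, &cmdArgs); err != nil {
		log.Fatalf("unmarshal failed: %v", err)
	}
	// parseVllmArgs would report served-model-name "pre-train" and model path "/models/granite".
	fmt.Println(cmdArgs)
}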
+func parseVllmArgs(args []string) (string, string, error) {
+	var servedModelName, modelPath string
+
+	for i := 0; i < len(args); i++ {
+		switch args[i] {
+		case "--served-model-name":
+			if i+1 < len(args) {
+				servedModelName = args[i+1]
+				i++
+			} else {
+				return "", "", errors.New("missing value for --served-model-name")
+			}
+		case "--model":
+			if i+1 < len(args) {
+				modelPath = args[i+1]
+				i++
+			} else {
+				return "", "", errors.New("missing value for --model")
+			}
+		}
+	}
+
+	if servedModelName == "" || modelPath == "" {
+		return "", "", errors.New("required arguments --served-model-name or --model not found")
+	}
+
+	return servedModelName, modelPath, nil
+}
+
+// StopVllmContainer stops a running vllm container based on the served model name.
+func StopVllmContainer(servedModelName string) error {
+	containers, err := ListVllmContainers()
+	if err != nil {
+		return fmt.Errorf("failed to list vllm containers: %v", err)
+	}
+
+	var targetContainer *VllmContainer
+	for _, container := range containers {
+		if container.ServedModelName == servedModelName {
+			targetContainer = &container
+			break
+		}
+	}
+
+	if targetContainer == nil {
+		return fmt.Errorf("no vllm container found with served-model-name '%s'", servedModelName)
+	}
+
+	// Execute 'podman stop <container ID>'
+	stopCmd := exec.Command("podman", "stop", targetContainer.ContainerID)
+	var stopOut bytes.Buffer
+	var stopErr bytes.Buffer
+	stopCmd.Stdout = &stopOut
+	stopCmd.Stderr = &stopErr
+
+	err = stopCmd.Run()
+	if err != nil {
+		return fmt.Errorf("error stopping container %s: %v, stderr: %s", targetContainer.ContainerID, err, stopErr.String())
+	}
+
+	log.Printf("Successfully stopped vllm container '%s' with served-model-name '%s'", targetContainer.ContainerID, servedModelName)
+	return nil
+}
diff --git a/api-server/qna-eval/Containerfile b/api-server/qna-eval/Containerfile
new file mode 100644
index 00000000..02a5fdca
--- /dev/null
+++ b/api-server/qna-eval/Containerfile
@@ -0,0 +1,40 @@
+# Podman container toolkit support https://docs.nvidia.com/ai-enterprise/deployment/rhel-with-kvm/latest/podman.html
+FROM python:3.11-slim
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set the working directory inside the container
+WORKDIR /app
+
+# Copy the requirements.txt to the container
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy the Python script to the container
+COPY validate-qna.py .
+
+# Set the entrypoint to execute the Python script
+ENTRYPOINT ["python", "validate-qna.py"]
+
+# Build the container image
+# podman build -t qna-eval .
+ +# Run the container with the necessary arguments and volume mount +# podman run --rm \ +# --device nvidia.com/gpu=1 \ +# -v /var/home/cloud-user/:/var/home/cloud-user/ \ +# qna-eval \ +# --model_path "/var/home/cloud-user/.local/share/instructlab/checkpoints/samples_134632/" \ +# --yaml_file "/var/home/cloud-user/.local/share/instructlab/taxonomy/knowledge/history/foo/qna.yaml" + +# Or run from this quay repo +# podman run --rm \ +# --device nvidia.com/gpu=1 \ +# -v /var/home/cloud-user/:/var/home/cloud-user/ \ +# quay.io/bsalisbu/qna-eval \ +# --model_path "/var/home/cloud-user/.local/share/instructlab/checkpoints/samples_134632/" \ +# --yaml_file "/var/home/cloud-user/.local/share/instructlab/taxonomy/knowledge/history/foo/qna.yaml" diff --git a/api-server/qna-eval/qna-eval.py b/api-server/qna-eval/qna-eval.py new file mode 100644 index 00000000..825b594a --- /dev/null +++ b/api-server/qna-eval/qna-eval.py @@ -0,0 +1,101 @@ +import argparse +import yaml +from vllm import LLM, SamplingParams + +def extract_questions(yaml_file): + """ + Extracts all questions from the 'questions_and_answers' sections of the YAML file. + + Args: + yaml_file (str): Path to the qna.yaml file. + + Returns: + list: A list of questions extracted from the YAML file. + """ + with open(yaml_file, 'r') as f: + data = yaml.safe_load(f) + + questions = [] + # Navigate through the YAML structure to find all questions + seed_examples = data.get('seed_examples', []) + for example in seed_examples: + qna_list = example.get('questions_and_answers', []) + for qna in qna_list: + question = qna.get('question') + if question: + # Clean up the question if it starts with 'Q: ' or similar prefixes + if question.lower().startswith('q:'): + question = question[2:].strip() + questions.append(question) + return questions + +def query_model(llm, system_prompt, question): + """ + Constructs the prompt and queries the model to get the answer. + + Args: + llm (LLM): The language model instance. + system_prompt (str): The system prompt to set the model's context. + question (str): The question to query. + + Returns: + str: The answer generated by the model. 
+ """ + prompt = f"<|system|>{system_prompt}<|user|>{question}<|assistant|>" + + sampling_params = SamplingParams( + max_tokens=200, + temperature=0, + ) + + response_generator = llm.generate(prompt, sampling_params) + answer = "" + + for response in response_generator: + # Debugging: Print the entire response object + #print("\n--- Debugging Response ---") + #print(response) + #print("--- End of Response ---\n") + + # Check if 'outputs' exist and have at least one CompletionOutput + if hasattr(response, 'outputs') and len(response.outputs) > 0: + completion = response.outputs[0] + if hasattr(completion, 'text'): + answer += completion.text.strip() + else: + print("Debug: 'text' attribute not found in CompletionOutput.") + else: + print("Debug: 'outputs' not found or empty in the response.") + + return answer + +def main(): + # Set up command-line argument parsing + parser = argparse.ArgumentParser(description='Query model with questions from a YAML file.') + parser.add_argument('--model_path', type=str, required=True, help='Path to the language model.') + parser.add_argument('--yaml_file', type=str, required=True, help='Path to the qna.yaml file.') + args = parser.parse_args() + + # Extract questions from the YAML file + questions = extract_questions(args.yaml_file) + + # Initialize the language model + llm = LLM( + model=args.model_path, + # dtype="bfloat16", # Adjust dtype as needed + ) + + # Define the system prompt + system_prompt = ( + "I am a Red Hat® Instruct Model, an AI language model developed by Red Hat and IBM Research " + "based on the granite-3.0-8b-base model. My primary role is to serve as a chat assistant." + ) + + # Iterate over each question, query the model, and print the Q&A + for idx, question in enumerate(questions, 1): + answer = query_model(llm, system_prompt, question) + print(f"Q{idx}: {question}") + print(f"A{idx}: {answer}\n") + +if __name__ == '__main__': + main() diff --git a/api-server/qna-eval/requirements.txt b/api-server/qna-eval/requirements.txt new file mode 100644 index 00000000..a6182302 --- /dev/null +++ b/api-server/qna-eval/requirements.txt @@ -0,0 +1,176 @@ +aiohappyeyeballs==2.4.4 +aiohttp==3.11.11 +aiohttp-cors==0.7.0 +aiosignal==1.3.2 +airportsdata==20241001 +annotated-types==0.7.0 +anyio==4.8.0 +astor==0.8.1 +asttokens==3.0.0 +attrs==24.3.0 +backcall==0.2.0 +beautifulsoup4==4.12.3 +blake3==1.0.1 +bleach==6.2.0 +cachetools==5.5.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +click==8.1.8 +cloudpickle==3.1.0 +colorful==0.5.6 +compressed-tensors==0.8.1 +decorator==5.1.1 +defusedxml==0.7.1 +depyf==0.18.0 +dill==0.3.9 +diskcache==5.6.3 +distlib==0.3.9 +distro==1.9.0 +docopt==0.6.2 +einops==0.8.0 +executing==2.1.0 +fastapi==0.115.6 +fastjsonschema==2.21.1 +filelock==3.16.1 +frozenlist==1.5.0 +fsspec==2024.12.0 +gguf==0.10.0 +google-api-core==2.24.0 +google-auth==2.37.0 +googleapis-common-protos==1.66.0 +grpcio==1.69.0 +h11==0.14.0 +httpcore==1.0.7 +httptools==0.6.4 +httpx==0.28.1 +huggingface-hub==0.27.1 +idna==3.10 +importlib_metadata==8.5.0 +iniconfig==2.0.0 +interegular==0.3.3 +ipython==8.12.3 +jedi==0.19.2 +Jinja2==3.1.5 +jiter==0.8.2 +jsonschema==4.23.0 +jsonschema-specifications==2024.10.1 +jupyter_client==8.6.3 +jupyter_core==5.7.2 +jupyterlab_pygments==0.3.0 +lark==1.2.2 +linkify-it-py==2.0.3 +lm-format-enforcer==0.10.9 +markdown-it-py==3.0.0 +MarkupSafe==3.0.2 +matplotlib-inline==0.1.7 +mdit-py-plugins==0.4.2 +mdurl==0.1.2 +memray==1.15.0 +mistral_common==1.5.1 +mistune==3.1.0 +mpmath==1.3.0 +msgpack==1.1.0 +msgspec==0.19.0 
+multidict==6.1.0 +nbclient==0.10.2 +nbconvert==7.16.5 +nbformat==5.10.4 +nest-asyncio==1.6.0 +networkx==3.4.2 +numpy==1.26.4 +nvidia-cublas-cu12==12.4.5.8 +nvidia-cuda-cupti-cu12==12.4.127 +nvidia-cuda-nvrtc-cu12==12.4.127 +nvidia-cuda-runtime-cu12==12.4.127 +nvidia-cudnn-cu12==9.1.0.70 +nvidia-cufft-cu12==11.2.1.3 +nvidia-curand-cu12==10.3.5.147 +nvidia-cusolver-cu12==11.6.1.9 +nvidia-cusparse-cu12==12.3.1.170 +nvidia-ml-py==12.560.30 +nvidia-nccl-cu12==2.21.5 +nvidia-nvjitlink-cu12==12.4.127 +nvidia-nvtx-cu12==12.4.127 +openai==1.59.5 +opencensus==0.11.4 +opencensus-context==0.1.3 +opencv-python-headless==4.10.0.84 +outlines==0.1.11 +outlines_core==0.1.26 +packaging==24.2 +pandocfilters==1.5.1 +parso==0.8.4 +partial-json-parser==0.2.1.1.post5 +pexpect==4.9.0 +pickleshare==0.7.5 +pillow==10.4.0 +pipreqs==0.5.0 +platformdirs==4.3.6 +pluggy==1.5.0 +prometheus-fastapi-instrumentator==7.0.0 +prometheus_client==0.21.1 +prompt_toolkit==3.0.48 +propcache==0.2.1 +proto-plus==1.25.0 +protobuf==5.29.3 +psutil==6.1.1 +ptyprocess==0.7.0 +pure_eval==0.2.3 +py-cpuinfo==9.0.0 +py-spy==0.4.0 +pyasn1==0.6.1 +pyasn1_modules==0.4.1 +pybind11==2.13.6 +pycountry==24.6.1 +pydantic==2.10.4 +pydantic_core==2.27.2 +Pygments==2.19.1 +pytest==8.3.4 +python-dateutil==2.9.0.post0 +python-dotenv==1.0.1 +PyYAML==6.0.2 +pyzmq==26.2.0 +ray==2.40.0 +referencing==0.35.1 +regex==2024.11.6 +requests==2.32.3 +rich==13.9.4 +rpds-py==0.22.3 +rsa==4.9 +safetensors==0.5.2 +sentencepiece==0.2.0 +six==1.17.0 +smart-open==7.1.0 +sniffio==1.3.1 +soupsieve==2.6 +stack-data==0.6.3 +starlette==0.41.3 +sympy==1.13.1 +textual==1.0.0 +tiktoken==0.7.0 +tinycss2==1.4.0 +tokenizers==0.21.0 +torch==2.5.1 +torchvision==0.20.1 +tornado==6.4.2 +tqdm==4.67.1 +traitlets==5.14.3 +transformers==4.47.1 +triton==3.1.0 +typing_extensions==4.12.2 +uc-micro-py==1.0.3 +urllib3==2.3.0 +uvicorn==0.34.0 +uvloop==0.21.0 +virtualenv==20.28.1 +vllm==0.6.6.post1 +watchfiles==1.0.3 +wcwidth==0.2.13 +webencodings==0.5.1 +websockets==14.1 +wrapt==1.17.0 +xformers==0.0.28.post3 +xgrammar==0.1.9 +yarg==0.1.9 +yarl==1.18.3 +zipp==3.21.0 diff --git a/api-server/utils.go b/api-server/utils.go new file mode 100644 index 00000000..f9327831 --- /dev/null +++ b/api-server/utils.go @@ -0,0 +1,78 @@ +package main + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" +) + +// findLatestFileWithPrefix scans `dir` for all files whose name starts with `prefix`, +// and returns the path of the latest modified file. Returns an error if none is found. +func findLatestFileWithPrefix(dir, prefix string) (string, error) { + files, err := ioutil.ReadDir(dir) + if err != nil { + return "", fmt.Errorf("failed to read directory '%s': %v", dir, err) + } + + var latestFile os.FileInfo + for _, f := range files { + if strings.HasPrefix(f.Name(), prefix) && strings.HasSuffix(f.Name(), ".jsonl") { + if latestFile == nil || f.ModTime().After(latestFile.ModTime()) { + latestFile = f + } + } + } + if latestFile == nil { + return "", fmt.Errorf("no file found matching prefix '%s' in '%s'", prefix, dir) + } + return filepath.Join(dir, latestFile.Name()), nil +} + +// overwriteCopy removes `destPath` if it exists, then copies srcPath -> destPath. 
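A hypothetical fragment, assumed to sit in this same package, showing how overwriteCopy and findLatestFileWithPrefix might be combined to refresh a training file from the newest generated output; the helper name, prefix, and paths are illustrative only.

// copyLatestGenerated is a hypothetical helper, not part of this patch.
// It copies the newest file matching the "test_" prefix from generatedDir over destPath.
func copyLatestGenerated(generatedDir, destPath string) error {
	src, err := findLatestFileWithPrefix(generatedDir, "test_")
	if err != nil {
		return err
	}
	return overwriteCopy(src, destPath)
}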
+func overwriteCopy(srcPath, destPath string) error { + // If the destination file already exists, remove it + if _, err := os.Stat(destPath); err == nil { + if err := os.Remove(destPath); err != nil { + return fmt.Errorf("could not remove existing file '%s': %v", destPath, err) + } + } + + // Open the source + in, err := os.Open(srcPath) + if err != nil { + return fmt.Errorf("could not open source file '%s': %v", srcPath, err) + } + defer in.Close() + + // Create the destination + out, err := os.Create(destPath) + if err != nil { + return fmt.Errorf("could not create dest file '%s': %v", destPath, err) + } + defer out.Close() + + // Copy contents + if _, err := io.Copy(out, in); err != nil { + return fmt.Errorf("failed to copy '%s' to '%s': %v", srcPath, destPath, err) + } + + return nil +} + +// getFullModelPath converts a user-supplied model name into a fully qualified path: +// +// ~/.cache/instructlab/models/ +func getFullModelPath(modelName string) (string, error) { + baseCacheDir, err := getBaseCacheDir() + if err != nil { + return "", err + } + // If user-supplied name already starts with "models/", don't prepend again + if strings.HasPrefix(modelName, "models/") { + return filepath.Join(baseCacheDir, modelName), nil + } + return filepath.Join(baseCacheDir, "models", modelName), nil +} diff --git a/package-lock.json b/package-lock.json index 97830d4f..20620027 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,11 +11,15 @@ "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.7.1", "@next/env": "^15.0.3", + "@patternfly/chatbot": "^2.1.0-prerelease.17", "@patternfly/react-core": "^6.0.0", "@patternfly/react-icons": "^6.0.0", "@patternfly/react-styles": "^6.0.0", "@patternfly/react-table": "^6.0.0", + "@patternfly/virtual-assistant": "^2.0.2", "axios": "^1.7.9", + "date-fns": "^4.1.0", + "dompurify": "^3.2.2", "fs": "^0.0.1-security", "isomorphic-git": "^1.27.2", "js-yaml": "^4.1.0", @@ -343,15 +347,19 @@ "kuler": "^2.0.0" } }, - "node_modules/@emnapi/runtime": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz", - "integrity": "sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==", - "optional": true, + "node_modules/@emotion/is-prop-valid": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.7.3.tgz", + "integrity": "sha512-uxJqm/sqwXw3YPA5GXX365OBcJGFtxUVkB6WyezqFHlNe9jqUWH5ur2O2M8dGBz61kn1g3ZBlzUunFQXQIClhA==", "dependencies": { - "tslib": "^2.4.0" + "@emotion/memoize": "0.7.1" } }, + "node_modules/@emotion/memoize": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.1.tgz", + "integrity": "sha512-Qv4LTqO11jepd5Qmlp3M1YEjBumoTHcHFdgPTQ+sFlIL5myi/7xu/POwP7IRu6odBdmLXdtIs1D6TuW6kbwbbg==" + }, "node_modules/@eslint-community/eslint-utils": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", @@ -469,7 +477,6 @@ "version": "6.7.1", "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.7.1.tgz", "integrity": "sha512-gbDz3TwRrIPT3i0cDfujhshnXO9z03IT1UKRIVi/VEjpNHtSBIP2o5XSm+e816FzzCFEzAxPw09Z13n20PaQJQ==", - "license": "MIT", "engines": { "node": ">=6" } @@ -490,7 +497,6 @@ "resolved": "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.7.1.tgz", "integrity": 
"sha512-BTKc0b0mgjWZ2UDKVgmwaE0qt0cZs6ITcDgjrti5f/ki7aF5zs+N91V6hitGo3TItCFtnKg6cUVGdTmBFICFRg==", "dev": true, - "license": "(CC-BY-4.0 AND MIT)", "dependencies": { "@fortawesome/fontawesome-common-types": "6.7.1" }, @@ -594,27 +600,6 @@ "@img/sharp-libvips-darwin-arm64": "1.0.4" } }, - "node_modules/@img/sharp-darwin-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-darwin-x64": "1.0.4" - } - }, "node_modules/@img/sharp-libvips-darwin-arm64": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", @@ -630,291 +615,6 @@ "url": "https://opencollective.com/libvips" } }, - "node_modules/@img/sharp-libvips-darwin-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", - "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-arm": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-s390x": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", - "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", - "cpu": [ - "s390x" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linux-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", - "integrity": 
"sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-libvips-linuxmusl-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-linux-arm": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", - "cpu": [ - "arm" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-arm": "1.0.5" - } - }, - "node_modules/@img/sharp-linux-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-arm64": "1.0.4" - } - }, - "node_modules/@img/sharp-linux-s390x": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", - "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", - "cpu": [ - "s390x" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-s390x": "1.0.4" - } - }, - "node_modules/@img/sharp-linux-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linux-x64": "1.0.4" - } - }, - "node_modules/@img/sharp-linuxmusl-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" - } - }, - "node_modules/@img/sharp-linuxmusl-x64": { - "version": "0.33.5", - 
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", - "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - }, - "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" - } - }, - "node_modules/@img/sharp-wasm32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", - "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", - "cpu": [ - "wasm32" - ], - "optional": true, - "dependencies": { - "@emnapi/runtime": "^1.2.0" - }, - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-win32-ia32": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", - "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", - "cpu": [ - "ia32" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, - "node_modules/@img/sharp-win32-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", - "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": "^18.17.0 || ^20.3.0 || >=21.0.0" - }, - "funding": { - "url": "https://opencollective.com/libvips" - } - }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -1015,6 +715,30 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@monaco-editor/loader": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.4.0.tgz", + "integrity": "sha512-00ioBig0x642hytVspPl7DbQyaSWRaolYie/UFNjoTdvoKPzo6xrXLhTk9ixgIKcLH5b5vDOjVNiGyY+uDCUlg==", + "dependencies": { + "state-local": "^1.0.6" + }, + "peerDependencies": { + "monaco-editor": ">= 0.21.0 < 1" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.6.0.tgz", + "integrity": "sha512-RFkU9/i7cN2bsq/iTkurMWOEErmYcY6JiQI3Jn+WeR/FGISH8JbHERjpS9oRuSOPvDMJI0Z8nJeKkbOs9sBYQw==", + "dependencies": { + "@monaco-editor/loader": "^1.4.0" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/@next/env": { "version": "15.0.3", "resolved": "https://registry.npmjs.org/@next/env/-/env-15.0.3.tgz", @@ -1045,111 +769,6 @@ "node": ">= 10" } }, - "node_modules/@next/swc-darwin-x64": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.0.3.tgz", - "integrity": "sha512-Zxl/TwyXVZPCFSf0u2BNj5sE0F2uR6iSKxWpq4Wlk/Sv9Ob6YCKByQTkV2y6BCic+fkabp9190hyrDdPA/dNrw==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - 
"node_modules/@next/swc-linux-arm64-gnu": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.0.3.tgz", - "integrity": "sha512-T5+gg2EwpsY3OoaLxUIofmMb7ohAUlcNZW0fPQ6YAutaWJaxt1Z1h+8zdl4FRIOr5ABAAhXtBcpkZNwUcKI2fw==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-musl": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.0.3.tgz", - "integrity": "sha512-WkAk6R60mwDjH4lG/JBpb2xHl2/0Vj0ZRu1TIzWuOYfQ9tt9NFsIinI1Epma77JVgy81F32X/AeD+B2cBu/YQA==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-gnu": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.0.3.tgz", - "integrity": "sha512-gWL/Cta1aPVqIGgDb6nxkqy06DkwJ9gAnKORdHWX1QBbSZZB+biFYPFti8aKIQL7otCE1pjyPaXpFzGeG2OS2w==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-musl": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.0.3.tgz", - "integrity": "sha512-QQEMwFd8r7C0GxQS62Zcdy6GKx999I/rTO2ubdXEe+MlZk9ZiinsrjwoiBL5/57tfyjikgh6GOU2WRQVUej3UA==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-arm64-msvc": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.0.3.tgz", - "integrity": "sha512-9TEp47AAd/ms9fPNgtgnT7F3M1Hf7koIYYWCMQ9neOwjbVWJsHZxrFbI3iEDJ8rf1TDGpmHbKxXf2IFpAvheIQ==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-x64-msvc": { - "version": "15.0.3", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.0.3.tgz", - "integrity": "sha512-VNAz+HN4OGgvZs6MOoVfnn41kBzT+M+tB+OK4cww6DNyWS6wKaDpaAm/qLeOUbnMh0oVx1+mg0uoYARF69dJyA==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, "node_modules/@nicolo-ribaudo/eslint-scope-5-internals": { "version": "5.1.1-v1", "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz", @@ -1207,6 +826,114 @@ "url": "https://github.com/sponsors/panva" } }, + "node_modules/@parcel/watcher": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.0.tgz", + "integrity": "sha512-i0GV1yJnm2n3Yq1qw6QrUrd/LI9bE8WEBOTtOkpCXHHdyN3TAGgqAK/DAT05z4fq2x04cARXt2pDmjWjL92iTQ==", + "hasInstallScript": true, + "optional": true, + "peer": true, + "dependencies": { + "detect-libc": "^1.0.3", + "is-glob": "^4.0.3", + "micromatch": "^4.0.5", + "node-addon-api": "^7.0.0" + }, + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "@parcel/watcher-android-arm64": "2.5.0", + "@parcel/watcher-darwin-arm64": "2.5.0", + "@parcel/watcher-darwin-x64": "2.5.0", + "@parcel/watcher-freebsd-x64": "2.5.0", + "@parcel/watcher-linux-arm-glibc": "2.5.0", + "@parcel/watcher-linux-arm-musl": "2.5.0", + 
"@parcel/watcher-linux-arm64-glibc": "2.5.0", + "@parcel/watcher-linux-arm64-musl": "2.5.0", + "@parcel/watcher-linux-x64-glibc": "2.5.0", + "@parcel/watcher-linux-x64-musl": "2.5.0", + "@parcel/watcher-win32-arm64": "2.5.0", + "@parcel/watcher-win32-ia32": "2.5.0", + "@parcel/watcher-win32-x64": "2.5.0" + } + }, + "node_modules/@parcel/watcher-darwin-arm64": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.0.tgz", + "integrity": "sha512-hyZ3TANnzGfLpRA2s/4U1kbw2ZI4qGxaRJbBH2DCSREFfubMswheh8TeiC1sGZ3z2jUf3s37P0BBlrD3sjVTUw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "peer": true, + "engines": { + "node": ">= 10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/@parcel/watcher/node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", + "optional": true, + "peer": true, + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/@patternfly/chatbot": { + "version": "2.1.0-prerelease.17", + "resolved": "https://registry.npmjs.org/@patternfly/chatbot/-/chatbot-2.1.0-prerelease.17.tgz", + "integrity": "sha512-fped4uypC7pci4jUgjguSo04v6h4YTow7x3Z9rU5+AjHfvE98vf7H7YAskd8wrhZOvjITHxhNiFGIZvQm4MViw==", + "dependencies": { + "@patternfly/react-code-editor": "^6.0.0", + "@patternfly/react-core": "^6.0.0", + "@patternfly/react-icons": "^6.0.0", + "clsx": "^2.1.0", + "framer-motion": "^11.3.28", + "path-browserify": "^1.0.1", + "react-jss": "^10.10.0", + "react-markdown": "^9.0.1", + "react-syntax-highlighter": "^15.5.0", + "react-textarea-auto-witdth-height": "^1.0.3", + "remark-gfm": "^4.0.0" + }, + "peerDependencies": { + "react": "^17 || ^18", + "react-dom": "^17 || ^18" + } + }, + "node_modules/@patternfly/react-code-editor": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@patternfly/react-code-editor/-/react-code-editor-6.0.0.tgz", + "integrity": "sha512-TnI/NNkizzWTzdVZWmpyEPKXgsOoUeklk8Xlgtl7II/+5juLjlt0wXTMhL35F59Rzd0YohGs251zXAwJbn6vIQ==", + "dependencies": { + "@monaco-editor/react": "^4.6.0", + "@patternfly/react-core": "^6.0.0", + "@patternfly/react-icons": "^6.0.0", + "@patternfly/react-styles": "^6.0.0", + "react-dropzone": "14.2.3", + "tslib": "^2.7.0" + }, + "peerDependencies": { + "react": "^17 || ^18", + "react-dom": "^17 || ^18" + } + }, "node_modules/@patternfly/react-core": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-6.0.0.tgz", @@ -1260,6 +987,28 @@ "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-6.0.0.tgz", "integrity": "sha512-xd0ynDkiIW2rp8jz4TNvR4Dyaw9kSMkZdsuYcLlFXCVmvX//Mnl4rhBnid/2j2TaqK0NbkyTTPnPY/BU7SfLVQ==" }, + "node_modules/@patternfly/virtual-assistant": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@patternfly/virtual-assistant/-/virtual-assistant-2.0.2.tgz", + "integrity": "sha512-2qVQg1cU3Wv+mkFklAYQWyXWICbRlL/vgllSORGmBL0VIYM+Q3ICD3z/CkQrt0K1qdEqXp60dCnu1BKODrptxQ==", + "dependencies": { + "@patternfly/react-code-editor": "^6.0.0", + "@patternfly/react-core": "^6.0.0", + "@patternfly/react-icons": "^6.0.0", + "clsx": "^2.1.0", + "framer-motion": "^11.3.28", + "path-browserify": "^1.0.1", + "react-jss": "^10.10.0", + 
"react-markdown": "^9.0.1", + "react-syntax-highlighter": "^15.5.0", + "react-textarea-auto-witdth-height": "^1.0.3", + "remark-gfm": "^4.0.0" + }, + "peerDependencies": { + "react": "^17 || ^18", + "react-dom": "^17 || ^18" + } + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -1376,6 +1125,35 @@ "tslib": "^2.4.0" } }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dependencies": { + "@types/unist": "*" + } + }, "node_modules/@types/hoist-non-react-statics": { "version": "3.3.5", "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz", @@ -1400,6 +1178,19 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/ms": { + "version": "0.7.34", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", + "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" + }, "node_modules/@types/node": { "version": "22.5.2", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.2.tgz", @@ -1413,14 +1204,12 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "dev": true, "license": "MIT" }, "node_modules/@types/react": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.1.tgz", "integrity": "sha512-V0kuGBX3+prX+DQ/7r2qsv1NsdfnCLnTgnRJ1pYnxykBhGMz+qj+box5lq7XsO5mtZsBqpjwwTu/7wszPfMBcw==", - "dev": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -1452,6 +1241,17 @@ "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==" }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + 
"optional": true + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==" + }, "node_modules/@types/uuid": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-10.0.0.tgz", @@ -1681,7 +1481,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", - "dev": true, "license": "ISC" }, "node_modules/acorn": { @@ -2010,6 +1809,15 @@ "deep-equal": "^2.0.5" } }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -2031,7 +1839,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -2133,6 +1941,67 @@ } ] }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chokidar": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.1.tgz", + "integrity": 
"sha512-n8enUVCED/KVRQlab1hr3MVpcVMvxtZjmEa956u+4YijlmQED223XMSYj2tLuKvr4jcCTzNNMpQDUer72MMmzA==", + "optional": true, + "peer": true, + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/clean-git-ref": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", @@ -2143,6 +2012,14 @@ "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "engines": { + "node": ">=6" + } + }, "node_modules/color": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", @@ -2200,6 +2077,15 @@ "node": ">= 0.8" } }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -2248,12 +2134,30 @@ "node": ">= 8" } }, - "node_modules/csstype": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "dev": true, - "license": "MIT" + "node_modules/css-jss": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/css-jss/-/css-jss-10.10.0.tgz", + "integrity": "sha512-YyMIS/LsSKEGXEaVJdjonWe18p4vXLo8CMA4FrW/kcaEyqdIGKCFXao31gbJddXEdIxSXFFURWrenBJPlKTgAA==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "^10.10.0", + "jss-preset-default": "^10.10.0" + } + }, + "node_modules/css-vendor": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-2.0.8.tgz", + "integrity": "sha512-x9Aq0XTInxrkuFeHKbYC7zWY8ai7qJ04Kxd9MnvbC1uO5DagxoHQjm4JvG+vCdXOoFtCjbL2XSZfxmoYa9uQVQ==", + "dependencies": { + "@babel/runtime": "^7.8.3", + "is-in-browser": "^1.0.2" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "license": "MIT" }, "node_modules/damerau-levenshtein": { "version": "1.0.8", @@ -2325,11 +2229,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/date-fns": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-4.1.0.tgz", + "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" + } + }, "node_modules/debug": { "version": "4.3.5", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", - "dev": true, "license": "MIT", "dependencies": { 
"ms": "2.1.2" @@ -2343,6 +2255,18 @@ } } }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -2442,6 +2366,14 @@ "node": ">=0.4.0" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "engines": { + "node": ">=6" + } + }, "node_modules/detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", @@ -2450,6 +2382,18 @@ "node": ">=8" } }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/diff3": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/diff3/-/diff3-0.0.3.tgz", @@ -2481,6 +2425,14 @@ "node": ">=6.0.0" } }, + "node_modules/dompurify": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.2.tgz", + "integrity": "sha512-YMM+erhdZ2nkZ4fTNRTSI94mb7VG7uVF5vj5Zde7tImgnhZE3R6YW/IACGIHb2ux+QkEXMhe591N+5jWOmL4Zw==", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -2719,6 +2671,17 @@ "node": ">=6" } }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint": { "version": "8.57.0", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", @@ -3577,6 +3540,15 @@ "node": ">=4.0" } }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -3587,6 +3559,11 @@ "node": ">=0.10.0" } }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, "node_modules/fast-deep-equal": { 
"version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -3655,6 +3632,18 @@ "reusify": "^1.0.4" } }, + "node_modules/fault": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", + "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", + "dependencies": { + "format": "^0.2.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/fecha": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", @@ -3712,7 +3701,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -3835,6 +3824,14 @@ "node": ">= 6" } }, + "node_modules/format": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", + "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", + "engines": { + "node": ">=0.4.x" + } + }, "node_modules/formdata-polyfill": { "version": "4.0.10", "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", @@ -3847,6 +3844,32 @@ "node": ">=12.20.0" } }, + "node_modules/framer-motion": { + "version": "11.13.1", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.13.1.tgz", + "integrity": "sha512-F40tpGTHByhn9h3zdBQPcEro+pSLtzARcocbNqAyfBI+u9S+KZuHH/7O9+z+GEkoF3eqFxfvVw0eBDytohwqmQ==", + "dependencies": { + "motion-dom": "^11.13.0", + "motion-utils": "^11.13.0", + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, "node_modules/fs": { "version": "0.0.1-security", "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", @@ -4162,16 +4185,148 @@ "node": ">= 0.4" } }, + "node_modules/hast-util-parse-selector": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", + "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.2.tgz", + "integrity": "sha512-1ngXYb+V9UT5h+PxNRa1O1FYguZK/XL+gkeqvp7EdHlB9oHUG0eYRo/vY5inBdcqo3RkPMC58/H94HvkbfGdyg==", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-object": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", + "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", + "dependencies": { + "@types/hast": "^2.0.0", + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript/node_modules/@types/hast": { + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz", + "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/hastscript/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==" + }, + "node_modules/hastscript/node_modules/comma-separated-tokens": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", + "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hastscript/node_modules/property-information": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", + "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", + "dependencies": { + "xtend": "^4.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hastscript/node_modules/space-separated-tokens": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", + "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/highlight.js": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", + "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "engines": { + "node": "*" + } + }, + "node_modules/highlightjs-vue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/highlightjs-vue/-/highlightjs-vue-1.0.0.tgz", + "integrity": "sha512-PDEfEF102G23vHmPhLyPboFCD+BkMGu+GuJe2d9/eH4FsCwvgBpnc9n0pGE+ffKdph38s6foEZiEjdgHdzp+IA==" + }, "node_modules/hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", 
"integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "dev": true, "license": "BSD-3-Clause", "dependencies": { "react-is": "^16.7.0" } }, + "node_modules/html-url-attributes": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz", + "integrity": "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hyphenate-style-name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.1.0.tgz", + "integrity": "sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==" + }, "node_modules/ignore": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", @@ -4181,6 +4336,13 @@ "node": ">= 4" } }, + "node_modules/immutable": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-5.0.3.tgz", + "integrity": "sha512-P8IdPQHq3lA1xVeBRi5VPqUm5HDgKnx0Ru51wZz5mjxHr5n3RWhjIpOFU7ybkUxfB+5IToy+OLaHYDBIWsv+uw==", + "optional": true, + "peer": true + }, "node_modules/import-fresh": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", @@ -4226,6 +4388,11 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "license": "ISC" }, + "node_modules/inline-style-parser": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.4.tgz", + "integrity": "sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==" + }, "node_modules/internal-slot": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", @@ -4241,6 +4408,28 @@ "node": ">= 0.4" } }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/is-arguments": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", @@ -4388,11 +4577,20 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": 
"sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, + "devOptional": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -4441,7 +4639,7 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" @@ -4450,6 +4648,20 @@ "node": ">=0.10.0" } }, + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-in-browser": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz", + "integrity": "sha512-FeXIBgG/CPGd/WUxuEyvgGTEfwiG9Z4EKGxjNMRqviiIIfsmgrpnHLffEDdwUHqNva1VEW91o3xBT/m8Elgl9g==" + }, "node_modules/is-map": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", @@ -4480,7 +4692,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, + "devOptional": true, "license": "MIT", "engines": { "node": ">=0.12.0" @@ -4512,6 +4724,17 @@ "node": ">=8" } }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -4679,7 +4902,6 @@ "version": "1.27.2", "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.27.2.tgz", "integrity": "sha512-nCiz+ieOkWb5kDJSSckDTiMjTcgkxqH2xuiQmw1Y6O/spwx4d6TKYSfGCd4f71HGvUYcRSUGqJEI+3uN6UQlOw==", - "license": "MIT", "dependencies": { "async-lock": "^1.4.1", "clean-git-ref": "^2.0.1", @@ -4808,6 +5030,158 @@ "node": ">=6" } }, + "node_modules/jss": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss/-/jss-10.10.0.tgz", + "integrity": "sha512-cqsOTS7jqPsPMjtKYDUpdFC0AbhYFLTcuGRqymgmdJIeQ8cH7+AgX7YSgQy79wXloZq2VvATYxUOUQEvS1V/Zw==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "csstype": "^3.0.2", + "is-in-browser": "^1.1.3", + "tiny-warning": "^1.0.2" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/jss" + } + }, + "node_modules/jss-plugin-camel-case": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-camel-case/-/jss-plugin-camel-case-10.10.0.tgz", + "integrity": "sha512-z+HETfj5IYgFxh1wJnUAU8jByI48ED+v0fuTuhKrPR+pRBYS2EDwbusU8aFOpCdYhtRc9zhN+PJ7iNE8pAWyPw==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "hyphenate-style-name": "^1.0.3", + "jss": "10.10.0" + } + }, + "node_modules/jss-plugin-compose": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-compose/-/jss-plugin-compose-10.10.0.tgz", + "integrity": 
"sha512-F5kgtWpI2XfZ3Z8eP78tZEYFdgTIbpA/TMuX3a8vwrNolYtN1N4qJR/Ob0LAsqIwCMLojtxN7c7Oo/+Vz6THow==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "tiny-warning": "^1.0.2" + } + }, + "node_modules/jss-plugin-default-unit": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-default-unit/-/jss-plugin-default-unit-10.10.0.tgz", + "integrity": "sha512-SvpajxIECi4JDUbGLefvNckmI+c2VWmP43qnEy/0eiwzRUsafg5DVSIWSzZe4d2vFX1u9nRDP46WCFV/PXVBGQ==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0" + } + }, + "node_modules/jss-plugin-expand": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-expand/-/jss-plugin-expand-10.10.0.tgz", + "integrity": "sha512-ymT62W2OyDxBxr7A6JR87vVX9vTq2ep5jZLIdUSusfBIEENLdkkc0lL/Xaq8W9s3opUq7R0sZQpzRWELrfVYzA==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0" + } + }, + "node_modules/jss-plugin-extend": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-extend/-/jss-plugin-extend-10.10.0.tgz", + "integrity": "sha512-sKYrcMfr4xxigmIwqTjxNcHwXJIfvhvjTNxF+Tbc1NmNdyspGW47Ey6sGH8BcQ4FFQhLXctpWCQSpDwdNmXSwg==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "tiny-warning": "^1.0.2" + } + }, + "node_modules/jss-plugin-global": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-global/-/jss-plugin-global-10.10.0.tgz", + "integrity": "sha512-icXEYbMufiNuWfuazLeN+BNJO16Ge88OcXU5ZDC2vLqElmMybA31Wi7lZ3lf+vgufRocvPj8443irhYRgWxP+A==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0" + } + }, + "node_modules/jss-plugin-nested": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-nested/-/jss-plugin-nested-10.10.0.tgz", + "integrity": "sha512-9R4JHxxGgiZhurDo3q7LdIiDEgtA1bTGzAbhSPyIOWb7ZubrjQe8acwhEQ6OEKydzpl8XHMtTnEwHXCARLYqYA==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "tiny-warning": "^1.0.2" + } + }, + "node_modules/jss-plugin-props-sort": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-props-sort/-/jss-plugin-props-sort-10.10.0.tgz", + "integrity": "sha512-5VNJvQJbnq/vRfje6uZLe/FyaOpzP/IH1LP+0fr88QamVrGJa0hpRRyAa0ea4U/3LcorJfBFVyC4yN2QC73lJg==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0" + } + }, + "node_modules/jss-plugin-rule-value-function": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-rule-value-function/-/jss-plugin-rule-value-function-10.10.0.tgz", + "integrity": "sha512-uEFJFgaCtkXeIPgki8ICw3Y7VMkL9GEan6SqmT9tqpwM+/t+hxfMUdU4wQ0MtOiMNWhwnckBV0IebrKcZM9C0g==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "tiny-warning": "^1.0.2" + } + }, + "node_modules/jss-plugin-rule-value-observable": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-rule-value-observable/-/jss-plugin-rule-value-observable-10.10.0.tgz", + "integrity": "sha512-ZLMaYrR3QE+vD7nl3oNXuj79VZl9Kp8/u6A1IbTPDcuOu8b56cFdWRZNZ0vNr8jHewooEeq2doy8Oxtymr2ZPA==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "symbol-observable": "^1.2.0" + } + }, + "node_modules/jss-plugin-template": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-template/-/jss-plugin-template-10.10.0.tgz", + "integrity": "sha512-ocXZBIOJOA+jISPdsgkTs8wwpK6UbsvtZK5JI7VUggTD6LWKbtoxUzadd2TpfF+lEtlhUmMsCkTRNkITdPKa6w==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": 
"10.10.0", + "tiny-warning": "^1.0.2" + } + }, + "node_modules/jss-plugin-vendor-prefixer": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-plugin-vendor-prefixer/-/jss-plugin-vendor-prefixer-10.10.0.tgz", + "integrity": "sha512-UY/41WumgjW8r1qMCO8l1ARg7NHnfRVWRhZ2E2m0DMYsr2DD91qIXLyNhiX83hHswR7Wm4D+oDYNC1zWCJWtqg==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "css-vendor": "^2.0.8", + "jss": "10.10.0" + } + }, + "node_modules/jss-preset-default": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/jss-preset-default/-/jss-preset-default-10.10.0.tgz", + "integrity": "sha512-GL175Wt2FGhjE+f+Y3aWh+JioL06/QWFgZp53CbNNq6ZkVU0TDplD8Bxm9KnkotAYn3FlplNqoW5CjyLXcoJ7Q==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "jss": "10.10.0", + "jss-plugin-camel-case": "10.10.0", + "jss-plugin-compose": "10.10.0", + "jss-plugin-default-unit": "10.10.0", + "jss-plugin-expand": "10.10.0", + "jss-plugin-extend": "10.10.0", + "jss-plugin-global": "10.10.0", + "jss-plugin-nested": "10.10.0", + "jss-plugin-props-sort": "10.10.0", + "jss-plugin-rule-value-function": "10.10.0", + "jss-plugin-rule-value-observable": "10.10.0", + "jss-plugin-template": "10.10.0", + "jss-plugin-vendor-prefixer": "10.10.0" + } + }, "node_modules/jsx-ast-utils": { "version": "3.3.5", "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", @@ -4867,95 +5241,916 @@ "dev": true, "license": "MIT", "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "license": "MIT" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/logform": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.7.0.tgz", + "integrity": "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==", + "dependencies": { + "@colors/colors": "1.6.0", + "@types/triple-beam": "^1.3.2", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "safe-stable-stringify": "^2.3.1", + "triple-beam": "^1.3.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lowlight": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz", + "integrity": "sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==", + "dependencies": { + "fault": "^1.0.0", + "highlight.js": "~10.7.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "peer": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", + "integrity": "sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz", + "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + 
"resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz", + "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.3.tgz", + "integrity": 
"sha512-bfOjvNt+1AcbPLTFMFWY149nJz0OjmewJs3LQQ5pIyVGxP4CdOqNVJL6kTaM5c68p8q82Xv3nCyFfUnuEcH3UQ==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + 
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromark": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.1.tgz", + "integrity": "sha512-eBPdkcoCNvYcxQOAKAlceo5SNdzZWfF+FcSupREAzdAh9rRmE239CEQAiTwIgblwnoM8zzj35sZ5ZwvSEOF6Kw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.2.tgz", + "integrity": "sha512-FKjQKbxd1cibWMM1P9N+H8TwlgGgSkWZMmfuVucLCHaYqeSvJ0hFeHsIa65pA2nYbes0f8LDHPMrd9X7Ujxg9w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + "micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": 
"sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.0.tgz", + "integrity": "sha512-Ub2ncQv+fwD70/l4ou27b4YzfNaCJOvyX4HxXU15m7mpYY+rjuWzsLIPZHJL253Z643RpbcP1oeIJlQ/SKW67g==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": 
"2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "license": "MIT", + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + 
"micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "license": "MIT" + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true, - "license": "MIT" + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] }, - "node_modules/logform": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.7.0.tgz", - "integrity": "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ==", + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "@colors/colors": "1.6.0", - "@types/triple-beam": "^1.3.2", - "fecha": "^4.2.0", - "ms": "^2.1.1", - "safe-stable-stringify": "^2.3.1", - "triple-beam": "^1.3.0" - }, - "engines": { - "node": ">= 12.0.0" + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "license": "MIT", + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" + "micromark-util-types": "^2.0.0" } }, - "node_modules/lru-cache": { - "version": "5.1.1", 
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dev": true, - "peer": true, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "yallist": "^3.0.2" + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" } }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" + "node_modules/micromark-util-subtokenize": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.3.tgz", + "integrity": "sha512-VXJJuNxYWSoYL6AJ6OQECCFGhIU2GGHMw8tahogePBrjkG8aCCas3ibkp7RnVOSTClg2is05/R7maAhF1XyQMg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.1.tgz", + "integrity": "sha512-534m2WhVTddrcKVepwmVEVnUAmtrx9bfIjNoQHRqfnvdaHQiFytEhJoTgpWJvDEXCO5gLTQh3wYC1PgOJA4NSQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, "node_modules/micromatch": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "braces": "^3.0.3", @@ -5040,6 +6235,22 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/monaco-editor": { + "version": "0.52.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.0.tgz", + "integrity": "sha512-OeWhNpABLCeTqubfqLMXGsqf6OmPU6pHM85kF3dhy6kq5hnhuVS1p3VrEW/XhWHc71P2tHyS5JFySD8mgs1crw==", + "peer": true + }, + "node_modules/motion-dom": { + "version": "11.13.0", + "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-11.13.0.tgz", + 
"integrity": "sha512-Oc1MLGJQ6nrvXccXA89lXtOqFyBmvHtaDcTRGT66o8Czl7nuA8BeHAd9MQV1pQKX0d2RHFBFaw5g3k23hQJt0w==" + }, + "node_modules/motion-utils": { + "version": "11.13.0", + "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-11.13.0.tgz", + "integrity": "sha512-lq6TzXkH5c/ysJQBxgLXgM01qwBH1b4goTPh57VvZWJbVJZF/0SB31UWEn4EIqbVPf3au88n2rvK17SpDTja1A==" + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -5192,6 +6403,13 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", + "optional": true, + "peer": true + }, "node_modules/node-domexception": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", @@ -5532,11 +6750,34 @@ "node": ">=6" } }, + "node_modules/parse-entities": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz", + "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==" + }, "node_modules/path-browserify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", - "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", - "license": "MIT" + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==" }, "node_modules/path-exists": { "version": "4.0.0", @@ -5618,7 +6859,7 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, + "devOptional": true, "license": "MIT", "engines": { "node": ">=8.6" @@ -5741,6 +6982,14 @@ "integrity": "sha512-WuxUnVtlWL1OfZFQFuqvnvs6MiAGk9UNsBostyBOB0Is9wb5uRESevA6rnl/rkksXaGX3GzZhPup5d6Vp1nFew==", "license": "MIT" }, + "node_modules/prismjs": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", + "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==", + "engines": { + "node": ">=6" + } + }, "node_modules/prop-types": { "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ -5752,6 +7001,15 @@ "react-is": "^16.13.1" } }, + "node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": 
"sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", @@ -5801,6 +7059,11 @@ "node": ">=0.10.0" } }, + "node_modules/react-display-name": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/react-display-name/-/react-display-name-0.2.5.tgz", + "integrity": "sha512-I+vcaK9t4+kypiSgaiVWAipqHRXYmZIuAiS8vzFvXHHXVigg/sMKwlRgLy6LH2i3rmP+0Vzfl5lFsFRwF1r3pg==" + }, "node_modules/react-dom": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", @@ -5837,6 +7100,76 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", "license": "MIT" }, + "node_modules/react-jss": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/react-jss/-/react-jss-10.10.0.tgz", + "integrity": "sha512-WLiq84UYWqNBF6579/uprcIUnM1TSywYq6AIjKTTTG5ziJl9Uy+pwuvpN3apuyVwflMbD60PraeTKT7uWH9XEQ==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "@emotion/is-prop-valid": "^0.7.3", + "css-jss": "10.10.0", + "hoist-non-react-statics": "^3.2.0", + "is-in-browser": "^1.1.3", + "jss": "10.10.0", + "jss-preset-default": "10.10.0", + "prop-types": "^15.6.0", + "shallow-equal": "^1.2.0", + "theming": "^3.3.0", + "tiny-warning": "^1.0.2" + }, + "peerDependencies": { + "react": ">=16.8.6" + } + }, + "node_modules/react-markdown": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.0.1.tgz", + "integrity": "sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "html-url-attributes": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=18", + "react": ">=18" + } + }, + "node_modules/react-syntax-highlighter": { + "version": "15.6.1", + "resolved": "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.6.1.tgz", + "integrity": "sha512-OqJ2/vL7lEeV5zTJyG7kmARppUjiB9h9udl4qHQjjgEos66z00Ia0OckwYfRxCSFrW8RJIBnsBwQsHZbVPspqg==", + "dependencies": { + "@babel/runtime": "^7.3.1", + "highlight.js": "^10.4.1", + "highlightjs-vue": "^1.0.0", + "lowlight": "^1.17.0", + "prismjs": "^1.27.0", + "refractor": "^3.6.0" + }, + "peerDependencies": { + "react": ">= 0.14.0" + } + }, + "node_modules/react-textarea-auto-witdth-height": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/react-textarea-auto-witdth-height/-/react-textarea-auto-witdth-height-1.0.3.tgz", + "integrity": "sha512-12NbXe+OBmwv1VCMdOKmjrHwLHrsVbOHn2Wm/xER6jtvARj0bX73skhJnywhhbuQ2FfX9Y57AkpnkQTJbwwBxA==", + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", @@ -5851,6 +7184,20 @@ "node": ">= 6" } }, + "node_modules/readdirp": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.0.2.tgz", + "integrity": 
"sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==", + "optional": true, + "peer": true, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/reflect.getprototypeof": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", @@ -5867,10 +7214,116 @@ "which-builtin-type": "^1.1.3" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/refractor": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/refractor/-/refractor-3.6.0.tgz", + "integrity": "sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==", + "dependencies": { + "hastscript": "^6.0.0", + "parse-entities": "^2.0.0", + "prismjs": "~1.27.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/character-entities": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", + "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/character-entities-legacy": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", + "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/character-reference-invalid": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", + "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/is-alphabetical": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", + "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/is-alphanumerical": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", + "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "dependencies": { + "is-alphabetical": "^1.0.0", + "is-decimal": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/is-decimal": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", + "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + 
"node_modules/refractor/node_modules/is-hexadecimal": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", + "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/parse-entities": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", + "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "dependencies": { + "character-entities": "^1.0.0", + "character-entities-legacy": "^1.0.0", + "character-reference-invalid": "^1.0.0", + "is-alphanumerical": "^1.0.0", + "is-decimal": "^1.0.0", + "is-hexadecimal": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/refractor/node_modules/prismjs": { + "version": "1.27.0", + "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.27.0.tgz", + "integrity": "sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==", + "engines": { + "node": ">=6" } }, "node_modules/regenerator-runtime": { @@ -5898,6 +7351,68 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/remark-gfm": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz", + "integrity": "sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.1.tgz", + "integrity": "sha512-g/osARvjkBXb6Wo0XvAeXQohVta8i84ACbenPpoSsxTOQH/Ae0/RGP4WZgnMH5pMLpsj4FG7OHmcIcXxpza8eQ==", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/resolve": { "version": "1.22.8", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", @@ -6099,6 
+7614,27 @@ "node": ">=10" } }, + "node_modules/sass": { + "version": "1.82.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.82.0.tgz", + "integrity": "sha512-j4GMCTa8elGyN9A7x7bEglx0VgSpNUG4W4wNedQ33wSMdnkqQCT8HTwOaVSV4e6yQovcu/3Oc4coJP/l0xhL2Q==", + "optional": true, + "peer": true, + "dependencies": { + "chokidar": "^4.0.0", + "immutable": "^5.0.2", + "source-map-js": ">=0.6.2 <2.0.0" + }, + "bin": { + "sass": "sass.js" + }, + "engines": { + "node": ">=14.0.0" + }, + "optionalDependencies": { + "@parcel/watcher": "^2.4.1" + } + }, "node_modules/scheduler": { "version": "0.23.2", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", @@ -6164,6 +7700,11 @@ "sha.js": "bin.js" } }, + "node_modules/shallow-equal": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shallow-equal/-/shallow-equal-1.2.1.tgz", + "integrity": "sha512-S4vJDjHHMBaiZuT9NPb616CSmLf618jawtv3sufLl6ivK8WocjAo58cXwbRV1cgqxH0Qbv+iUt6m05eqEa2IRA==" + }, "node_modules/sharp": { "version": "0.33.5", "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", @@ -6367,6 +7908,15 @@ "node": ">=0.10.0" } }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/stack-trace": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", @@ -6376,6 +7926,11 @@ "node": "*" } }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": "sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==" + }, "node_modules/stop-iteration-iterator": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", @@ -6566,6 +8121,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", @@ -6616,6 +8184,14 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-to-object": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.8.tgz", + "integrity": "sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g==", + "dependencies": { + "inline-style-parser": "0.2.4" + } + }, "node_modules/styled-jsx": { "version": "5.1.6", "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz", @@ -6651,6 +8227,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/symbol-observable": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", + "integrity": 
"sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/synckit": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.8.tgz", @@ -6696,11 +8280,33 @@ "dev": true, "license": "MIT" }, + "node_modules/theming": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/theming/-/theming-3.3.0.tgz", + "integrity": "sha512-u6l4qTJRDaWZsqa8JugaNt7Xd8PPl9+gonZaIe28vAhqgHMIG/DOyFPqiKN/gQLQYj05tHv+YQdNILL4zoiAVA==", + "dependencies": { + "hoist-non-react-statics": "^3.3.0", + "prop-types": "^15.5.8", + "react-display-name": "^0.2.4", + "tiny-warning": "^1.0.2" + }, + "engines": { + "node": ">=8" + }, + "peerDependencies": { + "react": ">=16.3" + } + }, + "node_modules/tiny-warning": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "is-number": "^7.0.0" @@ -6709,6 +8315,15 @@ "node": ">=8.0" } }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/triple-beam": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.4.1.tgz", @@ -6717,6 +8332,15 @@ "node": ">= 14.0.0" } }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/ts-api-utils": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", @@ -6899,6 +8523,87 @@ "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", "dev": true }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz", + "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + 
"resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", + "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz", + "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/update-browserslist-db": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", @@ -6958,6 +8663,32 @@ "uuid": "dist/esm/bin/uuid" } }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz", + "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", @@ -7253,6 +8984,14 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", @@ -7272,6 +9011,120 @@ 
"funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-15.0.3.tgz", + "integrity": "sha512-Zxl/TwyXVZPCFSf0u2BNj5sE0F2uR6iSKxWpq4Wlk/Sv9Ob6YCKByQTkV2y6BCic+fkabp9190hyrDdPA/dNrw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-15.0.3.tgz", + "integrity": "sha512-T5+gg2EwpsY3OoaLxUIofmMb7ohAUlcNZW0fPQ6YAutaWJaxt1Z1h+8zdl4FRIOr5ABAAhXtBcpkZNwUcKI2fw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-15.0.3.tgz", + "integrity": "sha512-WkAk6R60mwDjH4lG/JBpb2xHl2/0Vj0ZRu1TIzWuOYfQ9tt9NFsIinI1Epma77JVgy81F32X/AeD+B2cBu/YQA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-15.0.3.tgz", + "integrity": "sha512-gWL/Cta1aPVqIGgDb6nxkqy06DkwJ9gAnKORdHWX1QBbSZZB+biFYPFti8aKIQL7otCE1pjyPaXpFzGeG2OS2w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-15.0.3.tgz", + "integrity": "sha512-QQEMwFd8r7C0GxQS62Zcdy6GKx999I/rTO2ubdXEe+MlZk9ZiinsrjwoiBL5/57tfyjikgh6GOU2WRQVUej3UA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-15.0.3.tgz", + "integrity": "sha512-9TEp47AAd/ms9fPNgtgnT7F3M1Hf7koIYYWCMQ9neOwjbVWJsHZxrFbI3iEDJ8rf1TDGpmHbKxXf2IFpAvheIQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-15.0.3.tgz", + "integrity": "sha512-VNAz+HN4OGgvZs6MOoVfnn41kBzT+M+tB+OK4cww6DNyWS6wKaDpaAm/qLeOUbnMh0oVx1+mg0uoYARF69dJyA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } } } } diff --git a/package.json b/package.json index 66263049..f12e0212 100644 --- a/package.json +++ b/package.json @@ -19,11 +19,15 @@ "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.7.1", "@next/env": "^15.0.3", + "@patternfly/chatbot": "^2.1.0-prerelease.17", "@patternfly/react-core": "^6.0.0", "@patternfly/react-icons": "^6.0.0", "@patternfly/react-styles": "^6.0.0", 
"@patternfly/react-table": "^6.0.0", "axios": "^1.7.9", + "@patternfly/virtual-assistant": "^2.0.2", + "date-fns": "^4.1.0", + "dompurify": "^3.2.2", "fs": "^0.0.1-security", "isomorphic-git": "^1.27.2", "js-yaml": "^4.1.0", diff --git a/src/Containerfile b/src/Containerfile index 3e2e0989..383b6289 100644 --- a/src/Containerfile +++ b/src/Containerfile @@ -1,14 +1,17 @@ FROM registry.access.redhat.com/ubi9/nodejs-22:9.5-1730543890 +USER root + WORKDIR /opt/app-root/src -COPY package*.json ./ +COPY ./ . + +RUN dnf install -y jq +RUN mkdir -p node_modules +RUN chown -R default:root package*.json next-env.d.ts node_modules /opt/app-root/src/src/healthcheck-probe.sh -USER root -RUN chown -R default:root /opt/app-root/src/package*.json USER default -RUN npm install -COPY ./ . +RUN npm install RUN npm run build CMD ["npm", "run", "start"] diff --git a/src/app/api/envConfig/route.ts b/src/app/api/envConfig/route.ts index c58b2a65..9381ed2b 100644 --- a/src/app/api/envConfig/route.ts +++ b/src/app/api/envConfig/route.ts @@ -15,7 +15,9 @@ export async function GET() { UPSTREAM_REPO_OWNER: process.env.NEXT_PUBLIC_TAXONOMY_REPO_OWNER || '', UPSTREAM_REPO_NAME: process.env.NEXT_PUBLIC_TAXONOMY_REPO || '', DEPLOYMENT_TYPE: process.env.IL_UI_DEPLOYMENT || '', - EXPERIMENTAL_FEATURES: process.env.NEXT_PUBLIC_EXPERIMENTAL_FEATURES || '' + ENABLE_DEV_MODE: process.env.IL_ENABLE_DEV_MODE || 'false', + EXPERIMENTAL_FEATURES: process.env.NEXT_PUBLIC_EXPERIMENTAL_FEATURES || '', + TAXONOMY_ROOT_DIR: process.env.NEXT_PUBLIC_TAXONOMY_ROOT_DIR || '' }; return NextResponse.json(envConfig); diff --git a/src/app/api/fine-tune/data-sets/route.ts b/src/app/api/fine-tune/data-sets/route.ts new file mode 100644 index 00000000..e239d6ae --- /dev/null +++ b/src/app/api/fine-tune/data-sets/route.ts @@ -0,0 +1,22 @@ +// src/pages/api/fine-tune/data-sets.ts +'use server'; + +import { NextRequest, NextResponse } from 'next/server'; + +export async function GET(req: NextRequest) { + try { + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + + const response = await fetch(`${API_SERVER}/data`); + const data = await response.json(); + + if (!response.ok) { + return NextResponse.json({ error: 'Failed to fetch datasets' }, { status: response.status }); + } + + return NextResponse.json(data, { status: 200 }); + } catch (error) { + console.error('Error fetching datasets:', error); + return NextResponse.json({ error: 'Internal Server Error' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/data/generate/route.ts b/src/app/api/fine-tune/data/generate/route.ts new file mode 100644 index 00000000..fb2e802f --- /dev/null +++ b/src/app/api/fine-tune/data/generate/route.ts @@ -0,0 +1,26 @@ +'use server'; + +import { NextResponse } from 'next/server'; + +export async function POST(request: Request) { + try { + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + + const response = await fetch(`${API_SERVER}/data/generate`, { + method: 'POST' + }); + + if (!response.ok) { + console.error('Error response from API server:', response.status, response.statusText); + return NextResponse.json({ error: 'Failed to generate data' }, { status: response.status }); + } + + const responseData = await response.json(); + + // Return the response from the API server to the client + return NextResponse.json(responseData, { status: 200 }); + } catch (error) { + console.error('Error generating data:', error); + return NextResponse.json({ error: 'An error occurred while generating data' }, { status: 500 }); + } +} diff --git 
a/src/app/api/fine-tune/git/branches/route.ts b/src/app/api/fine-tune/git/branches/route.ts new file mode 100644 index 00000000..ccb1902a --- /dev/null +++ b/src/app/api/fine-tune/git/branches/route.ts @@ -0,0 +1,65 @@ +// src/app/api/native/fine-tune/git/branches/route.ts +import { NextResponse } from 'next/server'; +import * as git from 'isomorphic-git'; +import fs from 'fs'; +import path from 'path'; + +const REMOTE_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_TAXONOMY_ROOT_DIR || ''; + +export async function GET() { + const REPO_DIR = path.join(REMOTE_TAXONOMY_ROOT_DIR, '/taxonomy'); + try { + console.log(`Checking local taxonomy directory for branches: ${REPO_DIR}`); + + // Ensure the repository path exists + if (!fs.existsSync(REPO_DIR)) { + console.log('Local repository path does not exist:', REPO_DIR); + return NextResponse.json({ error: 'Local repository path does not exist.' }, { status: 400 }); + } + + console.log('Local taxonomy directory exists. Proceeding with branch listing.'); + + // List all branches in the repository + const branches = await git.listBranches({ fs, dir: REPO_DIR }); + console.log(`Branches found: ${branches.join(', ')}`); + + const branchDetails = []; + + for (const branch of branches) { + const branchCommit = await git.resolveRef({ + fs, + dir: REPO_DIR, + ref: branch + }); + const commitDetails = await git.readCommit({ + fs, + dir: REPO_DIR, + oid: branchCommit + }); + + const commitMessage = commitDetails.commit.message; + + // Check for Signed-off-by line + const signoffMatch = commitMessage.match(/^Signed-off-by: (.+)$/m); + const signoff = signoffMatch ? signoffMatch[1] : null; + const messageStr = commitMessage.split('Signed-off-by'); + + branchDetails.push({ + name: branch, + creationDate: commitDetails.commit.committer.timestamp * 1000, + message: messageStr[0].replace(/\n+$/, ''), + author: signoff + }); + } + + // Sort by creation date, newest first + branchDetails.sort((a, b) => b.creationDate - a.creationDate); + + console.log('Total branches present in native taxonomy (fine-tune):', branchDetails.length); + + return NextResponse.json({ branches: branchDetails }, { status: 200 }); + } catch (error) { + console.error('Failed to list branches from local taxonomy (fine-tune):', error); + return NextResponse.json({ error: 'Failed to list branches from local taxonomy (fine-tune)' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/gpu-free/route.ts b/src/app/api/fine-tune/gpu-free/route.ts new file mode 100644 index 00000000..1cbd2fa1 --- /dev/null +++ b/src/app/api/fine-tune/gpu-free/route.ts @@ -0,0 +1,25 @@ +// src/app/api/fine-tune/gpu-free/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function GET() { + try { + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + const endpoint = `${API_SERVER}/gpu-free`; + + console.log('Forwarding request to gpu-free endpoint:', endpoint); + const resp = await fetch(endpoint); + if (!resp.ok) { + console.error('gpu-free error from API server:', resp.status, resp.statusText); + return NextResponse.json({ free_gpus: 0 }, { status: 200 }); + // Return 0 in case of error + } + + const data = await resp.json(); + return NextResponse.json(data, { status: 200 }); + } catch (error) { + console.error('Unexpected error in gpu-free route:', error); + return NextResponse.json({ free_gpus: 0 }, { status: 200 }); + } +} diff --git a/src/app/api/fine-tune/jobs/[job_id]/logs/route.ts b/src/app/api/fine-tune/jobs/[job_id]/logs/route.ts new file mode 100644 index 
00000000..9017adb8 --- /dev/null +++ b/src/app/api/fine-tune/jobs/[job_id]/logs/route.ts @@ -0,0 +1,29 @@ +// src/app/api/fine-tune/jobs/[job_id]/logs/route.ts +import { NextResponse } from 'next/server'; + +const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + +export async function GET(request: Request, { params }: { params: { job_id: string } }) { + const { job_id } = await Promise.resolve(params); + + try { + const response = await fetch(`${API_SERVER}/jobs/${job_id}/logs`, { + method: 'GET' + }); + + if (!response.ok) { + const errorText = await response.text(); + console.error('Error from API server:', errorText); + return NextResponse.json({ error: 'Error fetching logs' }, { status: 500 }); + } + + const logs = await response.text(); + return new NextResponse(logs, { + status: 200, + headers: { 'Content-Type': 'text/plain' } + }); + } catch (error) { + console.error('Error fetching logs:', error); + return NextResponse.json({ error: 'Error fetching logs' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/jobs/[job_id]/status/route.ts b/src/app/api/fine-tune/jobs/[job_id]/status/route.ts new file mode 100644 index 00000000..8b793a6b --- /dev/null +++ b/src/app/api/fine-tune/jobs/[job_id]/status/route.ts @@ -0,0 +1,29 @@ +// src/app/api/fine-tune/jobs/[job_id]/status/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function GET(request: Request, { params }: { params: { job_id: string } }) { + const { job_id } = params; + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + + try { + // Forward the request to the API server + const response = await fetch(`${API_SERVER}/jobs/${job_id}/status`, { + method: 'GET' + }); + + if (!response.ok) { + const errorText = await response.text(); + console.error('Error from API server:', errorText); + return NextResponse.json({ error: 'Error fetching job status' }, { status: 500 }); + } + + const result = await response.json(); + // Return the job status to the client + return NextResponse.json(result, { status: 200 }); + } catch (error) { + console.error('Error fetching job status:', error); + return NextResponse.json({ error: 'Error fetching job status' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/jobs/route.ts b/src/app/api/fine-tune/jobs/route.ts new file mode 100644 index 00000000..ef5dead3 --- /dev/null +++ b/src/app/api/fine-tune/jobs/route.ts @@ -0,0 +1,22 @@ +// src/app/api/fine-tune/jobs/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function GET(request: Request) { + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + + try { + const response = await fetch(`${API_SERVER}/jobs`); + if (!response.ok) { + const errorText = await response.text(); + console.error('Error from API server:', errorText); + return NextResponse.json({ error: 'Error fetching jobs' }, { status: 500 }); + } + const result = await response.json(); + return NextResponse.json(result, { status: 200 }); + } catch (error) { + console.error('Error fetching jobs:', error); + return NextResponse.json({ error: 'Error fetching jobs' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/model/serve-base/route.ts b/src/app/api/fine-tune/model/serve-base/route.ts new file mode 100644 index 00000000..aa415db3 --- /dev/null +++ b/src/app/api/fine-tune/model/serve-base/route.ts @@ -0,0 +1,54 @@ +// src/app/api/model/serve-base/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function POST() { + try { + 
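+ // Usage sketch (assumed client shape, not part of this PR's UI code): the frontend is
+ // expected to call this route roughly as
+ //   const res = await fetch('/api/fine-tune/model/serve-base', { method: 'POST' });
+ //   const { job_id } = await res.json();
+ // and then poll /api/fine-tune/jobs/<job_id>/status and /logs with the returned job_id.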
console.log('Received serve-base model request'); + + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + const endpoint = `${API_SERVER}/model/serve-base`; + + console.log(`Forwarding request to the API server: ${endpoint}`); + + // No request body needed for serving the base model + const response = await fetch(endpoint, { + method: 'POST', + headers: { 'Content-Type': 'application/json' } + }); + + console.log('Response from API server (serve-base):', { + status: response.status, + statusText: response.statusText + }); + + if (!response.ok) { + console.error('Error response from the API server:', response.status, response.statusText); + return NextResponse.json({ error: 'Failed to serve the base model on the API server' }, { status: response.status }); + } + + // Parse response safely + let responseData; + try { + const text = await response.text(); + responseData = text ? JSON.parse(text) : {}; + console.log('Parsed response data (serve-base):', responseData); + } catch (error) { + console.error('Error parsing JSON response from API server:', error); + return NextResponse.json({ error: 'Invalid JSON response from the API server' }, { status: 500 }); + } + + if (!responseData.job_id) { + console.error('Missing job_id in API server response for serve-base:', responseData); + return NextResponse.json({ error: 'API server response does not contain job_id' }, { status: 500 }); + } + + // Return the response from the API server to the client + console.log('Returning success response with job_id (serve-base):', responseData.job_id); + return NextResponse.json(responseData, { status: 200 }); + } catch (error) { + console.error('Unexpected error during serve-base:', error); + return NextResponse.json({ error: 'An unexpected error occurred during serving the base model' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/model/serve-latest/route.ts b/src/app/api/fine-tune/model/serve-latest/route.ts new file mode 100644 index 00000000..92b75593 --- /dev/null +++ b/src/app/api/fine-tune/model/serve-latest/route.ts @@ -0,0 +1,54 @@ +// src/app/api/model/serve-latest/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function POST() { + try { + console.log('Received serve-latest model request'); + + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + const endpoint = `${API_SERVER}/model/serve-latest`; + + console.log(`Forwarding request to API server: ${endpoint}`); + + // No request body needed for serving the latest model + const response = await fetch(endpoint, { + method: 'POST', + headers: { 'Content-Type': 'application/json' } + }); + + console.log('Response from API server:', { + status: response.status, + statusText: response.statusText + }); + + if (!response.ok) { + console.error('Error response from API server:', response.status, response.statusText); + return NextResponse.json({ error: 'Failed to serve the latest model on the API server' }, { status: response.status }); + } + + // Parse response safely + let responseData; + try { + const text = await response.text(); + responseData = text ? 
JSON.parse(text) : {}; + console.log('Parsed response data (serve-latest):', responseData); + } catch (error) { + console.error('Error parsing JSON response from API server:', error); + return NextResponse.json({ error: 'Invalid JSON response from the API server' }, { status: 500 }); + } + + if (!responseData.job_id) { + console.error('Missing job_id in API server response for serve-latest:', responseData); + return NextResponse.json({ error: 'API server response does not contain job_id' }, { status: 500 }); + } + + // Return the response from the API server to the client + console.log('Returning success response with job_id (serve-latest):', responseData.job_id); + return NextResponse.json(responseData, { status: 200 }); + } catch (error) { + console.error('Unexpected error during serve-latest:', error); + return NextResponse.json({ error: 'An unexpected error occurred during serving the latest model' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/model/train/route.ts b/src/app/api/fine-tune/model/train/route.ts new file mode 100644 index 00000000..a910cd9f --- /dev/null +++ b/src/app/api/fine-tune/model/train/route.ts @@ -0,0 +1,77 @@ +// src/app/api/fine-tune/model/train/route.ts +'use server'; + +import { NextResponse } from 'next/server'; + +export async function POST(request: Request) { + try { + console.log('Received train job request'); + + // Parse the request body for required data + const { modelName, branchName, epochs } = await request.json(); + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + + console.log('Request body:', { modelName, branchName, epochs }); + + if (!modelName || !branchName) { + console.error('Missing required parameters: modelName and branchName'); + return NextResponse.json({ error: 'Missing required parameters: modelName and branchName' }, { status: 400 }); + } + + // Validate epochs if provided + if (epochs !== undefined && (typeof epochs !== 'number' || epochs <= 0)) { + return NextResponse.json({ error: "'epochs' must be a positive integer" }, { status: 400 }); + } + + // Forward the request to the API server + const endpoint = `${API_SERVER}/model/train`; + + console.log(`Forwarding request to API server: ${API_SERVER}`); + + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + modelName, + branchName, + epochs + }) + }); + + console.log('Response from API server:', { + status: response.status, + statusText: response.statusText + }); + + if (!response.ok) { + const errorText = await response.text(); + console.error('Error response from API server:', response.status, response.statusText, errorText); + return NextResponse.json({ error: 'Failed to train the model on the API server' }, { status: response.status }); + } + + // Parse response safely + let responseData; + try { + const text = await response.text(); + responseData = text ? 
JSON.parse(text) : {}; + console.log('Parsed response data:', responseData); + } catch (error) { + console.error('Error parsing JSON response from API server:', error); + return NextResponse.json({ error: 'Invalid JSON response from the API server' }, { status: 500 }); + } + + if (!responseData.job_id) { + console.error('Missing job_id in API server response:', responseData); + return NextResponse.json({ error: 'API server response does not contain job_id' }, { status: 500 }); + } + + // Return the response from the API server to the client + console.log('Returning success response with job_id:', responseData.job_id); + return NextResponse.json(responseData, { status: 200 }); + } catch (error) { + console.error('Unexpected error during training:', error); + return NextResponse.json({ error: 'An unexpected error occurred during training' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/model/vllm-status/route.ts b/src/app/api/fine-tune/model/vllm-status/route.ts new file mode 100644 index 00000000..bc961376 --- /dev/null +++ b/src/app/api/fine-tune/model/vllm-status/route.ts @@ -0,0 +1,31 @@ +// src/app/api/fine-tune/model/vllm-status +'use server'; + +import { NextResponse } from 'next/server'; + +export async function GET(request: Request) { + try { + const { searchParams } = new URL(request.url); + const modelName = searchParams.get('modelName'); + if (!modelName) { + return NextResponse.json({ error: 'Missing modelName query param' }, { status: 400 }); + } + + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + const endpoint = `${API_SERVER}/vllm-status?model_name=${modelName}`; + + console.log('Forwarding request to vllm-status:', endpoint); + + const response = await fetch(endpoint); + if (!response.ok) { + console.error('vllm-status error from API server:', response.status, response.statusText); + return NextResponse.json({ error: 'Failed to get vllm status' }, { status: response.status }); + } + + const statusData = await response.json(); + return NextResponse.json(statusData, { status: 200 }); + } catch (error) { + console.error('Unexpected error in vllm-status route:', error); + return NextResponse.json({ error: 'Unexpected error fetching vllm status' }, { status: 500 }); + } +} diff --git a/src/app/api/fine-tune/model/vllm-unload/route.ts b/src/app/api/fine-tune/model/vllm-unload/route.ts new file mode 100644 index 00000000..87c14253 --- /dev/null +++ b/src/app/api/fine-tune/model/vllm-unload/route.ts @@ -0,0 +1,37 @@ +'use server'; + +import { NextResponse } from 'next/server'; + +export async function POST(request: Request) { + try { + const body = await request.json(); + const { model_name } = body; + if (!model_name) { + return NextResponse.json({ error: 'Missing model_name' }, { status: 400 }); + } + + // Make sure you have NEXT_PUBLIC_API_SERVER set in your .env or environment + const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!; + const endpoint = `${API_SERVER}/vllm-unload`; + + // Forward to your Go server + const resp = await fetch(endpoint, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ model_name }) + }); + + if (!resp.ok) { + const errorText = await resp.text(); + console.error('Unload model error from API server:', errorText); + return NextResponse.json({ error: errorText }, { status: resp.status }); + } + + // Return the successful JSON from the Go server + const data = await resp.json(); + return NextResponse.json(data, { status: 200 }); + } catch (error) { + console.error('Unexpected error 
in vllm-unload route:', error);
+ return NextResponse.json({ error: 'An unexpected error occurred' }, { status: 500 });
+ }
+}
diff --git a/src/app/api/fine-tune/models/route.ts b/src/app/api/fine-tune/models/route.ts
new file mode 100644
index 00000000..7c0c3d1b
--- /dev/null
+++ b/src/app/api/fine-tune/models/route.ts
@@ -0,0 +1,22 @@
+// src/pages/api/fine-tune/models/route.ts
+'use server';
+
+import { NextRequest, NextResponse } from 'next/server';
+
+export async function GET(req: NextRequest) {
+ try {
+ const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!;
+
+ const response = await fetch(`${API_SERVER}/models`);
+ const data = await response.json();
+
+ if (!response.ok) {
+ return NextResponse.json({ error: 'Failed to fetch models' }, { status: response.status });
+ }
+
+ return NextResponse.json(data, { status: 200 });
+ } catch (error) {
+ console.error('Error fetching models:', error);
+ return NextResponse.json({ error: 'Internal Server Error' }, { status: 500 });
+ }
+}
diff --git a/src/app/api/fine-tune/pipeline/generate-train/route.ts b/src/app/api/fine-tune/pipeline/generate-train/route.ts
new file mode 100644
index 00000000..d9bdb91f
--- /dev/null
+++ b/src/app/api/fine-tune/pipeline/generate-train/route.ts
@@ -0,0 +1,72 @@
+// src/app/api/fine-tune/pipeline/generate-train/route.ts
+'use server';
+
+import { NextResponse } from 'next/server';
+
+export async function POST(request: Request) {
+ try {
+ // Parse the request body for required data
+ const { modelName, branchName, epochs } = await request.json();
+ const API_SERVER = process.env.NEXT_PUBLIC_API_SERVER!;
+
+ console.log('Request body:', { modelName, branchName, epochs });
+
+ if (!modelName || !branchName) {
+ console.error('Missing required parameters: modelName and branchName');
+ return NextResponse.json({ error: 'Missing required parameters: modelName and branchName' }, { status: 400 });
+ }
+
+ // Validate epochs if provided
+ if (epochs !== undefined && (typeof epochs !== 'number' || epochs <= 0)) {
+ return NextResponse.json({ error: "'epochs' must be a positive integer" }, { status: 400 });
+ }
+
+ // Forward the request to the API server's pipeline endpoint
+ const endpoint = `${API_SERVER}/pipeline/generate-train`;
+
+ const response = await fetch(endpoint, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ modelName,
+ branchName,
+ epochs
+ })
+ });
+
+ console.log('Response from API server:', {
+ status: response.status,
+ statusText: response.statusText
+ });
+
+ if (!response.ok) {
+ const errorText = await response.text();
+ console.error('Error response from API server:', response.status, response.statusText, errorText);
+ return NextResponse.json({ error: 'Failed to train the model on the API server' }, { status: response.status });
+ }
+
+ let responseData;
+ try {
+ const text = await response.text();
+ responseData = text ?
JSON.parse(text) : {}; + console.log('Parsed response data:', responseData); + } catch (error) { + console.error('Error parsing JSON response from API server:', error); + return NextResponse.json({ error: 'Invalid JSON response from the API server' }, { status: 500 }); + } + + if (!responseData.job_id) { + console.error('Missing job_id in API server response:', responseData); + return NextResponse.json({ error: 'API server response does not contain job_id' }, { status: 500 }); + } + + // Return the response from the API server to the client + console.log('Returning success response with job_id:', responseData.job_id); + return NextResponse.json(responseData, { status: 200 }); + } catch (error) { + console.error('Unexpected error during training:', error); + return NextResponse.json({ error: 'An unexpected error occurred during training' }, { status: 500 }); + } +} diff --git a/src/app/api/local/clone-repo/route.ts b/src/app/api/local/clone-repo/route.ts deleted file mode 100644 index 309582c5..00000000 --- a/src/app/api/local/clone-repo/route.ts +++ /dev/null @@ -1,45 +0,0 @@ -// src/pages/api/clone-repo.ts -import { NextRequest, NextResponse } from 'next/server'; -import * as git from 'isomorphic-git'; -import http from 'isomorphic-git/http/node'; -import fs from 'fs'; -import path from 'path'; - -// Retrieve the base directory from the environment variable -const BASE_DIRECTORY = process.env.NEXT_PUBLIC_BASE_CLONE_DIRECTORY; - -export async function POST(req: NextRequest) { - const { repoUrl, directory } = await req.json(); - - if (!repoUrl || !directory) { - return NextResponse.json({ message: 'Repository URL and directory are required' }, { status: 400 }); - } - - if (!BASE_DIRECTORY) { - return NextResponse.json({ message: 'Base directory is not configured on the server' }, { status: 500 }); - } - - try { - const clonePath = path.resolve(BASE_DIRECTORY, directory); - - // Ensure clonePath is within BASE_DIRECTORY - if (!clonePath.startsWith(BASE_DIRECTORY)) { - return NextResponse.json({ message: 'Invalid directory path' }, { status: 403 }); - } - - await git.clone({ - fs, - http, - dir: clonePath, - url: repoUrl, - singleBranch: true, - depth: 1 - }); - - // Include the full path in the response for client display - return NextResponse.json({ message: `Repository cloned successfully.`, fullPath: clonePath }, { status: 200 }); - } catch (error: unknown) { - const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'; - return NextResponse.json({ message: `Failed to clone repository: ${errorMessage}` }, { status: 500 }); - } -} diff --git a/src/app/api/local/git/branches/route.ts b/src/app/api/local/git/branches/route.ts deleted file mode 100644 index 2558f6f2..00000000 --- a/src/app/api/local/git/branches/route.ts +++ /dev/null @@ -1,142 +0,0 @@ -// src/app/api/local/git/branches/route.ts -import { NextRequest, NextResponse } from 'next/server'; -import * as git from 'isomorphic-git'; -import fs from 'fs'; -import path from 'path'; - -// Get the repository path from the environment variable -const REPO_DIR = process.env.NEXT_PUBLIC_LOCAL_REPO_PATH || '/path/to/local/repo'; - -export async function GET() { - try { - // Ensure the repository path exists - if (!fs.existsSync(REPO_DIR)) { - return NextResponse.json({ error: 'Repository path does not exist.' 
}, { status: 400 }); - } - - // List all branches in the repository - const branches = await git.listBranches({ fs, dir: REPO_DIR }); - const branchDetails = []; - - for (const branch of branches) { - const branchCommit = await git.resolveRef({ fs, dir: REPO_DIR, ref: branch }); - const commitDetails = await git.readCommit({ fs, dir: REPO_DIR, oid: branchCommit }); - - branchDetails.push({ - name: branch, - creationDate: commitDetails.commit.committer.timestamp * 1000 // Convert to milliseconds - }); - } - - branchDetails.sort((a, b) => b.creationDate - a.creationDate); // Sort by creation date, newest first - - return NextResponse.json({ branches: branchDetails }, { status: 200 }); - } catch (error) { - console.error('Failed to list branches:', error); - return NextResponse.json({ error: 'Failed to list branches' }, { status: 500 }); - } -} - -// Handle POST requests for merge or branch comparison -export async function POST(req: NextRequest) { - const { branchName, action } = await req.json(); - - try { - if (action === 'merge') { - // Ensure valid branch name - if (!branchName || branchName === 'main') { - return NextResponse.json({ error: 'Invalid branch name for merge' }, { status: 400 }); - } - - // Initialize the repository and checkout main branch - await git.init({ fs, dir: REPO_DIR }); - await git.checkout({ fs, dir: REPO_DIR, ref: 'main' }); - - // Perform the merge - await git.merge({ - fs, - dir: REPO_DIR, - ours: 'main', - theirs: branchName, - author: { - name: 'Instruct Lab Local', - email: 'local@instructlab.ai' - } - }); - - return NextResponse.json({ message: `Successfully merged ${branchName} into main.` }, { status: 200 }); - } else if (action === 'diff') { - // Ensure valid branch name - if (!branchName || branchName === 'main') { - return NextResponse.json({ error: 'Invalid branch name for comparison' }, { status: 400 }); - } - - // Fetch the commit SHA for `main` and the target branch - const mainCommit = await git.resolveRef({ fs, dir: REPO_DIR, ref: 'main' }); - const branchCommit = await git.resolveRef({ fs, dir: REPO_DIR, ref: branchName }); - - const mainFiles = await getFilesFromTree(mainCommit); - const branchFiles = await getFilesFromTree(branchCommit); - - const changes = []; - - // Identify modified and deleted files - for (const file in mainFiles) { - if (branchFiles[file]) { - if (mainFiles[file] !== branchFiles[file]) { - changes.push({ file, status: 'modified' }); - } - } else { - changes.push({ file, status: 'deleted' }); - } - } - - // Identify added files - for (const file in branchFiles) { - if (!mainFiles[file]) { - changes.push({ file, status: 'added' }); - } - } - - return NextResponse.json({ changes }, { status: 200 }); - } else { - return NextResponse.json({ error: 'Invalid action specified' }, { status: 400 }); - } - } catch (error) { - console.error(`Failed to ${action === 'merge' ? 'merge branch' : 'compare branches'}:`, error); - return NextResponse.json( - { - error: `Failed to ${action === 'merge' ? 
'merge branch' : 'compare branches'}` - }, - { status: 500 } - ); - } finally { - // Ensure switching back to 'main' branch after any operation - try { - await git.checkout({ fs, dir: REPO_DIR, ref: 'main' }); - } catch (checkoutError) { - console.error('Failed to switch back to main branch:', checkoutError); - } - } -} - -// Helper function to recursively gather file paths and their oids from a tree -async function getFilesFromTree(commitOid: string) { - const fileMap: Record = {}; - - async function walkTree(dir: string) { - const tree = await git.readTree({ fs, dir: REPO_DIR, oid: commitOid, filepath: dir }); - - for (const entry of tree.tree) { - const fullPath = path.join(dir, entry.path); - if (entry.type === 'blob') { - fileMap[fullPath] = entry.oid; - } else if (entry.type === 'tree') { - await walkTree(fullPath); // Recursively walk subdirectories - } - } - } - - await walkTree(''); - return fileMap; -} diff --git a/src/app/api/native/clone-repo/route.ts b/src/app/api/native/clone-repo/route.ts new file mode 100644 index 00000000..e201529e --- /dev/null +++ b/src/app/api/native/clone-repo/route.ts @@ -0,0 +1,41 @@ +// src/pages/api/clone-repo.ts +import { NextResponse } from 'next/server'; +import * as git from 'isomorphic-git'; +import http from 'isomorphic-git/http/node'; +import fs from 'fs'; +import path from 'path'; + +// Retrieve the base directory from the environment variable +const LOCAL_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_ROOT_DIR || `${process.env.HOME}/.instructlab-ui`; +const TAXONOMY_REPO_URL = process.env.NEXT_PUBLIC_TAXONOMY_REPO_URL || 'https://github.com/instructlab/taxonomy.git'; + +export async function POST() { + const taxonomyDirectoryPath = path.join(LOCAL_TAXONOMY_ROOT_DIR, '/taxonomy'); + + if (fs.existsSync(taxonomyDirectoryPath)) { + const files = fs.readdirSync(taxonomyDirectoryPath); + if (files.length > 0) { + console.log(`Using existing native Taxonomy repository at ${taxonomyDirectoryPath}.`); + return NextResponse.json({ message: `Using existing native Taxonomy repository at ${taxonomyDirectoryPath}.` }, { status: 200 }); + } + fs.rmdirSync(taxonomyDirectoryPath, { recursive: true }); + } + + try { + await git.clone({ + fs, + http, + dir: taxonomyDirectoryPath, + url: TAXONOMY_REPO_URL, + singleBranch: true + }); + + // Include the full path in the response for client display + console.log(`Local Taxonomy repository cloned successfully to ${taxonomyDirectoryPath}.`); + return NextResponse.json({ message: `Local Taxonomy repository cloned successfully to ${taxonomyDirectoryPath}.` }, { status: 200 }); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? 
error.message : 'Unknown error occurred'; + console.error(`Failed to clone local taxonomy repository: ${errorMessage}`); + return NextResponse.json({ message: `Failed to clone local taxonomy repository: ${errorMessage}` }, { status: 500 }); + } +} diff --git a/src/app/api/native/git/branches/route.ts b/src/app/api/native/git/branches/route.ts new file mode 100644 index 00000000..c6e0e68f --- /dev/null +++ b/src/app/api/native/git/branches/route.ts @@ -0,0 +1,358 @@ +// src/app/api/native/git/branches/route.ts +import { NextRequest, NextResponse } from 'next/server'; +import * as git from 'isomorphic-git'; +import fs from 'fs'; +import path from 'path'; + +// Get the repository path from the environment variable +const LOCAL_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_ROOT_DIR || `${process.env.HOME}/.instructlab-ui`; +const REMOTE_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_TAXONOMY_ROOT_DIR || ''; +const REMOTE_TAXONOMY_REPO_CONTAINER_MOUNT_DIR = '/tmp/.instructlab-ui'; + +interface Diffs { + file: string; + status: string; + content?: string; +} + +export async function GET() { + const REPO_DIR = path.join(LOCAL_TAXONOMY_ROOT_DIR, '/taxonomy'); + try { + // Ensure the repository path exists + if (!fs.existsSync(REPO_DIR)) { + return NextResponse.json({ error: 'Local repository path does not exist.' }, { status: 400 }); + } + + // List all branches in the repository + const branches = await git.listBranches({ fs, dir: REPO_DIR }); + const branchDetails = []; + + for (const branch of branches) { + const branchCommit = await git.resolveRef({ fs, dir: REPO_DIR, ref: branch }); + const commitDetails = await git.readCommit({ fs, dir: REPO_DIR, oid: branchCommit }); + + const commitMessage = commitDetails.commit.message; + + // Check for Signed-off-by line + const signoffMatch = commitMessage.match(/^Signed-off-by: (.+)$/m); + const signoff = signoffMatch ? 
signoffMatch[1] : null; + const messageStr = commitMessage.split('Signed-off-by'); + branchDetails.push({ + name: branch, + creationDate: commitDetails.commit.committer.timestamp * 1000, + message: messageStr[0].replace(/\n+$/, ''), + author: signoff + }); + } + + branchDetails.sort((a, b) => b.creationDate - a.creationDate); // Sort by creation date, newest first + console.log('Total branches present in native taxonomy:', branchDetails.length); + + return NextResponse.json({ branches: branchDetails }, { status: 200 }); + } catch (error) { + console.error('Failed to list branches from local taxonomy:', error); + return NextResponse.json({ error: 'Failed to list branches from local taxonomy' }, { status: 500 }); + } +} + +// Handle POST requests for delete/diff/publish actions +export async function POST(req: NextRequest) { + const LOCAL_TAXONOMY_DIR = path.join(LOCAL_TAXONOMY_ROOT_DIR, '/taxonomy'); + const { branchName, action } = await req.json(); + console.log('Received POST request:', { branchName, action }); + + if (action === 'delete') { + return handleDelete(branchName, LOCAL_TAXONOMY_DIR); + } + + if (action === 'diff') { + return handleDiff(branchName, LOCAL_TAXONOMY_DIR); + } + + if (action === 'publish') { + let remoteTaxonomyRepoDirFinal: string = ''; + + const remoteTaxonomyRepoContainerMountDir = path.join(REMOTE_TAXONOMY_REPO_CONTAINER_MOUNT_DIR, '/taxonomy'); + const remoteTaxonomyRepoDir = path.join(REMOTE_TAXONOMY_ROOT_DIR, '/taxonomy'); + + // Check if there is taxonomy repository mounted in the container + if (fs.existsSync(remoteTaxonomyRepoContainerMountDir) && fs.readdirSync(remoteTaxonomyRepoContainerMountDir).length !== 0) { + remoteTaxonomyRepoDirFinal = remoteTaxonomyRepoContainerMountDir; + console.log('Remote taxonomy repository ', remoteTaxonomyRepoDir, ' is mounted at:', remoteTaxonomyRepoDirFinal); + } else { + // If remote taxonomy is not mounted, it means it's local deployment and we can directly use the paths + if (fs.existsSync(remoteTaxonomyRepoDir) && fs.readdirSync(remoteTaxonomyRepoDir).length !== 0) { + remoteTaxonomyRepoDirFinal = remoteTaxonomyRepoDir; + } + } + if (remoteTaxonomyRepoDirFinal === '') { + return NextResponse.json({ error: 'Remote taxonomy repository path does not exist.' 
}, { status: 400 });
+    }
+
+    console.log('Remote taxonomy repository path:', remoteTaxonomyRepoDirFinal);
+
+    return handlePublish(branchName, LOCAL_TAXONOMY_DIR, remoteTaxonomyRepoDirFinal);
+  }
+  return NextResponse.json({ error: 'Invalid action specified' }, { status: 400 });
+}
+
+async function handleDelete(branchName: string, localTaxonomyDir: string) {
+  try {
+    if (!branchName || branchName === 'main') {
+      return NextResponse.json({ error: 'Invalid branch name for deletion' }, { status: 400 });
+    }
+
+    // Delete the target branch
+    await git.deleteBranch({ fs, dir: localTaxonomyDir, ref: branchName });
+
+    return NextResponse.json({ message: `Successfully deleted contribution ${branchName}.` }, { status: 200 });
+  } catch (error) {
+    console.error(`Failed to delete contribution ${branchName}:`, error);
+    return NextResponse.json(
+      {
+        error: `Failed to delete contribution ${branchName}`
+      },
+      { status: 500 }
+    );
+  } finally {
+    // Ensure switching back to 'main' branch after any operation
+    try {
+      await git.checkout({ fs, dir: localTaxonomyDir, ref: 'main' });
+    } catch (checkoutError) {
+      console.error('Failed to switch back to main branch:', checkoutError);
+    }
+  }
+}
+
+async function handleDiff(branchName: string, localTaxonomyDir: string) {
+  try {
+    // Ensure valid branch name
+    if (!branchName || branchName === 'main') {
+      return NextResponse.json({ error: 'Invalid branch name for comparison' }, { status: 400 });
+    }
+
+    const changes = await findDiff(branchName, localTaxonomyDir);
+    const enrichedChanges: Diffs[] = [];
+    for (const change of changes) {
+      if (change.status === 'added' || change.status === 'modified') {
+        const fileContent = await readFileFromBranch(localTaxonomyDir, branchName, change.file);
+        enrichedChanges.push({ ...change, content: fileContent });
+      } else {
+        enrichedChanges.push(change);
+      }
+    }
+
+    return NextResponse.json({ changes: enrichedChanges }, { status: 200 });
+  } catch (error) {
+    console.error(`Failed to show contribution changes ${branchName}:`, error);
+    return NextResponse.json(
+      {
+        error: `Failed to show contribution changes for ${branchName}`
+      },
+      { status: 500 }
+    );
+  } finally {
+    // Ensure switching back to 'main' branch after any operation
+    try {
+      await git.checkout({ fs, dir: localTaxonomyDir, ref: 'main' });
+    } catch (checkoutError) {
+      console.error('Failed to switch back to main branch:', checkoutError);
+    }
+  }
+}
+
+async function findDiff(branchName: string, localTaxonomyDir: string): Promise<Diffs[]> {
+  // Fetch the commit SHA for `main` and the target branch
+  const mainCommit = await git.resolveRef({ fs, dir: localTaxonomyDir, ref: 'main' });
+  const branchCommit = await git.resolveRef({ fs, dir: localTaxonomyDir, ref: branchName });
+
+  const mainFiles = await getFilesFromTree(mainCommit, localTaxonomyDir);
+  const branchFiles = await getFilesFromTree(branchCommit, localTaxonomyDir);
+
+  // Create an array of Diffs to store changes
+  const changes: Diffs[] = [];
+  // Identify modified and deleted files
+  for (const file in mainFiles) {
+    if (branchFiles[file]) {
+      if (mainFiles[file] !== branchFiles[file]) {
+        changes.push({ file, status: 'modified' });
+      }
+    } else {
+      changes.push({ file, status: 'deleted' });
+    }
+  }
+
+  // Identify added files
+  for (const file in branchFiles) {
+    if (!mainFiles[file]) {
+      changes.push({ file, status: 'added' });
+    }
+  }
+  return changes;
+}
+
+async function getTopCommitDetails(dir: string, ref: string = 'HEAD') {
+  try {
+    // Fetch the top commit (latest commit on the branch)
+ const [topCommit] = await git.log({ + fs, + dir, + ref, + depth: 1 // Only fetch the latest commit + }); + + if (!topCommit) { + throw new Error('No commits found in the repository.'); + } + + // Extract commit message + const commitMessage = topCommit.commit.message; + + // Check for Signed-off-by line + const signoffMatch = commitMessage.match(/^Signed-off-by: (.+)$/m); + const signoff = signoffMatch ? signoffMatch[1] : null; + + return { + message: commitMessage, + signoff + }; + } catch (error) { + console.error('Error reading top commit details:', error); + throw error; + } +} + +async function handlePublish(branchName: string, localTaxonomyDir: string, remoteTaxonomyDir: string) { + try { + if (!branchName || branchName === 'main') { + return NextResponse.json({ error: 'Invalid contribution name for publish' }, { status: 400 }); + } + + console.log(`Publishing contribution from ${branchName} to remote taxonomy repo at ${REMOTE_TAXONOMY_ROOT_DIR}/taxonomy`); + const changes = await findDiff(branchName, localTaxonomyDir); + + // Check if there are any changes to publish, create a new branch at remoteTaxonomyDir and + // copy all the files listed in the changes array to the new branch and create a commit + if (changes.length > 0) { + await git.checkout({ fs, dir: localTaxonomyDir, ref: branchName }); + // Read the commit message of the top commit from the branch + const details = await getTopCommitDetails(localTaxonomyDir); + + // Check if the remote branch exists, if not create it + const remoteBranchName = branchName; + const remoteBranchExists = await git.listBranches({ fs, dir: remoteTaxonomyDir }); + if (remoteBranchExists.includes(remoteBranchName)) { + console.log(`Branch ${remoteBranchName} exists in remote taxonomy, deleting it.`); + await git.deleteBranch({ fs, dir: remoteTaxonomyDir, ref: remoteBranchName }); + } else { + console.log(`Branch ${remoteBranchName} does not exist in remote taxonomy, creating a new branch.`); + } + + await git.checkout({ fs, dir: remoteTaxonomyDir, ref: 'main' }); + await git.branch({ fs, dir: remoteTaxonomyDir, ref: remoteBranchName }); + await git.checkout({ fs, dir: remoteTaxonomyDir, ref: remoteBranchName }); + + // Copy the files listed in the changes array to the remote branch and if the directories do not exist, create them + for (const change of changes) { + if (change.status !== 'deleted') { + const filePath = path.join(localTaxonomyDir, change.file); + const remoteFilePath = path.join(remoteTaxonomyDir, change.file); + const remoteFileDir = path.dirname(remoteFilePath); + if (!fs.existsSync(remoteFileDir)) { + fs.mkdirSync(remoteFileDir, { recursive: true }); + } + fs.copyFileSync(filePath, remoteFilePath); + } else { + // If deleted, ensure the file is removed from remote as well, if it exists + const remoteFilePath = path.join(remoteTaxonomyDir, change.file); + if (fs.existsSync(remoteFilePath)) { + fs.rmSync(remoteFilePath); + } + } + } + + await git.add({ fs, dir: remoteTaxonomyDir, filepath: '.' }); + + const authorInfo = details.signoff!.match(/(.*?) 
<(.*?)>/);
+      let authorName = '';
+      let authorEmail = '';
+      if (authorInfo) {
+        console.log(`Author information found in signoff: ${authorInfo}`);
+        authorName = authorInfo[1];
+        authorEmail = authorInfo[2];
+      } else {
+        return NextResponse.json({ message: `Author information is not present in the contribution ${branchName}.` }, { status: 500 });
+      }
+      // Create a commit with the same message and signoff as the top commit from the local branch
+      await git.commit({
+        fs,
+        dir: remoteTaxonomyDir,
+        message: details.message,
+        author: {
+          name: authorName,
+          email: authorEmail
+        }
+      });
+      console.log(`Successfully published contribution ${branchName} to remote taxonomy repo at ${REMOTE_TAXONOMY_ROOT_DIR}/taxonomy.`);
+      return NextResponse.json(
+        { message: `Successfully published contribution ${branchName} to ${REMOTE_TAXONOMY_ROOT_DIR}/taxonomy.` },
+        { status: 200 }
+      );
+    } else {
+      return NextResponse.json({ message: `No changes to publish from contribution ${branchName}.` }, { status: 200 });
+    }
+  } catch (error) {
+    console.error(`Failed to publish contribution from ${branchName}:`, error);
+    return NextResponse.json(
+      {
+        error: `Failed to publish contribution from ${branchName}`
+      },
+      { status: 500 }
+    );
+  } finally {
+    // Ensure switching back to 'main' branch after any operation
+    try {
+      await git.checkout({ fs, dir: localTaxonomyDir, ref: 'main' });
+    } catch (checkoutError) {
+      console.error('Failed to switch back to main branch in local taxonomy repo:', checkoutError);
+    }
+    try {
+      await git.checkout({ fs, dir: remoteTaxonomyDir, ref: 'main' });
+    } catch (checkoutError) {
+      console.error('Failed to switch back to main branch in remote taxonomy repo:', checkoutError);
+    }
+  }
+}
+
+async function readFileFromBranch(localTaxonomyDir: string, branchName: string, filePath: string): Promise<string> {
+  const tempDir = path.join(localTaxonomyDir, '.temp_checkout');
+  if (!fs.existsSync(tempDir)) {
+    fs.mkdirSync(tempDir);
+  }
+
+  const branchCommit = await git.resolveRef({ fs, dir: localTaxonomyDir, ref: branchName });
+  const { blob } = await git.readBlob({ fs, dir: localTaxonomyDir, oid: branchCommit, filepath: filePath });
+
+  const decoder = new TextDecoder('utf-8');
+  const content = decoder.decode(blob);
+  return content;
+}
+
+async function getFilesFromTree(commitOid: string, repoDir: string) {
+  const fileMap: Record<string, string> = {};
+
+  async function walkTree(dir: string) {
+    const tree = await git.readTree({ fs, dir: repoDir, oid: commitOid, filepath: dir });
+    for (const entry of tree.tree) {
+      const fullPath = path.join(dir, entry.path);
+      if (entry.type === 'blob') {
+        fileMap[fullPath] = entry.oid;
+      } else if (entry.type === 'tree') {
+        await walkTree(fullPath); // Recursively walk subdirectories
+      }
+    }
+  }
+
+  await walkTree('');
+  return fileMap;
+}
diff --git a/src/app/api/native/git/knowledge-files/route.ts b/src/app/api/native/git/knowledge-files/route.ts
new file mode 100644
index 00000000..1bfc47a8
--- /dev/null
+++ b/src/app/api/native/git/knowledge-files/route.ts
@@ -0,0 +1,212 @@
+// src/app/api/native/git/knowledge-files/route.ts
+
+'use server';
+import { NextRequest, NextResponse } from 'next/server';
+import * as git from 'isomorphic-git';
+import fs from 'fs';
+import path from 'path';
+
+// Constants for repository paths
+const LOCAL_TAXONOMY_DOCS_ROOT_DIR =
+  process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_DOCS_ROOT_DIR || `${process.env.HOME}/.instructlab-ui/taxonomy-knowledge-docs`;
+
+// Interface for the response
+interface KnowledgeFile {
+  filename: string;
+  content: string;
+  commitSha: string;
+  commitDate: string;
+}
+
+interface Branch {
+  name: string;
+  commitSha: string;
+  commitDate: string;
+}
+
+/**
+ * Function to list all branches.
+ */
+const listAllBranches = async (): Promise<Branch[]> => {
+  const REPO_DIR = LOCAL_TAXONOMY_DOCS_ROOT_DIR;
+
+  if (!fs.existsSync(REPO_DIR)) {
+    throw new Error('Repository path does not exist.');
+  }
+
+  const branches = await git.listBranches({ fs, dir: REPO_DIR });
+
+  const branchDetails: Branch[] = [];
+
+  for (const branch of branches) {
+    try {
+      const latestCommit = await git.log({ fs, dir: REPO_DIR, ref: branch, depth: 1 });
+      if (latestCommit.length === 0) {
+        continue; // No commits on this branch
+      }
+
+      const commit = latestCommit[0];
+      const commitSha = commit.oid;
+      const commitDate = new Date(commit.commit.committer.timestamp * 1000).toISOString();
+
+      branchDetails.push({
+        name: branch,
+        commitSha: commitSha,
+        commitDate: commitDate
+      });
+    } catch (error) {
+      console.error(`Failed to retrieve commit for branch ${branch}:`, error);
+      continue;
+    }
+  }
+
+  return branchDetails;
+};
+
+/**
+ * Function to retrieve knowledge files from a specific branch.
+ * @param branchName - The name of the branch to retrieve files from.
+ * @returns An array of KnowledgeFile objects.
+ */
+const getKnowledgeFiles = async (branchName: string): Promise<KnowledgeFile[]> => {
+  const REPO_DIR = path.join(LOCAL_TAXONOMY_DOCS_ROOT_DIR, '/taxonomy-knowledge-docs');
+
+  // Ensure the repository path exists
+  if (!fs.existsSync(REPO_DIR)) {
+    throw new Error('Repository path does not exist.');
+  }
+
+  // Check if the branch exists
+  const branches = await git.listBranches({ fs, dir: REPO_DIR });
+  if (!branches.includes(branchName)) {
+    throw new Error(`Branch "${branchName}" does not exist.`);
+  }
+
+  // Checkout the specified branch
+  await git.checkout({ fs, dir: REPO_DIR, ref: branchName });
+
+  // Read all files in the repository root directory
+  const allFiles = fs.readdirSync(REPO_DIR);
+
+  // Filter for Markdown files only
+  const markdownFiles = allFiles.filter((file) => path.extname(file).toLowerCase() === '.md');
+
+  const knowledgeFiles: KnowledgeFile[] = [];
+
+  for (const file of markdownFiles) {
+    const filePath = path.join(REPO_DIR, file);
+
+    // Check if the file is a regular file
+    const stat = fs.statSync(filePath);
+    if (!stat.isFile()) {
+      continue;
+    }
+
+    try {
+      // Retrieve the latest commit SHA for the file on the specified branch
+      const logs = await git.log({
+        fs,
+        dir: REPO_DIR,
+        ref: branchName,
+        filepath: file,
+        depth: 1 // Only the latest commit
+      });
+
+      if (logs.length === 0) {
+        // No commits found for this file; skip it
+        continue;
+      }
+
+      const latestCommit = logs[0];
+      const commitSha = latestCommit.oid;
+      const commitDate = new Date(latestCommit.commit.committer.timestamp * 1000).toISOString();
+
+      // Read the file content
+      const fileContent = fs.readFileSync(filePath, 'utf-8');
+
+      knowledgeFiles.push({
+        filename: file,
+        content: fileContent,
+        commitSha: commitSha,
+        commitDate: commitDate
+      });
+    } catch (error) {
+      console.error(`Failed to retrieve commit for file ${file}:`, error);
+      // Skip files that cause errors
+      continue;
+    }
+  }
+
+  return knowledgeFiles;
+};
+
+/**
+ * Handler for GET requests.
+ * - If 'action=list-branches' is present, return all branches.
+ * - Else, return knowledge files from the 'main' branch.
+ */
+const getKnowledgeFilesHandler = async (req: NextRequest): Promise<NextResponse> => {
+  try {
+    const { searchParams } = new URL(req.url);
+    const action = searchParams.get('action');
+
+    if (action === 'list-branches') {
+      const branches = await listAllBranches();
+      return NextResponse.json({ branches }, { status: 200 });
+    }
+
+    // Default behavior: fetch files from 'main' branch
+    const branchName = 'main';
+    const knowledgeFiles = await getKnowledgeFiles(branchName);
+    return NextResponse.json({ files: knowledgeFiles }, { status: 200 });
+  } catch (error) {
+    console.error('Failed to retrieve knowledge files:', error);
+    return NextResponse.json({ error: (error as Error).message }, { status: 500 });
+  }
};
+
+/**
+ * Handler for POST requests.
+ * - If 'branchName' is provided, fetch files for that branch.
+ * - If 'action=diff', fetch files from the 'main' branch.
+ * - Else, return an error.
+ */
+const postKnowledgeFilesHandler = async (req: NextRequest): Promise<NextResponse> => {
+  try {
+    const body = await req.json();
+    const { action, branchName } = body;
+
+    if (action === 'diff') {
+      // fetch files from main
+      const branchNameForDiff = 'main';
+      const knowledgeFiles = await getKnowledgeFiles(branchNameForDiff);
+      return NextResponse.json({ files: knowledgeFiles }, { status: 200 });
+    }
+
+    if (branchName && typeof branchName === 'string') {
+      // Fetch files from a specified branch
+      const knowledgeFiles = await getKnowledgeFiles(branchName);
+      return NextResponse.json({ files: knowledgeFiles }, { status: 200 });
+    }
+
+    // If no valid action or branchName is provided
+    return NextResponse.json({ error: 'Invalid request. Provide an action or branchName.' }, { status: 400 });
+  } catch (error) {
+    console.error('Failed to process POST request:', error);
+    return NextResponse.json({ error: (error as Error).message }, { status: 500 });
+  }
+};
+
+/**
+ * GET handler to retrieve knowledge files or list branches based on 'action' query parameter.
+ */
+export async function GET(req: NextRequest) {
+  return await getKnowledgeFilesHandler(req);
+}
+
+/**
+ * POST handler to retrieve knowledge files based on 'branchName' or 'action'.
+ */ +export async function POST(req: NextRequest) { + return await postKnowledgeFilesHandler(req); +} diff --git a/src/app/api/local/pr/knowledge/route.ts b/src/app/api/native/pr/knowledge/route.ts similarity index 78% rename from src/app/api/local/pr/knowledge/route.ts rename to src/app/api/native/pr/knowledge/route.ts index 353a04c4..155e5f4c 100644 --- a/src/app/api/local/pr/knowledge/route.ts +++ b/src/app/api/native/pr/knowledge/route.ts @@ -1,4 +1,4 @@ -// src/app/api/local/pr/knowledge/route.ts +// src/app/api/native/pr/knowledge/route.ts import { NextResponse } from 'next/server'; import { NextRequest } from 'next/server'; @@ -10,10 +10,12 @@ import { KnowledgeYamlData } from '@/types'; import yaml from 'js-yaml'; // Define paths and configuration -const REPO_DIR = process.env.NEXT_PUBLIC_LOCAL_REPO_PATH || '/path/to/local/repo'; // Update with actual local path +const LOCAL_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_ROOT_DIR || `${process.env.HOME}/.instructlab-ui`; + const KNOWLEDGE_DIR = 'knowledge'; export async function POST(req: NextRequest) { + const REPO_DIR = path.join(LOCAL_TAXONOMY_ROOT_DIR, '/taxonomy'); try { // Extract the data from the request body const { content, attribution, name, email, submissionSummary, filePath } = await req.json(); @@ -69,9 +71,10 @@ Creator names: ${attribution.creator_names} }); // Respond with success message and branch name - return NextResponse.json({ message: 'Branch and commit created locally', branch: branchName }, { status: 201 }); + console.log(`Knowledge contribution submitted successfully to local taxonomy repo. Submission Name is ${branchName}.`); + return NextResponse.json({ message: 'Knowledge contribution submitted successfully.', branch: branchName }, { status: 201 }); } catch (error) { - console.error('Failed to create local branch and commit:', error); - return NextResponse.json({ error: 'Failed to create local branch and commit' }, { status: 500 }); + console.error(`Failed to submit knowledge contribution to local taxonomy repo:`, error); + return NextResponse.json({ error: 'Failed to submit knowledge contribution.' }, { status: 500 }); } } diff --git a/src/app/api/local/pr/skill/route.ts b/src/app/api/native/pr/skill/route.ts similarity index 67% rename from src/app/api/local/pr/skill/route.ts rename to src/app/api/native/pr/skill/route.ts index 873da074..cdfaefa0 100644 --- a/src/app/api/local/pr/skill/route.ts +++ b/src/app/api/native/pr/skill/route.ts @@ -1,16 +1,20 @@ -// src/app/api/local/pr/skill/route.ts +// src/app/api/native/pr/skill/route.ts import { NextResponse } from 'next/server'; import { NextRequest } from 'next/server'; import * as git from 'isomorphic-git'; import fs from 'fs'; import path from 'path'; import yaml from 'js-yaml'; +import { AttributionData, SkillYamlData } from '@/types'; +import { dumpYaml } from '@/utils/yamlConfig'; // Define paths and configuration -const REPO_DIR = process.env.NEXT_PUBLIC_LOCAL_REPO_PATH || '/path/to/local/repo'; // Update with actual local path +const LOCAL_TAXONOMY_ROOT_DIR = process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_ROOT_DIR || `${process.env.HOME}/.instructlab-ui`; + const SKILLS_DIR = 'compositional_skills'; export async function POST(req: NextRequest) { + const REPO_DIR = path.join(LOCAL_TAXONOMY_ROOT_DIR, '/taxonomy'); try { // Extract the QnA data from the request body TODO: what is documentOutline? 
const { content, attribution, name, email, submissionSummary, documentOutline, filePath } = await req.json(); // eslint-disable-line @typescript-eslint/no-unused-vars @@ -20,12 +24,14 @@ export async function POST(req: NextRequest) { const newYamlFilePath = path.join(SKILLS_DIR, filePath, 'qna.yaml'); const newAttributionFilePath = path.join(SKILLS_DIR, filePath, 'attribution.txt'); - // Prepare file content - const yamlString = yaml.dump(content); + const skillData = yaml.load(content) as SkillYamlData; + const attributionData = attribution as AttributionData; + + const yamlString = dumpYaml(skillData); const attributionString = ` -Title of work: ${attribution.title_of_work} -License of the work: ${attribution.license_of_the_work} -Creator names: ${attribution.creator_names} +Title of work: ${attributionData.title_of_work} +License of the work: ${attributionData.license_of_the_work} +Creator names: ${attributionData.creator_names} `; // Initialize the repository if it doesn’t exist @@ -62,9 +68,10 @@ Creator names: ${attribution.creator_names} }); // Respond with success - return NextResponse.json({ message: 'Branch and commit created locally', branch: branchName }, { status: 201 }); + console.log('Skill contribution submitted successfully. Submission name is ', branchName); + return NextResponse.json({ message: 'Skill contribution submitted successfully.', branch: branchName }, { status: 201 }); } catch (error) { console.error('Failed to create local branch and commit:', error); - return NextResponse.json({ error: 'Failed to create local branch and commit' }, { status: 500 }); + return NextResponse.json({ error: 'Failed to submit skill contribution.' }, { status: 500 }); } } diff --git a/src/app/api/native/upload/route.ts b/src/app/api/native/upload/route.ts new file mode 100644 index 00000000..b67a452f --- /dev/null +++ b/src/app/api/native/upload/route.ts @@ -0,0 +1,98 @@ +// src/app/api/native/upload/route.ts +import { NextResponse } from 'next/server'; +import { NextRequest } from 'next/server'; +import * as git from 'isomorphic-git'; +import http from 'isomorphic-git/http/node'; +import path from 'path'; +import fs from 'fs'; + +const LOCAL_TAXONOMY_DOCS_ROOT_DIR = process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_ROOT_DIR || `${process.env.HOME}/.instructlab-ui`; +const TAXONOMY_KNOWLEDGE_DOCS_REPO_URL = 'https://github.com/instructlab-public/taxonomy-knowledge-docs.git'; + +export async function POST(req: NextRequest) { + try { + const body = await req.json(); + const { files } = body; + const docsRepoUrl = await cloneTaxonomyDocsRepo(); + + // If the repository was not cloned, return an error + if (!docsRepoUrl) { + return NextResponse.json({ error: 'Failed to clone taxonomy knowledge docs repository' }, { status: 500 }); + } + + const timestamp = new Date().toISOString().replace(/[-:.]/g, '').replace('T', 'T').slice(0, -1); + const filesWithTimestamp = files.map((file: { fileName: string; fileContent: string }) => { + const [name, extension] = file.fileName.split(/\.(?=[^.]+$)/); + return { + fileName: `${name}-${timestamp}.${extension}`, + fileContent: file.fileContent + }; + }); + + // Write the files to the repository + for (const file of filesWithTimestamp) { + const filePath = path.join(docsRepoUrl, file.fileName); + fs.writeFileSync(filePath, file.fileContent); + } + + // Checkout the main branch + await git.checkout({ fs, dir: docsRepoUrl, ref: 'main' }); + + // Stage the files + await git.add({ fs, dir: docsRepoUrl, filepath: '.' 
}); + + // Commit the files + const commitSha = await git.commit({ + fs, + dir: docsRepoUrl, + author: { name: 'instructlab-ui', email: 'ui@instructlab.ai' }, + message: `Add files: ${files + .map((file: { fileName: string; fileContent: string }) => file.fileName) + .join(', ')}\n\nSigned-off-by: ui@instructlab.ai` + }); + + return NextResponse.json( + { + repoUrl: docsRepoUrl, + commitSha, + documentNames: filesWithTimestamp.map((file: { fileName: string }) => file.fileName), + prUrl: '' + }, + { status: 201 } + ); + } catch (error) { + console.error('Failed to upload documents:', error); + return NextResponse.json({ error: 'Failed to upload documents' }, { status: 500 }); + } +} + +async function cloneTaxonomyDocsRepo() { + const taxonomyDocsDirectoryPath = path.join(LOCAL_TAXONOMY_DOCS_ROOT_DIR, '/taxonomy-knowledge-docs'); + console.log(`Cloning taxonomy docs repository to ${taxonomyDocsDirectoryPath}...`); + + if (fs.existsSync(taxonomyDocsDirectoryPath)) { + console.log(`Using existing taxonomy knowledge docs repository at ${taxonomyDocsDirectoryPath}.`); + return taxonomyDocsDirectoryPath; + } else { + console.log(`Taxonomy knowledge docs repository not found at ${taxonomyDocsDirectoryPath}. Cloning...`); + } + + try { + await git.clone({ + fs, + http, + dir: taxonomyDocsDirectoryPath, + url: TAXONOMY_KNOWLEDGE_DOCS_REPO_URL, + singleBranch: true, + depth: 1 + }); + + // Include the full path in the response for client display + console.log(`Repository cloned successfully to ${taxonomyDocsDirectoryPath}.`); + return taxonomyDocsDirectoryPath; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'; + console.error(`Failed to clone taxonomy docs repository: ${errorMessage}`); + return null; + } +} diff --git a/src/app/api/tree/route.ts b/src/app/api/tree/route.ts index a3d14077..7af32894 100644 --- a/src/app/api/tree/route.ts +++ b/src/app/api/tree/route.ts @@ -2,18 +2,14 @@ import axios from 'axios'; import { NextRequest, NextResponse } from 'next/server'; -const DEPLOYMENT = process.env.IL_UI_DEPLOYMENT!; -const EXPERIMENTAL_FEATURES = process.env.NEXT_PUBLIC_EXPERIMENTAL_FEATURES || ''; +const PATH_SERVICE_URL = process.env.IL_PATH_SERVICE_URL || 'http://pathservice:4000/tree/'; export async function POST(req: NextRequest) { const body = await req.json(); const { root_path, dir_name } = body; try { - let apiBaseUrl = 'http://pathservice:4000/tree/'; - if (DEPLOYMENT === 'dev' && EXPERIMENTAL_FEATURES !== 'true') { - apiBaseUrl = 'http://localhost:4000/tree/'; - } + const apiBaseUrl = PATH_SERVICE_URL; const response = await axios.get(apiBaseUrl + root_path, { params: { dir_name: dir_name } }); diff --git a/src/app/api/upload/route.ts b/src/app/api/upload/route.ts index b1e2dc78..83297637 100644 --- a/src/app/api/upload/route.ts +++ b/src/app/api/upload/route.ts @@ -4,7 +4,7 @@ import { getToken } from 'next-auth/jwt'; import { NextRequest } from 'next/server'; const GITHUB_API_URL = 'https://api.github.com'; -const TAXONOMY_DOCUMENTS_REPO = process.env.TAXONOMY_DOCUMENTS_REPO!; +const TAXONOMY_DOCUMENTS_REPO = process.env.NEXT_PUBLIC_TAXONOMY_DOCUMENTS_REPO!; const BASE_BRANCH = 'main'; export async function POST(req: NextRequest) { diff --git a/src/app/contribute/knowledge/page.tsx b/src/app/contribute/knowledge/page.tsx index cab86364..9caebe86 100644 --- a/src/app/contribute/knowledge/page.tsx +++ b/src/app/contribute/knowledge/page.tsx @@ -1,14 +1,23 @@ // src/app/contribute/knowledge/page.tsx -import * as React 
from 'react'; -import { AppLayout } from '../../../components/AppLayout'; -import { KnowledgeForm } from '../../../components/Contribute/Knowledge'; - -const KnowledgeFormPage: React.FC = () => { - return ( - - - - ); +'use client'; +import { AppLayout } from '@/components/AppLayout'; +import { KnowledgeFormGithub } from '@/components/Contribute/Knowledge/Github/index'; +import KnowledgeFormNative from '@/components/Contribute/Knowledge/Native/index'; +import { useEffect, useState } from 'react'; + +const KnowledgeFormPage: React.FunctionComponent = () => { + const [deploymentType, setDeploymentType] = useState(); + + useEffect(() => { + const getEnvVariables = async () => { + const res = await fetch('/api/envConfig'); + const envConfig = await res.json(); + setDeploymentType(envConfig.DEPLOYMENT_TYPE); + }; + getEnvVariables(); + }, []); + + return {deploymentType === 'native' ? : }; }; export default KnowledgeFormPage; diff --git a/src/app/contribute/skill/page.tsx b/src/app/contribute/skill/page.tsx index a2b5d3bc..b4d31d35 100644 --- a/src/app/contribute/skill/page.tsx +++ b/src/app/contribute/skill/page.tsx @@ -1,14 +1,23 @@ // src/app/contribute/skill/page.tsx -import * as React from 'react'; -import { AppLayout } from '../../../components/AppLayout'; -import { SkillForm } from '../../../components/Contribute/Skill'; - -const SkillFormPage: React.FC = () => { - return ( - - - - ); +'use client'; +import { AppLayout } from '@/components/AppLayout'; +import { SkillFormGithub } from '@/components/Contribute/Skill/Github/index'; +import { SkillFormNative } from '@/components/Contribute/Skill/Native/index'; +import { useEffect, useState } from 'react'; + +const SkillFormPage: React.FunctionComponent = () => { + const [deploymentType, setDeploymentType] = useState(); + + useEffect(() => { + const getEnvVariables = async () => { + const res = await fetch('/api/envConfig'); + const envConfig = await res.json(); + setDeploymentType(envConfig.DEPLOYMENT_TYPE); + }; + getEnvVariables(); + }, []); + + return {deploymentType === 'native' ? : }; }; export default SkillFormPage; diff --git a/src/app/dashboard/page.tsx b/src/app/dashboard/page.tsx index 646f1dfd..ee28b27a 100644 --- a/src/app/dashboard/page.tsx +++ b/src/app/dashboard/page.tsx @@ -1,17 +1,25 @@ -// src/app/page.tsx +// src/app/dashboard/page.tsx 'use client'; -import * as React from 'react'; import '@patternfly/react-core/dist/styles/base.css'; import { AppLayout } from '@/components/AppLayout'; -import { Index } from '@/components/Dashboard'; +import { DashboardGithub } from '@/components/Dashboard/Github/dashboard'; +import { DashboardNative } from '@/components/Dashboard/Native/dashboard'; +import { useEffect, useState } from 'react'; const Home: React.FunctionComponent = () => { - return ( - - - - ); + const [deploymentType, setDeploymentType] = useState(); + + useEffect(() => { + const getEnvVariables = async () => { + const res = await fetch('/api/envConfig'); + const envConfig = await res.json(); + setDeploymentType(envConfig.DEPLOYMENT_TYPE); + }; + getEnvVariables(); + }, []); + + return {deploymentType === 'native' ? 
: }; }; export default Home; diff --git a/src/app/experimental/chat-eval/page.tsx b/src/app/experimental/chat-eval/page.tsx new file mode 100644 index 00000000..5f408703 --- /dev/null +++ b/src/app/experimental/chat-eval/page.tsx @@ -0,0 +1,17 @@ +// src/app/experimental/chat-eval/page.tsx +'use client'; + +import * as React from 'react'; +import '@patternfly/react-core/dist/styles/base.css'; +import { AppLayout } from '@/components/AppLayout'; +import ChatModelEval from '@/components/Experimental/ChatEval/ChatEval'; + +const ChatEval: React.FunctionComponent = () => { + return ( + + + + ); +}; + +export default ChatEval; diff --git a/src/app/experimental/contribute-local/configuration-local/page.tsx b/src/app/experimental/contribute-local/configuration-local/page.tsx deleted file mode 100644 index c7550df8..00000000 --- a/src/app/experimental/contribute-local/configuration-local/page.tsx +++ /dev/null @@ -1,14 +0,0 @@ -// src/app/experimental/contribute-local/clone-repo/page.tsx -import * as React from 'react'; -import { AppLayout } from '@/components/AppLayout'; -import CloneRepoLocal from '@/components/Experimental/CloneRepoLocal/CloneRepoLocal'; - -const CloneRepoPage: React.FC = () => { - return ( - - - - ); -}; - -export default CloneRepoPage; diff --git a/src/app/experimental/contribute-local/knowledge/page.tsx b/src/app/experimental/contribute-local/knowledge/page.tsx deleted file mode 100644 index dc1a4cf2..00000000 --- a/src/app/experimental/contribute-local/knowledge/page.tsx +++ /dev/null @@ -1,14 +0,0 @@ -// src/app/experimental/contribute-local/knowledge/page.tsx -import * as React from 'react'; -import { AppLayout } from '@/components/AppLayout'; -import { KnowledgeFormLocal } from '@/components/Experimental/ContributeLocal/Knowledge'; - -const KnowledgeFormLocalPage: React.FC = () => { - return ( - - - - ); -}; - -export default KnowledgeFormLocalPage; diff --git a/src/app/experimental/contribute-local/skill/page.tsx b/src/app/experimental/contribute-local/skill/page.tsx deleted file mode 100644 index 9bcdc347..00000000 --- a/src/app/experimental/contribute-local/skill/page.tsx +++ /dev/null @@ -1,14 +0,0 @@ -// src/app/experimental/contribute-local/skill/page.tsx -import * as React from 'react'; -import { AppLayout } from '@/components/AppLayout'; -import SkillFormLocal from '@/components/Experimental/ContributeLocal/Skill'; - -const SkillFormPageLocal: React.FC = () => { - return ( - - - - ); -}; - -export default SkillFormPageLocal; diff --git a/src/app/experimental/dashboard-local/page.tsx b/src/app/experimental/dashboard-local/page.tsx deleted file mode 100644 index e1da680f..00000000 --- a/src/app/experimental/dashboard-local/page.tsx +++ /dev/null @@ -1,17 +0,0 @@ -// src/app/experimental/dashboard-local/page.tsx -'use client'; - -import * as React from 'react'; -import '@patternfly/react-core/dist/styles/base.css'; -import { AppLayout } from '@/components/AppLayout'; -import { DashboardLocal } from '@/components/Experimental/DashboardLocal'; - -const Home: React.FunctionComponent = () => { - return ( - - - - ); -}; - -export default Home; diff --git a/src/app/experimental/fine-tune/page.tsx b/src/app/experimental/fine-tune/page.tsx new file mode 100644 index 00000000..82f74461 --- /dev/null +++ b/src/app/experimental/fine-tune/page.tsx @@ -0,0 +1,17 @@ +// src/app/experimental/fine-tune/page.tsx +'use client'; + +import * as React from 'react'; +import '@patternfly/react-core/dist/styles/base.css'; +import { AppLayout } from '@/components/AppLayout'; +import 
FineTuning from '@/components/Experimental/FineTuning'; + +const FineTune: React.FunctionComponent = () => { + return ( + + + + ); +}; + +export default FineTune; diff --git a/src/app/login/locallogin.tsx b/src/app/login/devmodelogin.tsx similarity index 96% rename from src/app/login/locallogin.tsx rename to src/app/login/devmodelogin.tsx index f30e1136..21df3eb0 100644 --- a/src/app/login/locallogin.tsx +++ b/src/app/login/devmodelogin.tsx @@ -1,4 +1,4 @@ -// src/app/login/LocalLogin.tsx +// src/app/login/DevModeLogin.tsx import React, { useState } from 'react'; import { signIn } from 'next-auth/react'; import { Grid, GridItem } from '@patternfly/react-core/dist/dynamic/layouts/Grid'; @@ -12,7 +12,7 @@ import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/H import GithubIcon from '@patternfly/react-icons/dist/dynamic/icons/github-icon'; import './githublogin.css'; -const LocalLogin: React.FunctionComponent = () => { +const DevModeLogin: React.FunctionComponent = () => { const [, setShowHelperText] = useState(false); const [username, setUsername] = useState(''); const [isValidUsername, setIsValidUsername] = useState(true); @@ -27,7 +27,7 @@ const LocalLogin: React.FunctionComponent = () => { setIsValidUsername(false); setIsValidPassword(false); } else { - window.location.href = '/'; + window.location.href = '/dashboard'; } }; @@ -40,7 +40,7 @@ const LocalLogin: React.FunctionComponent = () => { }; const handleGitHubLogin = () => { - signIn('github', { callbackUrl: '/' }); + signIn('github', { callbackUrl: '/dashboard' }); }; return ( @@ -158,4 +158,4 @@ const LocalLogin: React.FunctionComponent = () => { ); }; -export default LocalLogin; +export default DevModeLogin; diff --git a/src/app/login/githublogin.tsx b/src/app/login/githublogin.tsx index 4b20c9b8..8281b487 100644 --- a/src/app/login/githublogin.tsx +++ b/src/app/login/githublogin.tsx @@ -11,8 +11,8 @@ import { useRouter, useSearchParams } from 'next/navigation'; import { Modal, ModalVariant } from '@patternfly/react-core/dist/esm/deprecated/components/Modal'; const GithubLogin: React.FC = () => { - const searchParams = useSearchParams(); const router = useRouter(); + const searchParams = useSearchParams(); const [showError, setShowError] = useState(false); const [errorMsg, setErrorMsg] = useState('Something went wrong.'); const [githubUsername, setGithubUsername] = useState(null); diff --git a/src/app/login/nativelogin.tsx b/src/app/login/nativelogin.tsx new file mode 100644 index 00000000..5f0c76e7 --- /dev/null +++ b/src/app/login/nativelogin.tsx @@ -0,0 +1,146 @@ +// src/app/login/NativeLogin.tsx +import React, { useState } from 'react'; +import { signIn } from 'next-auth/react'; +import { Grid, GridItem } from '@patternfly/react-core/dist/dynamic/layouts/Grid'; +import { Content } from '@patternfly/react-core/dist/dynamic/components/Content'; +import { Form } from '@patternfly/react-core/dist/dynamic/components/Form'; +import { FormGroup } from '@patternfly/react-core/dist/dynamic/components/Form'; +import { TextInput } from '@patternfly/react-core/dist/dynamic/components/TextInput'; +import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; +import { HelperText } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import './githublogin.css'; + +const NativeLogin: React.FunctionComponent = () => { + const [, setShowHelperText] = useState(false); + const [username, 
setUsername] = useState(''); + const [isValidUsername, setIsValidUsername] = useState(true); + const [password, setPassword] = useState(''); + const [isValidPassword, setIsValidPassword] = useState(true); + + const handleLogin = async (e: React.FormEvent) => { + e.preventDefault(); + const result = await signIn('credentials', { redirect: false, username, password }); + if (result?.error) { + setShowHelperText(true); + setIsValidUsername(false); + setIsValidPassword(false); + } else { + window.location.href = '/dashboard'; + } + }; + + const handleUsernameChange = (_event: React.FormEvent, value: string) => { + setUsername(value); + }; + + const handlePasswordChange = (_event: React.FormEvent, value: string) => { + setPassword(value); + }; + + return ( +
+ + + + + Login locally with a username and password or via GitHub OAuth + + + + + Join the novel, community-based movement to create truly open-source LLMs + + +
+
+ + + {!isValidUsername && ( + + Invalid Username + + )} + + + + {!isValidPassword && ( + + Invalid password + + )} + + +
+
+ + + + GitHub + {' '} + |{' '} + + Collaborate + {' '} + |{' '} + + Code Of Conduct + + + + + Terms of use + {' '} + |{' '} + + Privacy Policy + + + +
+
+
+ ); +}; + +export default NativeLogin; diff --git a/src/app/login/page.tsx b/src/app/login/page.tsx index cf5e59ce..85feca05 100644 --- a/src/app/login/page.tsx +++ b/src/app/login/page.tsx @@ -1,34 +1,43 @@ // src/app/login/page.tsx 'use client'; -import React, { useState, useEffect } from 'react'; +import React, { useState, useEffect, Suspense } from 'react'; import './githublogin.css'; -import LocalLogin from '@/app/login/locallogin'; +import NativeLogin from '@/app/login/nativelogin'; import GithubLogin from '@/app/login/githublogin'; +import DevModeLogin from './devmodelogin'; const Login: React.FunctionComponent = () => { - const [isProd, setIsProd] = useState(null); + const [deploymentType, setDeploymentType] = useState(); + const [isDevModeEnabled, setIsDevModeEnabled] = useState(false); useEffect(() => { const chooseLoginPage = async () => { try { const res = await fetch('/api/envConfig'); const envConfig = await res.json(); - setIsProd(envConfig.DEPLOYMENT_TYPE !== 'dev'); + setDeploymentType(envConfig.DEPLOYMENT_TYPE); + setIsDevModeEnabled(envConfig.ENABLE_DEV_MODE === 'true'); } catch (error) { console.error('Error fetching environment config:', error); - setIsProd(true); + setDeploymentType('github'); } }; chooseLoginPage(); }, []); - if (isProd === null) { + if (isDevModeEnabled) { + return ; + } + if (deploymentType === 'native') { // Render a loading indicator or null while determining the environment - return null; + return ; } - - return isProd ? : ; + return ( + + + + ); }; export default Login; diff --git a/src/app/page.tsx b/src/app/page.tsx index 6853aa0b..75a82eda 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -1,11 +1,11 @@ // src/app/page.tsx 'use client'; +import { DashboardGithub } from '@/components/Dashboard/Github/dashboard'; import { GithubAccessPopup } from '@/components/GithubAccessPopup'; import * as React from 'react'; import { useState } from 'react'; import { AppLayout } from '../components/AppLayout'; -import { Index } from '../components/Dashboard'; const HomePage: React.FC = () => { const [isWarningConditionAccepted, setIsWarningConditionAccepted] = useState(false); @@ -19,7 +19,7 @@ const HomePage: React.FC = () => { return ( - {isWarningConditionAccepted && } + {isWarningConditionAccepted && } ); }; diff --git a/src/components/AppLayout.tsx b/src/components/AppLayout.tsx index 3023e6ce..c25e4a75 100644 --- a/src/components/AppLayout.tsx +++ b/src/components/AppLayout.tsx @@ -21,11 +21,13 @@ import { PageSidebar } from '@patternfly/react-core/dist/dynamic/components/Page import { PageSidebarBody } from '@patternfly/react-core/dist/dynamic/components/Page'; import { SkipToContent } from '@patternfly/react-core/dist/dynamic/components/SkipToContent'; import { Spinner } from '@patternfly/react-core/dist/dynamic/components/Spinner'; +import { Bullseye } from '@patternfly/react-core/dist/dynamic/layouts/Bullseye'; import UserMenu from './UserMenu/UserMenu'; import { useSession } from 'next-auth/react'; // import { useTheme } from '../context/ThemeContext'; import { useState } from 'react'; - +import '@/components/styles/globals.scss'; +// import '@/components/app.scss'; interface IAppLayout { children: React.ReactNode; } @@ -63,21 +65,17 @@ const AppLayout: React.FunctionComponent = ({ children }) => { }, [session, status, pathname, router]); if (status === 'loading') { - return ; + return ( + + + + ); } if (!session) { return null; // Return nothing if not authenticated to avoid flicker } - //const isExperimentalEnabled = 
process.env.NEXT_PUBLIC_EXPERIMENTAL_FEATURES === 'true'; - - // Only log if experimental features are enabled - if (isExperimentalEnabled) { - console.log('Is Experimental Enabled:', isExperimentalEnabled); - console.log('Environment Variable:', process.env.NEXT_PUBLIC_EXPERIMENTAL_FEATURES); - } - const routes = [ { path: '/dashboard', label: 'Dashboard' }, { @@ -100,10 +98,8 @@ const AppLayout: React.FunctionComponent = ({ children }) => { path: '/experimental', label: 'Experimental Features', children: [ - { path: '/experimental/dashboard-local/', label: 'Local Dashboard' }, - { path: '/experimental/contribute-local/skill/', label: 'Local Skill' }, - { path: '/experimental/contribute-local/knowledge/', label: 'Local Knowledge' }, - { path: '/experimental/contribute-local/configuration-local/', label: 'Local Configuration' } + { path: '/experimental/fine-tune/', label: 'Fine-tuning' }, + { path: '/experimental/chat-eval/', label: 'Model Chat Eval' } ] } ].filter(Boolean) as Route[]; diff --git a/src/components/Contribute/EditKnowledge/EditKnowledge.tsx b/src/components/Contribute/EditKnowledge/EditKnowledge.tsx index b37a35cf..3a8a2639 100644 --- a/src/components/Contribute/EditKnowledge/EditKnowledge.tsx +++ b/src/components/Contribute/EditKnowledge/EditKnowledge.tsx @@ -8,11 +8,12 @@ import { KnowledgeSchemaVersion } from '@/types/const'; import { fetchPullRequest, fetchFileContent, fetchPullRequestFiles } from '@/utils/github'; import yaml from 'js-yaml'; import axios from 'axios'; -import KnowledgeForm, { KnowledgeEditFormData, KnowledgeFormData, QuestionAndAnswerPair, SeedExample } from '@/components/Contribute/Knowledge'; +import { KnowledgeEditFormData, KnowledgeFormData, QuestionAndAnswerPair, KnowledgeSeedExample } from '@/types'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; import { useEffect, useState } from 'react'; import { Modal, ModalVariant } from '@patternfly/react-core/dist/esm/deprecated/components/Modal/Modal'; import { useRouter } from 'next/navigation'; +import KnowledgeFormGithub from '../Knowledge/Github'; interface EditKnowledgeClientComponentProps { prNumber: number; @@ -83,10 +84,10 @@ const EditKnowledge: React.FC = ({ prNumber } knowledgeExistingFormData.knowledgeDocumentCommit = yamlData.document.commit; knowledgeExistingFormData.documentName = yamlData.document.patterns.join(', '); - const seedExamples: SeedExample[] = []; + const seedExamples: KnowledgeSeedExample[] = []; yamlData.seed_examples.forEach((seed, index) => { // iterate through questions_and_answers and create a new object for each - const example: SeedExample = { + const example: KnowledgeSeedExample = { immutable: index < 5 ? 
true : false, isExpanded: true, context: seed.context, @@ -175,7 +176,7 @@ const EditKnowledge: React.FC = ({ prNumber } return ( // - + // ); }; diff --git a/src/components/Contribute/EditSkill/EditSkill.tsx b/src/components/Contribute/EditSkill/EditSkill.tsx index f98ae791..3212ae4f 100644 --- a/src/components/Contribute/EditSkill/EditSkill.tsx +++ b/src/components/Contribute/EditSkill/EditSkill.tsx @@ -7,11 +7,11 @@ import { useEffect, useState } from 'react'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; import { Modal, ModalVariant } from '@patternfly/react-core/dist/esm/deprecated/components/Modal/Modal'; import { useRouter } from 'next/navigation'; -import SkillForm, { SkillEditFormData, SkillFormData, SeedExample } from '@/components/Contribute/Skill'; +import SkillFormGithub, { SkillEditFormData } from '@/components/Contribute/Skill/Github'; import { fetchPullRequest, fetchFileContent, fetchPullRequestFiles } from '@/utils/github'; import yaml from 'js-yaml'; import axios from 'axios'; -import { SkillYamlData, AttributionData, PullRequestFile } from '@/types'; +import { SkillYamlData, AttributionData, PullRequestFile, SkillFormData, SkillSeedExample } from '@/types'; import { SkillSchemaVersion } from '@/types/const'; interface EditSkillClientComponentProps { @@ -72,9 +72,9 @@ const EditSkill: React.FC = ({ prNumber }) => { // Populate the form fields with YAML data skillExistingFormData.documentOutline = yamlData.task_description; - const seedExamples: SeedExample[] = []; + const seedExamples: SkillSeedExample[] = []; yamlData.seed_examples.forEach((seed, index) => { - const example: SeedExample = { + const example: SkillSeedExample = { immutable: index < 5 ? true : false, isExpanded: true, context: seed.context || '', @@ -147,7 +147,7 @@ const EditSkill: React.FC = ({ prNumber }) => { ); } - return ; + return ; }; export default EditSkill; diff --git a/src/components/Contribute/Knowledge/AttributionInformation/AttributionInformation.tsx b/src/components/Contribute/Knowledge/AttributionInformation/AttributionInformation.tsx index 87f97851..b606a3ce 100644 --- a/src/components/Contribute/Knowledge/AttributionInformation/AttributionInformation.tsx +++ b/src/components/Contribute/Knowledge/AttributionInformation/AttributionInformation.tsx @@ -5,8 +5,8 @@ import { HelperText } from '@patternfly/react-core/dist/dynamic/components/Helpe import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; import ExclamationCircleIcon from '@patternfly/react-icons/dist/dynamic/icons/exclamation-circle-icon'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; -import { KnowledgeFormData } from '..'; import { checkKnowledgeFormCompletion } from '../validation'; +import { KnowledgeFormData } from '@/types'; interface Props { reset: boolean; diff --git a/src/components/Contribute/Knowledge/AutoFill.ts b/src/components/Contribute/Knowledge/AutoFill.ts index 568db39b..7e05911b 100644 --- a/src/components/Contribute/Knowledge/AutoFill.ts +++ b/src/components/Contribute/Knowledge/AutoFill.ts @@ -1,4 +1,4 @@ -import { KnowledgeFormData, QuestionAndAnswerPair, SeedExample } from '.'; +import { KnowledgeFormData, KnowledgeSeedExample, QuestionAndAnswerPair } from '@/types'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; const questionAndAnswerPairs1: QuestionAndAnswerPair[] = [ @@ -127,7 +127,7 @@ const questionAndAnswerPairs5: QuestionAndAnswerPair[] = [ } ]; 
-const seedExamples: SeedExample[] = [ +const seedExamples: KnowledgeSeedExample[] = [ { immutable: true, isExpanded: true, @@ -238,12 +238,12 @@ const seedExamples: SeedExample[] = [ { immutable: true, isExpanded: true, - context: `Phoenix is the radiant of two annual meteor showers. The Phoenicids, - also known as the December Phoenicids, were first observed on 3 December 1887. - The shower was particularly intense in December 1956, and is thought related - to the breakup of the short-period comet 289P/Blanpain. It peaks around 4–5 - December, though is not seen every year.[58] A very minor meteor shower peaks - around July 14 with around one meteor an hour, though meteors can be seen + context: `Phoenix is the radiant of two annual meteor showers. The Phoenicids, + also known as the December Phoenicids, were first observed on 3 December 1887. + The shower was particularly intense in December 1956, and is thought related + to the breakup of the short-period comet 289P/Blanpain. It peaks around 4–5 + December, though is not seen every year.[58] A very minor meteor shower peaks + around July 14 with around one meteor an hour, though meteors can be seen anytime from July 3 to 18; this shower is referred to as the July Phoenicids.[59]`, isContextValid: ValidatedOptions.success, questionAndAnswers: questionAndAnswerPairs5 diff --git a/src/components/Contribute/Knowledge/DownloadAttribution/DownloadAttribution.tsx b/src/components/Contribute/Knowledge/DownloadAttribution/DownloadAttribution.tsx index 3f0fe267..d61394fe 100644 --- a/src/components/Contribute/Knowledge/DownloadAttribution/DownloadAttribution.tsx +++ b/src/components/Contribute/Knowledge/DownloadAttribution/DownloadAttribution.tsx @@ -1,7 +1,8 @@ import React from 'react'; -import { KnowledgeFormData } from '..'; import { DropdownItem } from '@patternfly/react-core/dist/esm/components/Dropdown/DropdownItem'; +import { Icon } from '@patternfly/react-core/dist/dynamic/components/Icon'; import FileIcon from '@patternfly/react-icons/dist/esm/icons/file-icon'; +import { KnowledgeFormData } from '@/types'; interface Props { knowledgeFormData: KnowledgeFormData; @@ -29,8 +30,18 @@ const DownloadAttribution: React.FC = ({ knowledgeFormData }) => { }; return ( - - Attribution File + + + + } + > + {' '} + Attribution File ); }; diff --git a/src/components/Contribute/Knowledge/DownloadDropdown/DownloadDropdown.tsx b/src/components/Contribute/Knowledge/DownloadDropdown/DownloadDropdown.tsx index c7aa0dca..2dc99c3f 100644 --- a/src/components/Contribute/Knowledge/DownloadDropdown/DownloadDropdown.tsx +++ b/src/components/Contribute/Knowledge/DownloadDropdown/DownloadDropdown.tsx @@ -1,11 +1,12 @@ import React from 'react'; import { Dropdown } from '@patternfly/react-core/dist/dynamic/components/Dropdown'; import { DropdownList } from '@patternfly/react-core/dist/dynamic/components/Dropdown'; +import { Icon } from '@patternfly/react-core/dist/dynamic/components/Icon'; import { MenuToggle, MenuToggleElement } from '@patternfly/react-core/dist/dynamic/components/MenuToggle'; import DownloadYaml from '../DownloadYaml/DownloadYaml'; import DownloadAttribution from '../DownloadAttribution/DownloadAttribution'; -import { KnowledgeFormData } from '..'; import DownloadIcon from '@patternfly/react-icons/dist/esm/icons/download-icon'; +import { KnowledgeFormData } from '@/types'; interface Props { knowledgeFormData: KnowledgeFormData; @@ -30,8 +31,18 @@ export const DownloadDropdown: React.FunctionComponent = ({ knowledgeForm onSelect={onSelect} 
onOpenChange={(isOpen: boolean) => setIsOpen(isOpen)} toggle={(toggleRef: React.Ref) => ( - - Download + + {' '} + + } + > + {' '} + Download )} ouiaId="DownloadDropdown" diff --git a/src/components/Contribute/Knowledge/DownloadYaml/DownloadYaml.tsx b/src/components/Contribute/Knowledge/DownloadYaml/DownloadYaml.tsx index c5990b9e..f39b6c2a 100644 --- a/src/components/Contribute/Knowledge/DownloadYaml/DownloadYaml.tsx +++ b/src/components/Contribute/Knowledge/DownloadYaml/DownloadYaml.tsx @@ -1,8 +1,8 @@ import React from 'react'; -import { KnowledgeFormData } from '..'; -import { KnowledgeYamlData } from '@/types'; +import { KnowledgeFormData, KnowledgeYamlData } from '@/types'; import { KnowledgeSchemaVersion } from '@/types/const'; import { dumpYaml } from '@/utils/yamlConfig'; +import { Icon } from '@patternfly/react-core/dist/dynamic/components/Icon'; import { DropdownItem } from '@patternfly/react-core/dist/esm/components/Dropdown/DropdownItem'; import CodeIcon from '@patternfly/react-icons/dist/esm/icons/code-icon'; @@ -43,8 +43,18 @@ const DownloadYaml: React.FC = ({ knowledgeFormData, githubUsername }) => document.body.removeChild(a); }; return ( - - Yaml File + + + + } + > + {' '} + YAML File ); }; diff --git a/src/components/Contribute/Knowledge/DocumentInformation/DocumentInformation.tsx b/src/components/Contribute/Knowledge/Github/DocumentInformation/DocumentInformation.tsx similarity index 81% rename from src/components/Contribute/Knowledge/DocumentInformation/DocumentInformation.tsx rename to src/components/Contribute/Knowledge/Github/DocumentInformation/DocumentInformation.tsx index 53f249d3..786c441c 100644 --- a/src/components/Contribute/Knowledge/DocumentInformation/DocumentInformation.tsx +++ b/src/components/Contribute/Knowledge/Github/DocumentInformation/DocumentInformation.tsx @@ -2,15 +2,15 @@ import React, { useEffect, useState } from 'react'; import { FormFieldGroupHeader, FormGroup, FormHelperText } from '@patternfly/react-core/dist/dynamic/components/Form'; import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; import { TextInput } from '@patternfly/react-core/dist/dynamic/components/TextInput'; -import { UploadFile } from './../UploadFile'; -import { Alert, AlertActionLink, AlertActionCloseButton } from '@patternfly/react-core/dist/dynamic/components/Alert'; +import { UploadFile } from '../../UploadFile'; +import { Alert, AlertActionLink, AlertActionCloseButton, AlertGroup } from '@patternfly/react-core/dist/dynamic/components/Alert'; import { HelperText } from '@patternfly/react-core/dist/dynamic/components/HelperText'; import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; import ExclamationCircleIcon from '@patternfly/react-icons/dist/dynamic/icons/exclamation-circle-icon'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; -import { KnowledgeFormData } from '..'; -import { checkKnowledgeFormCompletion } from '../validation'; +import { checkKnowledgeFormCompletion } from '../../validation'; import { Modal, ModalVariant } from '@patternfly/react-core/dist/esm/deprecated/components/Modal/Modal'; +import { KnowledgeFormData } from '@/types'; interface Props { reset: boolean; @@ -41,18 +41,18 @@ const DocumentInformation: React.FC = ({ const [uploadedFiles, setUploadedFiles] = useState([]); const [isModalOpen, setIsModalOpen] = useState(false); const [modalText, setModalText] = useState(); - - const [successAlertTitle, setSuccessAlertTitle] = useState(); - const 
[successAlertMessage, setSuccessAlertMessage] = useState(); - const [successAlertLink, setSuccessAlertLink] = useState(); - - const [failureAlertTitle, setFailureAlertTitle] = useState(); - const [failureAlertMessage, setFailureAlertMessage] = useState(); - + const [alertInfo, setAlertInfo] = useState(); const [validRepo, setValidRepo] = useState(); const [validCommit, setValidCommit] = useState(); const [validDocumentName, setValidDocumentName] = useState(); + interface AlertInfo { + type: 'success' | 'danger' | 'info'; + title: string; + message: string; + link?: string; + } + useEffect(() => { setValidRepo(ValidatedOptions.default); setValidCommit(ValidatedOptions.default); @@ -116,6 +116,13 @@ const DocumentInformation: React.FC = ({ const handleDocumentUpload = async () => { if (uploadedFiles.length > 0) { + const alertInfo: AlertInfo = { + type: 'info', + title: 'Document upload(s) in progress!', + message: 'Document upload(s) is in progress. You will be notified once the upload successfully completes.' + }; + setAlertInfo(alertInfo); + const fileContents: { fileName: string; fileContent: string }[] = []; await Promise.all( @@ -144,9 +151,13 @@ const DocumentInformation: React.FC = ({ }); if (!response.ok) { - setFailureAlertTitle('Failed to upload document'); - setFailureAlertMessage(`This upload failed. ${response.statusText}`); - new Error(response.statusText || 'Failed to upload document'); + const alertInfo: AlertInfo = { + type: 'danger', + title: 'Document upload failed', + message: `Upload failed for the added documents. ${response.statusText}` + }; + setAlertInfo(alertInfo); + new Error(response.statusText || 'Document upload failed'); return; } @@ -156,22 +167,21 @@ const DocumentInformation: React.FC = ({ setKnowledgeDocumentCommit(result.commitSha); setDocumentName(result.documentNames.join(', ')); // Populate the patterns field console.log('Files uploaded:', result.documentNames); - setSuccessAlertTitle('Document uploaded successfully!'); - setSuccessAlertMessage('Documents have been uploaded to your repo to be referenced in the knowledge submission.'); - setSuccessAlertLink(result.prUrl); + const alertInfo: AlertInfo = { + type: 'success', + title: 'Document uploaded successfully!', + message: 'Documents have been uploaded to your repo to be referenced in the knowledge submission.' + }; + if (result.prUrl !== '') { + alertInfo.link = result.prUrl; + } + setAlertInfo(alertInfo); } } }; const onCloseSuccessAlert = () => { - setSuccessAlertTitle(undefined); - setSuccessAlertMessage(undefined); - setSuccessAlertLink(undefined); - }; - - const onCloseFailureAlert = () => { - setFailureAlertTitle(undefined); - setFailureAlertMessage(undefined); + setAlertInfo(undefined); }; const handleAutomaticUpload = () => { @@ -209,17 +219,7 @@ const DocumentInformation: React.FC = ({ return (
- - Document Information * -

- ), - id: 'doc-info-id' - }} - titleDescription="Add the relevant document's information" - /> +
); diff --git a/src/components/Contribute/Knowledge/Submit/Submit.tsx b/src/components/Contribute/Knowledge/Github/Submit/Submit.tsx similarity index 95% rename from src/components/Contribute/Knowledge/Submit/Submit.tsx rename to src/components/Contribute/Knowledge/Github/Submit/Submit.tsx index d0c24bac..9d7416df 100644 --- a/src/components/Contribute/Knowledge/Submit/Submit.tsx +++ b/src/components/Contribute/Knowledge/Github/Submit/Submit.tsx @@ -1,10 +1,10 @@ import React from 'react'; import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; -import { ActionGroupAlertContent, KnowledgeFormData } from '..'; -import { AttributionData, KnowledgeYamlData } from '@/types'; +import { ActionGroupAlertContent } from '..'; +import { AttributionData, KnowledgeFormData, KnowledgeYamlData } from '@/types'; import { KnowledgeSchemaVersion } from '@/types/const'; import { dumpYaml } from '@/utils/yamlConfig'; -import { validateFields } from '../validation'; +import { validateFields } from '../../validation'; interface Props { disableAction: boolean; diff --git a/src/components/Contribute/Knowledge/Update/Update.tsx b/src/components/Contribute/Knowledge/Github/Update/Update.tsx similarity index 97% rename from src/components/Contribute/Knowledge/Update/Update.tsx rename to src/components/Contribute/Knowledge/Github/Update/Update.tsx index 4824c068..8961e2b7 100644 --- a/src/components/Contribute/Knowledge/Update/Update.tsx +++ b/src/components/Contribute/Knowledge/Github/Update/Update.tsx @@ -1,10 +1,10 @@ import React from 'react'; import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; -import { ActionGroupAlertContent, KnowledgeFormData } from '..'; -import { AttributionData, KnowledgeYamlData, PullRequestFile } from '@/types'; +import { ActionGroupAlertContent } from '..'; +import { AttributionData, KnowledgeFormData, KnowledgeYamlData, PullRequestFile } from '@/types'; import { KnowledgeSchemaVersion } from '@/types/const'; import { dumpYaml } from '@/utils/yamlConfig'; -import { validateFields } from '../validation'; +import { validateFields } from '../../validation'; import { amendCommit, getGitHubUsername, updatePullRequest } from '@/utils/github'; import { useSession } from 'next-auth/react'; import { useRouter } from 'next/navigation'; diff --git a/src/components/Experimental/ContributeLocal/Knowledge/index.tsx b/src/components/Contribute/Knowledge/Github/index.tsx similarity index 80% rename from src/components/Experimental/ContributeLocal/Knowledge/index.tsx rename to src/components/Contribute/Knowledge/Github/index.tsx index 7a7d03db..96f9b30d 100644 --- a/src/components/Experimental/ContributeLocal/Knowledge/index.tsx +++ b/src/components/Contribute/Knowledge/Github/index.tsx @@ -1,8 +1,8 @@ -// src/components/Experimental/ContributeLocal/Knowledge/index.tsx +// src/components/Contribute/Knowledge/Github/index.tsx 'use client'; import React, { useEffect, useMemo, useState } from 'react'; -import './knowledge.css'; -import { Alert, AlertActionCloseButton } from '@patternfly/react-core/dist/dynamic/components/Alert'; +import '../knowledge.css'; +import { Alert, AlertActionCloseButton, AlertGroup } from '@patternfly/react-core/dist/dynamic/components/Alert'; import { ActionGroup } from '@patternfly/react-core/dist/dynamic/components/Form'; import { getGitHubUsername } from '@/utils/github'; import { useSession } from 'next-auth/react'; @@ -10,9 +10,9 @@ import AuthorInformation from '@/components/Contribute/AuthorInformation'; import { 
FormType } from '@/components/Contribute/AuthorInformation'; import KnowledgeInformation from '@/components/Contribute/Knowledge/KnowledgeInformation/KnowledgeInformation'; import FilePathInformation from '@/components/Contribute/Knowledge/FilePathInformation/FilePathInformation'; -import DocumentInformation from '@/components/Contribute/Knowledge/DocumentInformation/DocumentInformation'; +import DocumentInformation from '@/components/Contribute/Knowledge/Github/DocumentInformation/DocumentInformation'; import AttributionInformation from '@/components/Contribute/Knowledge/AttributionInformation/AttributionInformation'; -import Submit from './SubmitLocal/Submit'; +import Submit from './Submit/Submit'; import { Breadcrumb } from '@patternfly/react-core/dist/dynamic/components/Breadcrumb'; import { BreadcrumbItem } from '@patternfly/react-core/dist/dynamic/components/Breadcrumb'; import { PageBreadcrumb } from '@patternfly/react-core/dist/dynamic/components/Page'; @@ -25,62 +25,18 @@ import { checkKnowledgeFormCompletion } from '@/components/Contribute/Knowledge/ import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; import { DownloadDropdown } from '@/components/Contribute/Knowledge/DownloadDropdown/DownloadDropdown'; import { ViewDropdown } from '@/components/Contribute/Knowledge/ViewDropdown/ViewDropdown'; -import Update from '@/components/Contribute/Knowledge/Update/Update'; -import { PullRequestFile } from '@/types'; +import Update from '@/components/Contribute/Knowledge/Github/Update/Update'; +import { KnowledgeEditFormData, KnowledgeFormData, KnowledgeYamlData, QuestionAndAnswerPair } from '@/types'; import { Button } from '@patternfly/react-core/dist/esm/components/Button/Button'; import { useRouter } from 'next/navigation'; import { autoFillKnowledgeFields } from '@/components/Contribute/Knowledge/AutoFill'; import { Spinner } from '@patternfly/react-core/dist/esm/components/Spinner'; import { Wizard, WizardStep } from '@patternfly/react-core/dist/esm/components/Wizard'; import { Content } from '@patternfly/react-core/dist/dynamic/components/Content'; -import ReviewSubmission from '@/components/Experimental/ReviewSubmission'; - -export interface QuestionAndAnswerPair { - immutable: boolean; - question: string; - isQuestionValid: ValidatedOptions; - questionValidationError?: string; - answer: string; - isAnswerValid: ValidatedOptions; - answerValidationError?: string; -} - -export interface SeedExample { - immutable: boolean; - isExpanded: boolean; - context: string; - isContextValid: ValidatedOptions; - validationError?: string; - questionAndAnswers: QuestionAndAnswerPair[]; -} - -export interface KnowledgeFormData { - email: string; - name: string; - submissionSummary: string; - domain: string; - documentOutline: string; - filePath: string; - seedExamples: SeedExample[]; - knowledgeDocumentRepositoryUrl: string; - knowledgeDocumentCommit: string; - documentName: string; - titleWork: string; - linkWork: string; - revision: string; - licenseWork: string; - creators: string; -} - -export interface KnowledgeEditFormData { - isEditForm: boolean; - knowledgeVersion: number; - pullRequestNumber: number; - branchName: string; - yamlFile: PullRequestFile; - attributionFile: PullRequestFile; - knowledgeFormData: KnowledgeFormData; -} +import ReviewSubmission from '@/components/Contribute/Knowledge/ReviewSubmission'; +import { Flex } from '@patternfly/react-core/dist/esm/layouts/Flex/Flex'; +import { FlexItem } from 
'@patternfly/react-core/dist/esm/layouts/Flex/FlexItem'; +import { YamlFileUploadModal } from '../../YamlFileUploadModal'; export interface ActionGroupAlertContent { title: string; @@ -95,8 +51,8 @@ export interface KnowledgeFormProps { knowledgeEditFormData?: KnowledgeEditFormData; } -export const KnowledgeFormLocal: React.FunctionComponent = ({ knowledgeEditFormData }) => { - const [deploymentType, setDeploymentType] = useState(); +export const KnowledgeFormGithub: React.FunctionComponent = ({ knowledgeEditFormData }) => { + const [devModeEnabled, setDevModeEnabled] = useState(); const { data: session } = useSession(); const [githubUsername, setGithubUsername] = useState(''); @@ -129,12 +85,13 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const [disableAction, setDisableAction] = useState(true); const [reset, setReset] = useState(false); + const [isModalOpen, setIsModalOpen] = React.useState(false); const router = useRouter(); const [activeStepIndex] = useState(1); - const emptySeedExample: SeedExample = { + const emptySeedExample: KnowledgeSeedExample = { immutable: true, isExpanded: false, context: '', @@ -164,7 +121,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( ] }; - const [seedExamples, setSeedExamples] = useState([ + const [seedExamples, setSeedExamples] = useState([ emptySeedExample, emptySeedExample, emptySeedExample, @@ -176,7 +133,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const getEnvVariables = async () => { const res = await fetch('/api/envConfig'); const envConfig = await res.json(); - setDeploymentType(envConfig.DEPLOYMENT_TYPE); + setDevModeEnabled(envConfig.ENABLE_DEV_MODE === 'true'); }; getEnvVariables(); }, []); @@ -280,7 +237,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const handleContextInputChange = (seedExampleIndex: number, contextValue: string): void => { setSeedExamples( - seedExamples.map((seedExample: SeedExample, index: number) => + seedExamples.map((seedExample: KnowledgeSeedExample, index: number) => index === seedExampleIndex ? { ...seedExample, @@ -292,7 +249,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( }; const handleContextBlur = (seedExampleIndex: number): void => { - const updatedSeedExamples = seedExamples.map((seedExample: SeedExample, index: number): SeedExample => { + const updatedSeedExamples = seedExamples.map((seedExample: KnowledgeSeedExample, index: number): KnowledgeSeedExample => { if (index === seedExampleIndex) { const { msg, status } = validateContext(seedExample.context); return { @@ -308,7 +265,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const handleQuestionInputChange = (seedExampleIndex: number, questionAndAnswerIndex: number, questionValue: string): void => { setSeedExamples( - seedExamples.map((seedExample: SeedExample, index: number) => + seedExamples.map((seedExample: KnowledgeSeedExample, index: number) => index === seedExampleIndex ? { ...seedExample, @@ -328,7 +285,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const handleQuestionBlur = (seedExampleIndex: number, questionAndAnswerIndex: number): void => { setSeedExamples( - seedExamples.map((seedExample: SeedExample, index: number) => + seedExamples.map((seedExample: KnowledgeSeedExample, index: number) => index === seedExampleIndex ? 
{ ...seedExample, @@ -351,7 +308,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const handleAnswerInputChange = (seedExampleIndex: number, questionAndAnswerIndex: number, answerValue: string): void => { setSeedExamples( - seedExamples.map((seedExample: SeedExample, index: number) => + seedExamples.map((seedExample: KnowledgeSeedExample, index: number) => index === seedExampleIndex ? { ...seedExample, @@ -371,7 +328,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( const handleAnswerBlur = (seedExampleIndex: number, questionAndAnswerIndex: number): void => { setSeedExamples( - seedExamples.map((seedExample: SeedExample, index: number) => + seedExamples.map((seedExample: KnowledgeSeedExample, index: number) => index === seedExampleIndex ? { ...seedExample, @@ -443,6 +400,32 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( setSeedExamples(autoFillKnowledgeFields.seedExamples); }; + const yamlSeedExampleToFormSeedExample = ( + yamlSeedExamples: { context: string; questions_and_answers: { question: string; answer: string }[] }[] + ) => { + return yamlSeedExamples.map((yamlSeedExample) => ({ + immutable: true, + isExpanded: false, + context: yamlSeedExample.context ?? '', + isContextValid: ValidatedOptions.default, + questionAndAnswers: yamlSeedExample.questions_and_answers.map((questionAndAnswer) => ({ + question: questionAndAnswer.question ?? '', + answer: questionAndAnswer.answer ?? '' + })) + })) as KnowledgeSeedExample[]; + }; + + const onYamlUploadKnowledgeFillForm = (data: KnowledgeYamlData): void => { + setName(data.created_by ?? ''); + setDocumentOutline(data.document_outline ?? ''); + setSubmissionSummary(data.document_outline ?? ''); + setDomain(data.domain ?? ''); + setKnowledgeDocumentRepositoryUrl(data.document.repo ?? ''); + setKnowledgeDocumentCommit(data.document.commit ?? ''); + setDocumentName(data.document.patterns.join(', ') ?? ''); + setSeedExamples(yamlSeedExampleToFormSeedExample(data.seed_examples)); + }; + const knowledgeFormData: KnowledgeFormData = { email: email, name: name, @@ -591,17 +574,33 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( - - Knowledge Contribution - + + + + Knowledge Contribution + + + + {devModeEnabled && ( + + )} + {' '} + + + - {deploymentType === 'dev' && ( - - )} + {steps.map((step) => ( @@ -612,27 +611,29 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( {actionGroupAlertContent && ( - } - > -

- {actionGroupAlertContent.waitAlert && } - {actionGroupAlertContent.message} -
- {!actionGroupAlertContent.waitAlert && - actionGroupAlertContent.success && - actionGroupAlertContent.url && - actionGroupAlertContent.url.trim().length > 0 && ( - - View your new branch - - )} -

-
+ + } + > +

+ {actionGroupAlertContent.waitAlert && } + {actionGroupAlertContent.message} +
+ {!actionGroupAlertContent.waitAlert && + actionGroupAlertContent.success && + actionGroupAlertContent.url && + actionGroupAlertContent.url.trim().length > 0 && ( + + View your new branch + + )} +

+
+
)} @@ -652,7 +653,7 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( disableAction={disableAction} knowledgeFormData={knowledgeFormData} setActionGroupAlertContent={setActionGroupAlertContent} - email={email} + githubUsername={githubUsername} resetForm={resetForm} /> )} @@ -667,4 +668,4 @@ export const KnowledgeFormLocal: React.FunctionComponent = ( ); }; -export default KnowledgeFormLocal; +export default KnowledgeFormGithub; diff --git a/src/components/Contribute/Knowledge/KnowledgeInformation/KnowledgeInformation.tsx b/src/components/Contribute/Knowledge/KnowledgeInformation/KnowledgeInformation.tsx index 677d72df..4dc0435c 100644 --- a/src/components/Contribute/Knowledge/KnowledgeInformation/KnowledgeInformation.tsx +++ b/src/components/Contribute/Knowledge/KnowledgeInformation/KnowledgeInformation.tsx @@ -6,8 +6,8 @@ import { HelperText } from '@patternfly/react-core/dist/dynamic/components/Helpe import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; import ExclamationCircleIcon from '@patternfly/react-icons/dist/dynamic/icons/exclamation-circle-icon'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; -import { KnowledgeFormData } from '..'; import { checkKnowledgeFormCompletion } from '../validation'; +import { KnowledgeFormData } from '@/types'; interface Props { reset: boolean; diff --git a/src/components/Contribute/Knowledge/KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs.tsx b/src/components/Contribute/Knowledge/KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs.tsx index e57cc599..e464529d 100644 --- a/src/components/Contribute/Knowledge/KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs.tsx +++ b/src/components/Contribute/Knowledge/KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs.tsx @@ -2,13 +2,13 @@ import React from 'react'; import { FormFieldGroupHeader, FormGroup, FormHelperText } from '@patternfly/react-core/dist/dynamic/components/Form'; import { TextArea } from '@patternfly/react-core/dist/dynamic/components/TextArea'; import { ExclamationCircleIcon } from '@patternfly/react-icons/dist/dynamic/icons/'; -import { QuestionAndAnswerPair, SeedExample } from '..'; import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; import { HelperText } from '@patternfly/react-core/dist/dynamic/components/HelperText'; import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import { KnowledgeSeedExample, QuestionAndAnswerPair } from '@/types'; interface Props { - seedExample: SeedExample; + seedExample: KnowledgeSeedExample; seedExampleIndex: number; handleContextInputChange: (seedExampleIndex: number, contextValue: string) => void; handleContextBlur: (seedExampleIndex: number) => void; diff --git a/src/components/Contribute/Knowledge/KnowledgeSeedExample/KnowledgeSeedExample.tsx b/src/components/Contribute/Knowledge/KnowledgeSeedExample/KnowledgeSeedExample.tsx index 9649763f..5e984b2b 100644 --- a/src/components/Contribute/Knowledge/KnowledgeSeedExample/KnowledgeSeedExample.tsx +++ b/src/components/Contribute/Knowledge/KnowledgeSeedExample/KnowledgeSeedExample.tsx @@ -3,11 +3,11 @@ import React from 'react'; import { Accordion, AccordionItem, AccordionContent, AccordionToggle } from '@patternfly/react-core/dist/dynamic/components/Accordion'; import { FormFieldGroupHeader } from '@patternfly/react-core/dist/dynamic/components/Form'; import KnowledgeQuestionAnswerPairs from 
'../KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs'; -import { SeedExample } from '..'; +import type { KnowledgeSeedExample } from '@/types'; import ExternalLinkAltIcon from '@patternfly/react-icons/dist/esm/icons/external-link-alt-icon'; interface Props { - seedExamples: SeedExample[]; + seedExamples: KnowledgeSeedExample[]; handleContextInputChange: (seedExampleIndex: number, contextValue: string) => void; handleContextBlur: (seedExampleIndex: number) => void; handleQuestionInputChange: (seedExampleIndex: number, questionAndAnswerIndex: number, questionValue: string) => void; @@ -51,7 +51,7 @@ const KnowledgeSeedExample: React.FC = ({ /> - {seedExamples.map((seedExample: SeedExample, seedExampleIndex: number) => ( + {seedExamples.map((seedExample: KnowledgeSeedExample, seedExampleIndex: number) => ( toggleSeedExampleExpansion(seedExampleIndex)} id={`seed-example-toggle-${seedExampleIndex}`}> Seed Example {seedExampleIndex + 1} {seedExample.immutable && *} diff --git a/src/components/Contribute/Knowledge/Native/DocumentInformation/DocumentInformation.tsx b/src/components/Contribute/Knowledge/Native/DocumentInformation/DocumentInformation.tsx new file mode 100644 index 00000000..3b81185c --- /dev/null +++ b/src/components/Contribute/Knowledge/Native/DocumentInformation/DocumentInformation.tsx @@ -0,0 +1,408 @@ +// src/components/Contribute/Knowledge/Native/DocumentInformation/DocumentInformation.tsx +import React, { useEffect, useState } from 'react'; +import { FormFieldGroupHeader, FormGroup, FormHelperText } from '@patternfly/react-core/dist/dynamic/components/Form'; +import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; +import { TextInput } from '@patternfly/react-core/dist/dynamic/components/TextInput'; +import { Alert, AlertActionLink, AlertActionCloseButton } from '@patternfly/react-core/dist/dynamic/components/Alert'; +import { HelperText } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import ExclamationCircleIcon from '@patternfly/react-icons/dist/dynamic/icons/exclamation-circle-icon'; +import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; +import { Modal, ModalVariant } from '@patternfly/react-core/dist/esm/deprecated/components/Modal/Modal'; +import { UploadFile } from '@/components/Contribute/Knowledge/UploadFile'; +import { checkKnowledgeFormCompletion } from '@/components/Contribute/Knowledge/validation'; +import { KnowledgeFormData } from '@/types'; + +interface Props { + reset: boolean; + isEditForm?: boolean; + knowledgeFormData: KnowledgeFormData; + setDisableAction: React.Dispatch>; + + knowledgeDocumentRepositoryUrl: string; + setKnowledgeDocumentRepositoryUrl: React.Dispatch>; + + knowledgeDocumentCommit: string; + setKnowledgeDocumentCommit: React.Dispatch>; + + documentName: string; + setDocumentName: React.Dispatch>; +} + +interface AlertInfo { + type: 'success' | 'danger' | 'info'; + title: string; + message: string; + link?: string; +} + +const DocumentInformation: React.FC = ({ + reset, + isEditForm, + knowledgeFormData, + setDisableAction, + knowledgeDocumentRepositoryUrl, + setKnowledgeDocumentRepositoryUrl, + knowledgeDocumentCommit, + setKnowledgeDocumentCommit, + documentName, + setDocumentName +}) => { + const [useFileUpload, setUseFileUpload] = useState(true); + const [uploadedFiles, setUploadedFiles] = useState([]); + const [isModalOpen, setIsModalOpen] = useState(false); + 
const [modalText, setModalText] = useState(); + + const [successAlertTitle, setSuccessAlertTitle] = useState(); + const [successAlertMessage, setSuccessAlertMessage] = useState(); + const [successAlertLink, setSuccessAlertLink] = useState(); + + const [failureAlertTitle, setFailureAlertTitle] = useState(); + const [failureAlertMessage, setFailureAlertMessage] = useState(); + const [alertInfo, setAlertInfo] = useState(); + + const [validRepo, setValidRepo] = useState(ValidatedOptions.default); + const [validCommit, setValidCommit] = useState(ValidatedOptions.default); + const [validDocumentName, setValidDocumentName] = useState(ValidatedOptions.default); + + useEffect(() => { + setValidRepo(ValidatedOptions.default); + setValidCommit(ValidatedOptions.default); + setValidDocumentName(ValidatedOptions.default); + }, [reset]); + + useEffect(() => { + if (isEditForm) { + setValidRepo(ValidatedOptions.success); + setValidCommit(ValidatedOptions.success); + setValidDocumentName(ValidatedOptions.success); + } + }, [isEditForm]); + + const validateRepo = (repoStr: string) => { + const repo = repoStr.trim(); + if (repo.length === 0) { + setDisableAction(true); + setValidRepo(ValidatedOptions.error); + return; + } + try { + new URL(repo); + setValidRepo(ValidatedOptions.success); + setDisableAction(!checkKnowledgeFormCompletion(knowledgeFormData)); + return; + } catch (e) { + setDisableAction(true); + setValidRepo(ValidatedOptions.warning); + return; + } + }; + + const validateCommit = (commitStr: string) => { + const commit = commitStr.trim(); + if (commit.length > 0) { + setValidCommit(ValidatedOptions.success); + setDisableAction(!checkKnowledgeFormCompletion(knowledgeFormData)); + return; + } + setDisableAction(true); + setValidCommit(ValidatedOptions.error); + return; + }; + + const validateDocumentName = (document: string) => { + const documentNameStr = document.trim(); + if (documentNameStr.length > 0) { + setValidDocumentName(ValidatedOptions.success); + setDisableAction(!checkKnowledgeFormCompletion(knowledgeFormData)); + return; + } + setDisableAction(true); + setValidDocumentName(ValidatedOptions.error); + return; + }; + + const handleFilesChange = (files: File[]) => { + setUploadedFiles(files); + }; + + const handleDocumentUpload = async () => { + if (uploadedFiles.length > 0) { + const alertInfo: AlertInfo = { + type: 'info', + title: 'Document upload(s) in progress!', + message: 'Document upload(s) is in progress. You will be notified once the upload successfully completes.' 
+ }; + setAlertInfo(alertInfo); + + const fileContents: { fileName: string; fileContent: string }[] = []; + + await Promise.all( + uploadedFiles.map( + (file) => + new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onload = (e) => { + const fileContent = e.target!.result as string; + fileContents.push({ fileName: file.name, fileContent }); + resolve(); + }; + reader.onerror = reject; + reader.readAsText(file); + }) + ) + ); + + if (fileContents.length === uploadedFiles.length) { + try { + const response = await fetch('/api/native/upload', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ files: fileContents }) + }); + + if (response.status === 201) { + const result = await response.json(); + console.log('Files uploaded result:', result); + + setSuccessAlertTitle('Document uploaded successfully!'); + setSuccessAlertMessage('Documents have been uploaded to your repo to be referenced in the knowledge submission.'); + if (result.prUrl && result.prUrl.trim() !== '') { + setSuccessAlertLink(result.prUrl); + } else { + setSuccessAlertLink(undefined); + } + } else { + console.error('Upload failed:', response.statusText); + setFailureAlertTitle('Failed to upload document'); + setFailureAlertMessage(`This upload failed. ${response.statusText}`); + } + } catch (error) { + console.error('Upload error:', error); + setFailureAlertTitle('Failed to upload document'); + setFailureAlertMessage(`This upload failed. ${(error as Error).message}`); + } + } + } + }; + + const onCloseSuccessAlert = () => { + setSuccessAlertTitle(undefined); + setSuccessAlertMessage(undefined); + setSuccessAlertLink(undefined); + }; + + const onCloseFailureAlert = () => { + setFailureAlertTitle(undefined); + setFailureAlertMessage(undefined); + }; + + const handleAutomaticUpload = () => { + if (knowledgeDocumentRepositoryUrl.length > 0 || knowledgeDocumentCommit.length > 0 || documentName.length > 0) { + setModalText('Switching to automatic upload will clear the document information. Are you sure you want to continue?'); + setIsModalOpen(true); + } else { + setUseFileUpload(true); + } + }; + + const handleManualUpload = () => { + if (uploadedFiles.length > 0) { + setModalText('Switching to manual upload will clear the uploaded files. Are you sure you want to continue?'); + setIsModalOpen(true); + } else { + setUseFileUpload(false); + } + }; + + const handleModalContinue = () => { + if (useFileUpload) { + setUploadedFiles([]); + } else { + console.log('Switching to manual entry - clearing repository and document info'); + setKnowledgeDocumentRepositoryUrl(''); + setValidRepo(ValidatedOptions.default); + setKnowledgeDocumentCommit(''); + setValidCommit(ValidatedOptions.default); + setDocumentName(''); + setValidDocumentName(ValidatedOptions.default); + } + setUseFileUpload(!useFileUpload); + setIsModalOpen(false); + }; + + return ( +
+ + Document Information * +

+ ), + id: 'doc-info-id' + }} + titleDescription="Add the relevant document's information" + /> + +
+ + +
+
+ setIsModalOpen(false)} + actions={[ + , + + ]} + > +

{modalText}

+
+ {!useFileUpload ? ( + <> + + setKnowledgeDocumentRepositoryUrl(value)} + onBlur={() => validateRepo(knowledgeDocumentRepositoryUrl)} + /> + {validRepo === ValidatedOptions.error && ( + + + } variant={validRepo}> + Required field + + + + )} + {validRepo === ValidatedOptions.warning && ( + + + } variant="error"> + Please enter a valid URL. + + + + )} + + + setKnowledgeDocumentCommit(value)} + onBlur={() => validateCommit(knowledgeDocumentCommit)} + /> + {validCommit === ValidatedOptions.error && ( + + + } variant={validCommit}> + Valid commit SHA is required. + + + + )} + + + setDocumentName(value)} + onBlur={() => validateDocumentName(documentName)} + /> + {validDocumentName === ValidatedOptions.error && ( + + + } variant={validDocumentName}> + Required field + + + + )} + + + ) : ( + <> + + + + )} + + {/* Informational Alert */} + {alertInfo && ( + setAlertInfo(undefined)} />}> + {alertInfo.message} + {alertInfo.link && ( + + View it here + + )} + + )} + + {/* Success Alert */} + {successAlertTitle && successAlertMessage && ( + } + actionLinks={ + successAlertLink ? ( + + View it here + + ) : null + } + > + {successAlertMessage} + + )} + + {/* Failure Alert */} + {failureAlertTitle && failureAlertMessage && ( + }> + {failureAlertMessage} + + )} +
+ ); +}; + +export default DocumentInformation; diff --git a/src/components/Contribute/Knowledge/Native/KnowledgeQuestionAnswerPairsNative/KnowledgeQuestionAnswerPairs.tsx b/src/components/Contribute/Knowledge/Native/KnowledgeQuestionAnswerPairsNative/KnowledgeQuestionAnswerPairs.tsx new file mode 100644 index 00000000..a0f78b8f --- /dev/null +++ b/src/components/Contribute/Knowledge/Native/KnowledgeQuestionAnswerPairsNative/KnowledgeQuestionAnswerPairs.tsx @@ -0,0 +1,448 @@ +// src/components/Contribute/Knowledge/KnowledgeQuestionAnswerPairs/KnowledgeQuestionAnswerPairs.tsx +import React, { useCallback, useEffect, useRef, useState } from 'react'; +import { FormFieldGroupHeader, FormGroup, FormHelperText } from '@patternfly/react-core/dist/dynamic/components/Form'; +import { TextArea } from '@patternfly/react-core/dist/dynamic/components/TextArea'; +import { ExclamationCircleIcon } from '@patternfly/react-icons/dist/dynamic/icons/'; +import { ValidatedOptions } from '@patternfly/react-core/dist/esm/helpers/constants'; +import { HelperText } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import { HelperTextItem } from '@patternfly/react-core/dist/dynamic/components/HelperText'; +import { KnowledgeSeedExample, QuestionAndAnswerPair } from '@/types'; +import { Modal, ModalVariant } from '@patternfly/react-core/dist/dynamic/components/Modal'; +import { Tooltip } from '@patternfly/react-core/dist/esm/components/Tooltip/Tooltip'; +import { CatalogIcon } from '@patternfly/react-icons/dist/esm/icons/catalog-icon'; +import { Button } from '@patternfly/react-core/dist/dynamic/components/Button'; +import { Spinner } from '@patternfly/react-core/dist/dynamic/components/Spinner'; +import { ExpandableSection } from '@patternfly/react-core/dist/esm/components/ExpandableSection/ExpandableSection'; +import { Content } from '@patternfly/react-core/dist/dynamic/components/Content'; +import { Switch } from '@patternfly/react-core/dist/dynamic/components/Switch'; +import { Card, CardBody, CardHeader } from '@patternfly/react-core/dist/dynamic/components/Card'; +import { Stack, StackItem } from '@patternfly/react-core/dist/dynamic/layouts/Stack'; +import { Alert } from '@patternfly/react-core/dist/dynamic/components/Alert'; + +interface KnowledgeFile { + filename: string; + content: string; + commitSha: string; + commitDate?: string; +} + +interface Props { + seedExample: KnowledgeSeedExample; + seedExampleIndex: number; + handleContextInputChange: (seedExampleIndex: number, contextValue: string) => void; + handleContextBlur: (seedExampleIndex: number) => void; + handleQuestionInputChange: (seedExampleIndex: number, questionAndAnswerIndex: number, questionValue: string) => void; + handleQuestionBlur: (seedExampleIndex: number, questionAndAnswerIndex: number) => void; + handleAnswerInputChange: (seedExampleIndex: number, questionAndAnswerIndex: number, answerValue: string) => void; + handleAnswerBlur: (seedExampleIndex: number, questionAndAnswerIndex: number) => void; + addDocumentInfo: (repositoryUrl: string, commitSha: string, docName: string) => void; + repositoryUrl: string; + commitSha: string; +} + +const KnowledgeQuestionAnswerPairsNative: React.FC = ({ + seedExample, + seedExampleIndex, + handleContextInputChange, + handleContextBlur, + handleQuestionInputChange, + handleQuestionBlur, + handleAnswerInputChange, + handleAnswerBlur, + addDocumentInfo, + repositoryUrl, + commitSha +}) => { + const [isModalOpen, setIsModalOpen] = useState(false); + const [knowledgeFiles, 
setKnowledgeFiles] = useState<KnowledgeFile[]>([]);
+  const [isLoading, setIsLoading] = useState(false);
+  const [error, setError] = useState('');
+  const [expandedFiles, setExpandedFiles] = useState<Record<string, boolean>>({});
+  const [selectedWordCount, setSelectedWordCount] = useState(0);
+  const [showAllCommits, setShowAllCommits] = useState(false);
+
+  // Ref for the <pre> elements to track selections TODO: figure out how to make text expansions taller in PF without a custom-pre
+  const preRefs = useRef<Record<string, HTMLPreElement | null>>({});
+
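+  // Base directory for locally stored taxonomy knowledge docs; falls back to the default path when the env var is unset.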
+  const LOCAL_TAXONOMY_DOCS_ROOT_DIR =
+    process.env.NEXT_PUBLIC_LOCAL_TAXONOMY_DOCS_ROOT_DIR || '/home/yourusername/.instructlab-ui/taxonomy-knowledge-docs';
+
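+  // Fetch the knowledge files committed to the main branch of the local taxonomy docs repo, along with their content and commit metadata.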
+  const fetchKnowledgeFiles = async () => {
+    setIsLoading(true);
+    setError('');
+    try {
+      const response = await fetch('/api/native/git/knowledge-files', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({ branchName: 'main', action: 'diff' })
+      });
+
+      const result = await response.json();
+      if (response.ok) {
+        setKnowledgeFiles(result.files);
+        console.log('Fetched knowledge files:', result.files);
+      } else {
+        setError(result.error || 'Failed to fetch knowledge files.');
+        console.error('Error fetching knowledge files:', result.error);
+      }
+    } catch (err) {
+      setError('An error occurred while fetching knowledge files.');
+      console.error('Error fetching knowledge files:', err);
+    } finally {
+      setIsLoading(false);
+    }
+  };
+
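+  // Opening the modal re-fetches the knowledge files so the list reflects the latest local commits.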
+  const handleOpenModal = () => {
+    setIsModalOpen(true);
+    fetchKnowledgeFiles();
+  };
+
+  const handleCloseModal = () => {
+    setIsModalOpen(false);
+    setKnowledgeFiles([]);
+    setError('');
+    setSelectedWordCount(0);
+    setShowAllCommits(false);
+    window.getSelection()?.removeAllRanges();
+  };
+
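+  // Copy the user's text selection from a knowledge file into this seed example's context field and record the file's path and commit SHA as the document info.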
+  const handleUseSelectedText = (file: KnowledgeFile) => {
+    const selection = window.getSelection();
+    const selectedText = selection?.toString().trim();
+
+    if (!selectedText) {
+      alert('Please select the text you want to use as context.');
+      return;
+    }
+
+    // Point the document info at the selected file inside the local taxonomy docs directory.
+    const repositoryUrlValue = `${LOCAL_TAXONOMY_DOCS_ROOT_DIR}/${file.filename}`;
+    const commitShaValue = file.commitSha;
+    const docName = file.filename;
+
+    console.log(
+      `handleUseSelectedText: selectedText="${selectedText}", repositoryUrl=${repositoryUrlValue}, commitSha=${commitShaValue}, docName=${docName}`
+    );
+
+    handleContextInputChange(seedExampleIndex, selectedText);
+    handleContextBlur(seedExampleIndex);
+    addDocumentInfo(repositoryUrlValue, commitShaValue, docName);
+    handleCloseModal();
+  };
+
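+  // Count the words in the current text selection, but only when the selection falls inside the <pre> element rendering the given file.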
+  const updateSelectedWordCount = (filename: string) => {
+    const selection = window.getSelection();
+    const preElement = preRefs.current[filename];
+    if (selection && preElement) {
+      const anchorNode = selection.anchorNode;
+      const focusNode = selection.focusNode;
+
+      if (preElement.contains(anchorNode) && preElement.contains(focusNode)) {
+        const selectedText = selection.toString().trim();
+        const wordCount = selectedText.split(/\s+/).filter((word) => word.length > 0).length;
+        setSelectedWordCount(wordCount);
+      } else {
+        setSelectedWordCount(0);
+      }
+    }
+  };
+
+  // Attach event listeners for selection changes
+  useEffect(() => {
+    if (isModalOpen) {
+      const handleSelectionChange = () => {
+        // Iterate through all expanded files and update word count
+        Object.keys(expandedFiles).forEach((filename) => {
+          if (expandedFiles[filename]) {
+            updateSelectedWordCount(filename);
+          }
+        });
+      };
+      document.addEventListener('selectionchange', handleSelectionChange);
+      return () => {
+        document.removeEventListener('selectionchange', handleSelectionChange);
+      };
+    } else {
+      setSelectedWordCount(0);
+    }
+  }, [isModalOpen, expandedFiles]);
+
+  const toggleFileContent = (filename: string) => {
+    setExpandedFiles((prev) => ({
+      ...prev,
+      [filename]: !prev[filename]
+    }));
+    console.log(`toggleFileContent: filename=${filename}, expanded=${!expandedFiles[filename]}`);
+  };
+
+  // Group files by commitSha
+  const groupedFiles = knowledgeFiles.reduce<Record<string, KnowledgeFile[]>>((acc, file) => {
+    if (!acc[file.commitSha]) {
+      acc[file.commitSha] = [];
+    }
+    acc[file.commitSha].push(file);
+    return acc;
+  }, {});
+
+  // Extract commit dates for sorting
+  const commitDateMap: Record<string, string> = {};
+  knowledgeFiles.forEach((file) => {
+    if (file.commitDate && !commitDateMap[file.commitSha]) {
+      commitDateMap[file.commitSha] = file.commitDate;
+    }
+  });
+
+  // Sort the commit SHAs based on commitDate in descending order (latest first)
+  const sortedCommitShas = Object.keys(groupedFiles).sort((a, b) => {
+    const dateA = new Date(commitDateMap[a] || '').getTime();
+    const dateB = new Date(commitDateMap[b] || '').getTime();
+    return dateB - dateA;
+  });
+
+  // Enforce single commit SHA and repository URL
+  const isSameCommit = (fileCommitSha: string): boolean => {
+    if (!commitSha) {
+      return true;
+    }
+    return fileCommitSha === commitSha;
+  };
+
+  // Determine which commits to display based on the toggle
+  const commitsToDisplay = showAllCommits ? sortedCommitShas : sortedCommitShas.slice(0, 1);
+
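+  // Memoized callback-ref factory: stores each file's <pre> element by filename so selection tracking knows which file a selection belongs to.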
+  const setPreRef = useCallback(
+    (filename: string) => (el: HTMLPreElement | null) => {
+      preRefs.current[filename] = el;
+    },
+    []
+  );
+
+  return (
+    
+      Select context from your knowledge files
} position="top"> + + + +