diff --git a/README.md b/README.md
index cff63853..957109bf 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,7 @@ them to be used as appliances.
| 🖥️ **[Node Management][]** | Hostname, uptime, OS info, disk, memory, load |
| 🌐 **[Network Management][]** | DNS read/update, ping |
| ⚙️ **[Command Execution][]** | Remote exec and shell across managed hosts |
+| 📁 **[File Management][]** | Upload, deploy, and template files with SHA-based idempotency |
| 📊 **[System Facts][]** | Agent-collected system facts — architecture, kernel, FQDN, CPUs, network interfaces, service/package manager |
| 🔄 **[Agent Lifecycle][]** | Node conditions (memory, disk, load pressure), graceful drain/cordon for maintenance |
| ⚡ **[Async Job System][]** | NATS JetStream with KV-first architecture — broadcast, load-balanced, and label-based routing across hosts |
@@ -42,6 +43,7 @@ them to be used as appliances.
[Node Management]: https://osapi-io.github.io/osapi/sidebar/features/node-management
[Network Management]: https://osapi-io.github.io/osapi/sidebar/features/network-management
[Command Execution]: https://osapi-io.github.io/osapi/sidebar/features/command-execution
+[File Management]: https://osapi-io.github.io/osapi/sidebar/features/file-management
[Async Job System]: https://osapi-io.github.io/osapi/sidebar/features/job-system
[Health]: https://osapi-io.github.io/osapi/sidebar/features/health-checks
[Metrics]: https://osapi-io.github.io/osapi/sidebar/features/metrics
diff --git a/cmd/agent_helpers.go b/cmd/agent_helpers.go
index 2e632a03..4962bd99 100644
--- a/cmd/agent_helpers.go
+++ b/cmd/agent_helpers.go
@@ -28,6 +28,7 @@ import (
"github.com/retr0h/osapi/internal/cli"
"github.com/retr0h/osapi/internal/config"
"github.com/retr0h/osapi/internal/job"
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
)
// setupAgent connects to NATS, creates providers, and builds the agent
@@ -47,6 +48,10 @@ func setupAgent(
providerFactory := agent.NewProviderFactory(log)
hostProvider, diskProvider, memProvider, loadProvider, dnsProvider, pingProvider, netinfoProvider, commandProvider := providerFactory.CreateProviders()
+ // Create file provider if Object Store and file-state KV are configured
+ hostname, _ := job.GetAgentHostname(appConfig.Agent.Hostname)
+ fileProvider := createFileProvider(ctx, log, b, namespace, hostname)
+
a := agent.New(
appFs,
appConfig,
@@ -61,9 +66,46 @@ func setupAgent(
pingProvider,
netinfoProvider,
commandProvider,
+ fileProvider,
b.registryKV,
b.factsKV,
)
return a, b
}
+
+// createFileProvider creates a file provider if Object Store and file-state KV
+// are configured. Returns nil if either is unavailable.
+func createFileProvider(
+ ctx context.Context,
+ log *slog.Logger,
+ b *natsBundle,
+ namespace string,
+ hostname string,
+) fileProv.Provider {
+ if appConfig.NATS.Objects.Bucket == "" || appConfig.NATS.FileState.Bucket == "" {
+ return nil
+ }
+
+ objStoreName := job.ApplyNamespaceToInfraName(namespace, appConfig.NATS.Objects.Bucket)
+ objStore, err := b.nc.ObjectStore(ctx, objStoreName)
+ if err != nil {
+ log.Warn("Object Store not available, file operations disabled",
+ slog.String("bucket", objStoreName),
+ slog.String("error", err.Error()),
+ )
+ return nil
+ }
+
+ fileStateKVConfig := cli.BuildFileStateKVConfig(namespace, appConfig.NATS.FileState)
+ fileStateKV, err := b.nc.CreateOrUpdateKVBucketWithConfig(ctx, fileStateKVConfig)
+ if err != nil {
+ log.Warn("file state KV not available, file operations disabled",
+ slog.String("bucket", fileStateKVConfig.Bucket),
+ slog.String("error", err.Error()),
+ )
+ return nil
+ }
+
+ return fileProv.New(log, appFs, objStore, fileStateKV, hostname)
+}
diff --git a/cmd/api_helpers.go b/cmd/api_helpers.go
index 72254b8a..c664f396 100644
--- a/cmd/api_helpers.go
+++ b/cmd/api_helpers.go
@@ -32,6 +32,7 @@ import (
natsclient "github.com/osapi-io/nats-client/pkg/client"
"github.com/retr0h/osapi/internal/api"
+ "github.com/retr0h/osapi/internal/api/file"
"github.com/retr0h/osapi/internal/api/health"
"github.com/retr0h/osapi/internal/audit"
"github.com/retr0h/osapi/internal/cli"
@@ -62,6 +63,8 @@ type ServerManager interface {
GetMetricsHandler(metricsHandler http.Handler, path string) []func(e *echo.Echo)
// GetAuditHandler returns audit handler for registration.
GetAuditHandler(store audit.Store) []func(e *echo.Echo)
+ // GetFileHandler returns file handler for registration.
+ GetFileHandler(objStore file.ObjectStoreManager) []func(e *echo.Echo)
// RegisterHandlers registers a list of handlers with the Echo instance.
RegisterHandlers(handlers []func(e *echo.Echo))
}
@@ -75,6 +78,7 @@ type natsBundle struct {
registryKV jetstream.KeyValue
factsKV jetstream.KeyValue
stateKV jetstream.KeyValue
+ objStore file.ObjectStoreManager
}
// setupAPIServer connects to NATS, creates the API server with all handlers,
@@ -119,7 +123,7 @@ func setupAPIServer(
sm := api.New(appConfig, log, serverOpts...)
registerAPIHandlers(
sm, b.jobClient, checker, metricsProvider,
- metricsHandler, metricsPath, auditStore,
+ metricsHandler, metricsPath, auditStore, b.objStore,
)
return sm, b
@@ -173,6 +177,19 @@ func connectNATSBundle(
}
}
+ // Create or update Object Store bucket for file management API
+ var objStore file.ObjectStoreManager
+ if appConfig.NATS.Objects.Bucket != "" {
+ objStoreConfig := cli.BuildObjectStoreConfig(namespace, appConfig.NATS.Objects)
+ var objErr error
+ objStore, objErr = nc.CreateOrUpdateObjectStore(ctx, objStoreConfig)
+ if objErr != nil {
+ log.Warn("Object Store not available, file endpoints disabled",
+ slog.String("bucket", objStoreConfig.Bucket),
+ slog.String("error", objErr.Error()))
+ }
+ }
+
jc, err := jobclient.New(log, nc, &jobclient.Options{
Timeout: 30 * time.Second,
KVBucket: jobsKV,
@@ -192,6 +209,7 @@ func connectNATSBundle(
registryKV: registryKV,
factsKV: factsKV,
stateKV: stateKV,
+ objStore: objStore,
}
}
@@ -395,6 +413,7 @@ func registerAPIHandlers(
metricsHandler http.Handler,
metricsPath string,
auditStore audit.Store,
+ objStore file.ObjectStoreManager,
) {
startTime := time.Now()
@@ -409,6 +428,9 @@ func registerAPIHandlers(
if auditStore != nil {
handlers = append(handlers, sm.GetAuditHandler(auditStore)...)
}
+ if objStore != nil {
+ handlers = append(handlers, sm.GetFileHandler(objStore)...)
+ }
sm.RegisterHandlers(handlers)
}
diff --git a/cmd/client_file.go b/cmd/client_file.go
new file mode 100644
index 00000000..c196302e
--- /dev/null
+++ b/cmd/client_file.go
@@ -0,0 +1,35 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "github.com/spf13/cobra"
+)
+
+// clientFileCmd represents the clientFile command.
+var clientFileCmd = &cobra.Command{
+ Use: "file",
+ Short: "The file subcommand",
+}
+
+func init() {
+ clientCmd.AddCommand(clientFileCmd)
+}
diff --git a/cmd/client_file_delete.go b/cmd/client_file_delete.go
new file mode 100644
index 00000000..2f7c7cf8
--- /dev/null
+++ b/cmd/client_file_delete.go
@@ -0,0 +1,64 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientFileDeleteCmd represents the clientFileDelete command.
+var clientFileDeleteCmd = &cobra.Command{
+ Use: "delete",
+ Short: "Delete a file from the Object Store",
+ Long: `Delete a specific file from the OSAPI Object Store.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ name, _ := cmd.Flags().GetString("name")
+
+ resp, err := sdkClient.File.Delete(ctx, name)
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ fmt.Println(string(resp.RawJSON()))
+ return
+ }
+
+ fmt.Println()
+ cli.PrintKV("Name", resp.Data.Name)
+ cli.PrintKV("Deleted", fmt.Sprintf("%v", resp.Data.Deleted))
+ },
+}
+
+func init() {
+ clientFileCmd.AddCommand(clientFileDeleteCmd)
+
+ clientFileDeleteCmd.PersistentFlags().
+ String("name", "", "Name of the file in the Object Store (required)")
+
+ _ = clientFileDeleteCmd.MarkPersistentFlagRequired("name")
+}
diff --git a/cmd/client_file_get.go b/cmd/client_file_get.go
new file mode 100644
index 00000000..b44368e2
--- /dev/null
+++ b/cmd/client_file_get.go
@@ -0,0 +1,65 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientFileGetCmd represents the clientFileGet command.
+var clientFileGetCmd = &cobra.Command{
+ Use: "get",
+ Short: "Get file metadata",
+ Long: `Get metadata for a specific file stored in the OSAPI Object Store.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ name, _ := cmd.Flags().GetString("name")
+
+ resp, err := sdkClient.File.Get(ctx, name)
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ fmt.Println(string(resp.RawJSON()))
+ return
+ }
+
+ fmt.Println()
+ cli.PrintKV("Name", resp.Data.Name)
+ cli.PrintKV("SHA256", resp.Data.SHA256)
+ cli.PrintKV("Size", fmt.Sprintf("%d", resp.Data.Size))
+ },
+}
+
+func init() {
+ clientFileCmd.AddCommand(clientFileGetCmd)
+
+ clientFileGetCmd.PersistentFlags().
+ String("name", "", "Name of the file in the Object Store (required)")
+
+ _ = clientFileGetCmd.MarkPersistentFlagRequired("name")
+}
diff --git a/cmd/client_file_list.go b/cmd/client_file_list.go
new file mode 100644
index 00000000..16ac7aa3
--- /dev/null
+++ b/cmd/client_file_list.go
@@ -0,0 +1,77 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientFileListCmd represents the clientFileList command.
+var clientFileListCmd = &cobra.Command{
+ Use: "list",
+ Short: "List stored files",
+ Long: `List all files stored in the OSAPI Object Store.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ resp, err := sdkClient.File.List(ctx)
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ fmt.Println(string(resp.RawJSON()))
+ return
+ }
+
+ files := resp.Data.Files
+ if len(files) == 0 {
+ fmt.Println("No files found.")
+ return
+ }
+
+ rows := make([][]string, 0, len(files))
+ for _, f := range files {
+ rows = append(rows, []string{
+ f.Name,
+ f.SHA256,
+ fmt.Sprintf("%d", f.Size),
+ })
+ }
+
+ sections := []cli.Section{
+ {
+ Title: fmt.Sprintf("Files (%d)", resp.Data.Total),
+ Headers: []string{"NAME", "SHA256", "SIZE"},
+ Rows: rows,
+ },
+ }
+ cli.PrintCompactTable(sections)
+ },
+}
+
+func init() {
+ clientFileCmd.AddCommand(clientFileListCmd)
+}
diff --git a/cmd/client_file_upload.go b/cmd/client_file_upload.go
new file mode 100644
index 00000000..5710a41d
--- /dev/null
+++ b/cmd/client_file_upload.go
@@ -0,0 +1,96 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+
+ "github.com/spf13/cobra"
+
+ osapi "github.com/osapi-io/osapi-sdk/pkg/osapi"
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientFileUploadCmd represents the clientFileUpload command.
+var clientFileUploadCmd = &cobra.Command{
+ Use: "upload",
+ Short: "Upload a file to the Object Store",
+ Long: `Upload a local file to the OSAPI Object Store for later deployment.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ name, _ := cmd.Flags().GetString("name")
+ filePath, _ := cmd.Flags().GetString("file")
+ contentType, _ := cmd.Flags().GetString("content-type")
+ force, _ := cmd.Flags().GetBool("force")
+
+ f, err := os.Open(filePath)
+ if err != nil {
+ cli.LogFatal(logger, "failed to open file", err)
+ }
+ defer func() { _ = f.Close() }()
+
+ var opts []osapi.UploadOption
+ if force {
+ opts = append(opts, osapi.WithForce())
+ }
+
+ resp, err := sdkClient.File.Upload(ctx, name, contentType, f, opts...)
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ rawJSON := resp.RawJSON()
+ if rawJSON == nil {
+ out, _ := json.Marshal(resp.Data)
+ rawJSON = out
+ }
+ fmt.Println(string(rawJSON))
+ return
+ }
+
+ fmt.Println()
+ cli.PrintKV("Name", resp.Data.Name)
+ cli.PrintKV("SHA256", resp.Data.SHA256)
+ cli.PrintKV("Size", fmt.Sprintf("%d", resp.Data.Size))
+ cli.PrintKV("Changed", fmt.Sprintf("%v", resp.Data.Changed))
+ cli.PrintKV("Content-Type", resp.Data.ContentType)
+ },
+}
+
+func init() {
+ clientFileCmd.AddCommand(clientFileUploadCmd)
+
+ clientFileUploadCmd.PersistentFlags().
+ String("name", "", "Name for the file in the Object Store (required)")
+ clientFileUploadCmd.PersistentFlags().
+ String("file", "", "Path to the local file to upload (required)")
+ clientFileUploadCmd.PersistentFlags().
+		String("content-type", "raw", "File type: raw or template")
+ clientFileUploadCmd.PersistentFlags().
+ Bool("force", false, "Force upload even if file already exists with different content")
+
+ _ = clientFileUploadCmd.MarkPersistentFlagRequired("name")
+ _ = clientFileUploadCmd.MarkPersistentFlagRequired("file")
+}
diff --git a/cmd/client_node_file.go b/cmd/client_node_file.go
new file mode 100644
index 00000000..e4a87377
--- /dev/null
+++ b/cmd/client_node_file.go
@@ -0,0 +1,35 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "github.com/spf13/cobra"
+)
+
+// clientNodeFileCmd represents the clientNodeFile command.
+var clientNodeFileCmd = &cobra.Command{
+ Use: "file",
+ Short: "The file subcommand",
+}
+
+func init() {
+ clientNodeCmd.AddCommand(clientNodeFileCmd)
+}
diff --git a/cmd/client_node_file_deploy.go b/cmd/client_node_file_deploy.go
new file mode 100644
index 00000000..e1cc5cbb
--- /dev/null
+++ b/cmd/client_node_file_deploy.go
@@ -0,0 +1,139 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/osapi-io/osapi-sdk/pkg/osapi"
+ "github.com/spf13/cobra"
+
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientNodeFileDeployCmd represents the clientNodeFileDeploy command.
+var clientNodeFileDeployCmd = &cobra.Command{
+ Use: "deploy",
+ Short: "Deploy a file from Object Store to a host",
+ Long: `Deploy a file from the OSAPI Object Store to the target host's filesystem.
+The file is fetched from the Object Store and written to the specified path.
+SHA-256 idempotency ensures unchanged files are not rewritten.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ host, _ := cmd.Flags().GetString("target")
+ objectName, _ := cmd.Flags().GetString("object")
+ path, _ := cmd.Flags().GetString("path")
+ contentType, _ := cmd.Flags().GetString("content-type")
+ mode, _ := cmd.Flags().GetString("mode")
+ owner, _ := cmd.Flags().GetString("owner")
+ group, _ := cmd.Flags().GetString("group")
+ varFlags, _ := cmd.Flags().GetStringSlice("var")
+
+ if host == "_all" {
+ fmt.Print("This will deploy the file to ALL hosts. Continue? [y/N] ")
+ var confirm string
+ if _, err := fmt.Scanln(&confirm); err != nil || (confirm != "y" && confirm != "Y") {
+ fmt.Println("Aborted.")
+ return
+ }
+ }
+
+ vars := parseVarFlags(varFlags)
+
+ resp, err := sdkClient.Node.FileDeploy(ctx, osapi.FileDeployOpts{
+ Target: host,
+ ObjectName: objectName,
+ Path: path,
+ ContentType: contentType,
+ Mode: mode,
+ Owner: owner,
+ Group: group,
+ Vars: vars,
+ })
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ fmt.Println(string(resp.RawJSON()))
+ return
+ }
+
+ if resp.Data.JobID != "" {
+ fmt.Println()
+ cli.PrintKV("Job ID", resp.Data.JobID)
+ }
+
+ changed := resp.Data.Changed
+ changedPtr := &changed
+ results := []cli.MutationResultRow{
+ {
+ Hostname: resp.Data.Hostname,
+ Changed: changedPtr,
+ },
+ }
+ headers, rows := cli.BuildMutationTable(results, nil)
+ cli.PrintCompactTable([]cli.Section{{Headers: headers, Rows: rows}})
+ },
+}
+
+// parseVarFlags converts a slice of "key=value" strings into a map.
+func parseVarFlags(
+ flags []string,
+) map[string]interface{} {
+ if len(flags) == 0 {
+ return nil
+ }
+
+ vars := make(map[string]interface{}, len(flags))
+ for _, f := range flags {
+ parts := strings.SplitN(f, "=", 2)
+ if len(parts) == 2 {
+ vars[parts[0]] = parts[1]
+ }
+ }
+
+ return vars
+}
+
+func init() {
+ clientNodeFileCmd.AddCommand(clientNodeFileDeployCmd)
+
+ clientNodeFileDeployCmd.PersistentFlags().
+ String("object", "", "Name of the file in the Object Store (required)")
+ clientNodeFileDeployCmd.PersistentFlags().
+ String("path", "", "Destination path on the target filesystem (required)")
+ clientNodeFileDeployCmd.PersistentFlags().
+		String("content-type", "raw", "Content type: raw or template")
+ clientNodeFileDeployCmd.PersistentFlags().
+ String("mode", "", "File permission mode (e.g., 0644)")
+ clientNodeFileDeployCmd.PersistentFlags().
+ String("owner", "", "File owner user")
+ clientNodeFileDeployCmd.PersistentFlags().
+ String("group", "", "File owner group")
+ clientNodeFileDeployCmd.PersistentFlags().
+ StringSlice("var", []string{}, "Template variable as key=value (repeatable)")
+
+ _ = clientNodeFileDeployCmd.MarkPersistentFlagRequired("object")
+ _ = clientNodeFileDeployCmd.MarkPersistentFlagRequired("path")
+}
diff --git a/cmd/client_node_file_status.go b/cmd/client_node_file_status.go
new file mode 100644
index 00000000..c09aba2a
--- /dev/null
+++ b/cmd/client_node_file_status.go
@@ -0,0 +1,80 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+
+ "github.com/retr0h/osapi/internal/cli"
+)
+
+// clientNodeFileStatusCmd represents the clientNodeFileStatus command.
+var clientNodeFileStatusCmd = &cobra.Command{
+ Use: "status",
+ Short: "Check deployment status of a file on a host",
+ Long: `Check the deployment status of a file on the target host.
+Reports whether the file is in-sync, drifted, or missing.`,
+ Run: func(cmd *cobra.Command, _ []string) {
+ ctx := cmd.Context()
+ host, _ := cmd.Flags().GetString("target")
+ path, _ := cmd.Flags().GetString("path")
+
+ resp, err := sdkClient.Node.FileStatus(ctx, host, path)
+ if err != nil {
+ cli.HandleError(err, logger)
+ return
+ }
+
+ if jsonOutput {
+ fmt.Println(string(resp.RawJSON()))
+ return
+ }
+
+ if resp.Data.JobID != "" {
+ fmt.Println()
+ cli.PrintKV("Job ID", resp.Data.JobID)
+ }
+
+ sha := ""
+ if resp.Data.SHA256 != "" {
+ sha = resp.Data.SHA256
+ }
+ results := []cli.ResultRow{
+ {
+ Hostname: resp.Data.Hostname,
+ Fields: []string{resp.Data.Path, resp.Data.Status, sha},
+ },
+ }
+ headers, rows := cli.BuildBroadcastTable(results, []string{"PATH", "STATUS", "SHA256"})
+ cli.PrintCompactTable([]cli.Section{{Headers: headers, Rows: rows}})
+ },
+}
+
+func init() {
+ clientNodeFileCmd.AddCommand(clientNodeFileStatusCmd)
+
+ clientNodeFileStatusCmd.PersistentFlags().
+ String("path", "", "Filesystem path to check (required)")
+
+ _ = clientNodeFileStatusCmd.MarkPersistentFlagRequired("path")
+}
diff --git a/cmd/nats_helpers.go b/cmd/nats_helpers.go
index 2fbce3c5..42e62b9f 100644
--- a/cmd/nats_helpers.go
+++ b/cmd/nats_helpers.go
@@ -180,6 +180,22 @@ func setupJetStream(
}
}
+ // Create Object Store bucket for file content
+ if appConfig.NATS.Objects.Bucket != "" {
+ objStoreConfig := cli.BuildObjectStoreConfig(namespace, appConfig.NATS.Objects)
+ if _, err := nc.CreateOrUpdateObjectStore(ctx, objStoreConfig); err != nil {
+ return fmt.Errorf("create Object Store bucket %s: %w", objStoreConfig.Bucket, err)
+ }
+ }
+
+ // Create file-state KV bucket for deployment SHA tracking
+ if appConfig.NATS.FileState.Bucket != "" {
+ fileStateKVConfig := cli.BuildFileStateKVConfig(namespace, appConfig.NATS.FileState)
+ if _, err := nc.CreateOrUpdateKVBucketWithConfig(ctx, fileStateKVConfig); err != nil {
+ return fmt.Errorf("create file-state KV bucket %s: %w", fileStateKVConfig.Bucket, err)
+ }
+ }
+
// Create DLQ stream
dlqMaxAge, _ := time.ParseDuration(appConfig.NATS.DLQ.MaxAge)
dlqStorage := cli.ParseJetstreamStorageType(appConfig.NATS.DLQ.Storage)
diff --git a/configs/osapi.yaml b/configs/osapi.yaml
index 3a09cd5a..b6ccdf76 100644
--- a/configs/osapi.yaml
+++ b/configs/osapi.yaml
@@ -103,6 +103,17 @@ nats:
storage: file
replicas: 1
+ objects:
+ bucket: file-objects
+ max_bytes: 10737418240
+ storage: file
+ replicas: 1
+
+ file_state:
+ bucket: file-state
+ storage: file
+ replicas: 1
+
telemetry:
tracing:
enabled: true
diff --git a/docs/docs/gen/api/delete-file-by-name.api.mdx b/docs/docs/gen/api/delete-file-by-name.api.mdx
new file mode 100644
index 00000000..589a7ea3
--- /dev/null
+++ b/docs/docs/gen/api/delete-file-by-name.api.mdx
@@ -0,0 +1,624 @@
+---
+id: delete-file-by-name
+title: "Delete a file"
+description: "Delete a file from the Object Store."
+sidebar_label: "Delete a file"
+hide_title: true
+hide_table_of_contents: true
+api: eJztV02P2zYQ/SvEnFqA9nq38UVADg6yC7hI2yDZIIetsRhLI4tZiVTI0a5dQf+9GMrfdpMU6KEL7EmixOF7895IHLaQUUi9qdk4Cwm8pZKYFKrclKRy7yrFBak/5l8oZfWRnachaGBcBEju4MaUdP8bWlxQRZbvJ++n9xJ572ryKGsGmGnYjqbZFkNC36x+x4pAQ6C08YZXkNy18IbQk580XAiELJc8ecMEs26moUaPFTH5ECdbWSDpLxqM5FAjF6DB09fGeMogYd+QPkr0tiAlQcrlMcOYr7Gn2f5pQcNy4LA2g9RltCA7oCV7HPQitPCIpcmQhcYGU1fGvr7UFS5fX43H0GkIaUEVynRe1TI1sDd2ARoqY9+RXUi2lxoqXG5GV+NxJxl7CrWzgSLY1Wgkl8NkREuVRVkzFZo0pRDypixX4lXqLJNlicK6Lk0anbj4EiS0PSXmYvagofbiG5seuBf6lP63Zd2QEnmFDC2xqsvo2MLY5TB1Nhd51vP2AObOlYT2BOFzQVyQ35n2hGEDc4Agrnfdfh3cbcpkgzbTwIYjHZGwL8wPa7mh6yT81TnBpzaa3hOQRf9Docl757+v9ETtjTdyx1jFBbJyadp4f6QJ3KApKVPslCf2hh5JBUZuwrB3gdGU4QfAs8zILZZqHaNw7hrekTgLmzUk0Jb4yfkHxaYi13CElk9rD9dYpgX5s/XVJykBByDj0Uj82hh6LbNOvLw89fKTxYYL581flKmBmryfqgdaqW3ZvBj7HIz95cxf0fm5yTKyaqCmNjR5blJDllVNvjIhxN3pxd3n4O6rf9jzrGOVu8ZmL//f52Dk+NxeGif2O7ixi22n8OLm/9vNTkNFXDjp6Pt+SrSX1juBCzHxopXGqIvdvX/s+/XZrtX/KAb2Hu03/FvOBXMN67459oNxEuj1zY3zFTIk8OvnWxAyxuYuhq8ZTxbyq9+dTWRjBw1CpE/+cjgajkSs2gWu0O6a3MNT0LFq7a40f/i41KfEtOSLukRjBbbxpSzWS9afc0BDEinMNBQusDxv2zkG+uTLrpPHXxvyq17JR/QG55LsXQuZCXKfQZJjGY6PO/ucf/qw7mx+Vv/uEHQ2i/VDtCsRF8tGRqDhgVabY1k36zQUhBn5SLV/NUlTqnkv6OQrl7PPtsTeXr+7vr0GDXhYJkdlEQHOcmrbfsateyDbdVuKLGPh2HV/A5giMbM=
+sidebar_class_name: "delete api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+Delete a file from the Object Store.
+
+
+
+
+
+
+
+ Path Parameters
+
+
+
+
+
+
+
+
+
+ File deleted successfully.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Invalid file name.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized - API key required
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden - Insufficient permissions
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File not found.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Error deleting file.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/file-management-api-file-operations.tag.mdx b/docs/docs/gen/api/file-management-api-file-operations.tag.mdx
new file mode 100644
index 00000000..91f119ee
--- /dev/null
+++ b/docs/docs/gen/api/file-management-api-file-operations.tag.mdx
@@ -0,0 +1,20 @@
+---
+id: file-management-api-file-operations
+title: "File"
+description: "File"
+custom_edit_url: null
+---
+
+
+
+Object Store file management operations.
+
+
+
+```mdx-code-block
+import DocCardList from '@theme/DocCardList';
+import {useCurrentSidebarCategory} from '@docusaurus/theme-common';
+
+
+```
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/get-file-by-name.api.mdx b/docs/docs/gen/api/get-file-by-name.api.mdx
new file mode 100644
index 00000000..64a7e748
--- /dev/null
+++ b/docs/docs/gen/api/get-file-by-name.api.mdx
@@ -0,0 +1,642 @@
+---
+id: get-file-by-name
+title: "Get file metadata"
+description: "Get metadata for a specific file in the Object Store."
+sidebar_label: "Get file metadata"
+hide_title: true
+hide_table_of_contents: true
+api: eJztV0tv20YQ/iuLOSUAZUsK5VgCclCAOFHRNkHsoAdXMIbkUNyY3GV2h7YUgf+9GFJPS4HTojkY8Elcambnewy5nCUk5GOnS9bWwAjeE6uCGBNkVKl1CpUvKdapjlWqc1LaKM5IfYy+Uszqkq2jEwiAceZhdA0XOqebP9DgjAoyfDP+NLmRvBtbkkMp4mEawGY1Sdqikvd28ScWBAF4iiuneQGj6yW8JXTkxhVnsr/sNXKECUzraQAlOiyIyfkm1kj+qP0JQAuhEjmDABx9q7SjBEbsKgoesL7KSEmSsmnD7odM/zYQwLxjsdSd2CY0I9OhOTvstAIs4Q5znSALjHXNoNDmTS8ocP6mPxhAHYCPMypQwnlRSqhnp80MAii0+Z3MTMj2Aihwvl71B4NaGDvypTWemmL9bld+9smIlBsPxZvYGibDEollmeu4Uf70q5fw5SEY2zCGAEonPrFui7XiHkJ+XEoBQXMsyrxxZ6bN/CS2Jm2kyLA/OHt838sP405/cKYy9NmeTSt2+zXoVdSNw7A/PE/jXtwLh5hGaRifD4dnaTTsh/3XSGGPwrNwGA1fhTGGw8Fw2Itenw/60fnKJP19l682TDNyB8AauSVWGiZaMPk9KL1uP6w3Hty0mz1G9oO93xL0ma3yREWk2BEyJSqpJEslVOZ2oV44vFfWKaaizJHpZdulWzEc3kNd7z4D1+tHZKX+iuwDmNMAWHOzhZCcmNR+XnUf1LXsGB7rv4lpnoEWvNT5H3uQnLPucf3Game97pYmV3GGrGwcV85Rst80F6hzShRb5YidpjtSnpErfyLtkBCjzv1PFE8SLZeYq1WOwshWvAVxtGxSkZQ2xPfW3SrWBdmKm9LypvmJTrzakJSEvSKDblf8Wtv5TqIOvOwdevnFYMWZdfo7Jaqjxp8m6pYWatNJz8Y+BWNfHTkkrIt0kpBRHTUxvkpTHWsyrEpyhfa+Oaif3X0K7oY/+AQwllVqK5M8v3+fgpGDY2dpE7iWQ8789Bd93D0b+4uMrQMoiDMrc86MGuFlIhnBqVh5upQPpLqZedxdO8ZMtwPQpbjXGrQ7Bm0AZ8wlrMYJWUdNEASriwvrCmQYwW9/XTVfgNqktklfwR3P5JW/HdfkgIcABEjLvHfSPemKUqX1XKDZzgHNpLjXjQ9lW25787+PlS1PpjmfljlqI1gql8vurY7tSAgBjBpc0wAy61nuL5cRevri8rqW298qcotW3jt0GiNR4HoJifZyncAoxdw/HA13Sbz4vPrsean+3cB4lMXqJpqFKI55JSsI4JYW6xG2ntYBZIQJuQZq+9c4jqnknaSD517mxE3TvX93BQHgfuM8aJRm96OAlss24srekqnrDT6WtQCs638AlluUZQ==
+sidebar_class_name: "get api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+Get metadata for a specific file in the Object Store.
+
+
+
+
+
+
+
+ Path Parameters
+
+
+
+
+
+
+
+
+
+ File metadata.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Invalid file name.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized - API key required
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden - Insufficient permissions
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File not found.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Error retrieving file metadata.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/get-files.api.mdx b/docs/docs/gen/api/get-files.api.mdx
new file mode 100644
index 00000000..8b1729ac
--- /dev/null
+++ b/docs/docs/gen/api/get-files.api.mdx
@@ -0,0 +1,495 @@
+---
+id: get-files
+title: "List stored files"
+description: "List all files stored in the Object Store."
+sidebar_label: "List stored files"
+hide_title: true
+hide_table_of_contents: true
+api: eJztVk1v20YQ/SuLObUALUsy5Zi6qUCcqGjRoFbQgysIS+5Q3JjcZXaHthWB/72YpUx92IBdIAV6yIlcYj7emzfLmS0o9JnTNWlrYAq/aU9ClqXIdYleeLIOldBGUIHij/QLZiRu+OMAIiC59jC9hWtd4up3aeQaKzS0mn2ar9h/ZWt0kiN7WEbQn+YKpvABif08ROAxa5ymDUxvt/ALSodu1lDBoTnM1KFUsGyXETj0tTUePUy3MB4O+fECAZs/IQ80GGtmDaEhdpB1XeosIDn/4tlrCz4rsJL8RpsaYQo2cIUIase4SXc5Q7wDM+mc3EAEmrDyr7sbWeGBlSenzRqiEw6LAgVbMg+uOydlDvgoq7pkR7PW5nGQWZNDG4Ev5Hhy+Xrcm4+zs/HkUhTSF4exxa44xznwIh1mcTxOrvJslI3iROZpHmdXSXKZp8k4Hr+TGI8wvoyTNLmIMxknkyQZpe+uJuP0ajIJyPS3Q77aEK7RPQPGjSDYljst3VAnWQ9lNBzHbS/hqgv2GtmP9mFP0Be2KZVIUZBDSaiEathLKKxLuxE/OfkgrBOEVV1Kwp8Hf5ujYjj5AG3LDfi10Q4V92YQs6/+juwJzGUEpCmEYJJzk1uuy1t6to2ALMnyDfVbsJ0wTZWi40B91/cEJqfg893d61KcwGRAf+6uGrQt+8bD0fPL9tnIhgrr9DdU4kzMPs3FHW5En+e7XTt0zrrXNZ+Jg/NThwdfQYUkYbOscQ7VcaNfS12iEmSFQ3Ia71F4ktR0GigkqUv/huRKaX6Vpdj5CJnahvYgXkyrGuTUBunBujtBukLbUEidWfWW27PoSbLDsezDIYv3pO17tnom7MVzYa+tS7VSaMSZmBvf5LnONBoSNbpKex9+6T/U/f+rO3lpRgZDUWpP/Av83iPyh5r/kZptBBVSYXl7WmMovOQtCc5ZwrBHuXt0vJIt90vVDcvWKXO4WvVIC6KafYMZTCENRhDtXq6tqyTBFH79axEmoOYRxu47nLM1/xf22x9PAYiAgXSUR4PhYMglqq2nSpr9HtQNvsOpd1qv7b4p/+Vq2pEjfKTzupTaMIDGhWHaVa0bgTz5CuuJz9ttKj1+dmXb8uevDbpNV8t76bRMme7tso2gQKnQhW31DjdcgyzDmgW5l2UT1sLT+8PLay/eh/cLiEAe63BS9xD9ack0m4PY221nsbB3aNoWoh0I4jO0y7Zt/wF3Hg3w
+sidebar_class_name: "get api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+List all files stored in the Object Store.
+
+
+
+
+
+
+
+
+ List of stored files.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+ files
+
+ object[]
+
+
+
+ required
+
+
+
+
+
+
+ List of stored files.
+
+
+
-
+
+ Array [
+
+
+
+
+
+
+
+
+
+ -
+
+ ]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized - API key required
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden - Insufficient permissions
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Error listing files.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/post-file.api.mdx b/docs/docs/gen/api/post-file.api.mdx
new file mode 100644
index 00000000..7ffd030f
--- /dev/null
+++ b/docs/docs/gen/api/post-file.api.mdx
@@ -0,0 +1,724 @@
+---
+id: post-file
+title: "Upload a file"
+description: "Upload a file to the Object Store."
+sidebar_label: "Upload a file"
+hide_title: true
+hide_table_of_contents: true
+api: eJztWN9vGzcM/lcIPW3AxXFcO409DEM6LGsGFA2aBH1IgoB34vnUnKWrpIvjGve/D5Tu/CNx1j6sQAvkJfBFoviRH0lRXApJLrOq8spoMRGXVWlQAkKuSgJvwBcE79NPlHk498ZSTyTC49SJyZU4USXdvkONU5qR9rfHZ6e3LHdrKrLIJzpxk4jV16kUE3FmnGdBkQhHWW2VX4jJ1VK8IbRkj2tf8NF8zGRulSdx09wkokKLM/JkXdiscUZiInJjMz5IMfTPNdmFSISlz7WyJMUkx9JR8sjCjwVp8LamBNJFhc4FE6WakvOQFZTdAWoJWM5x4SBACDsYUQ8+kK+tdpAVqKckf+eDwNIUrSzJOTA5zAvyBdkglBntSXuQKs/JOsitmYUFelDOKz0FE3zbu9bsj6ygGYrJUvhFxfalxpSEvCQpx7r0rU0Nu4TtJOffGLlgkW0rL1rETGEdOO1tuSY4QLTwWHxWl15VaP1+buxsT6IPQJ5AinhFIirLvHpFjlcjIatdzlulp+Kx7xkV72Q3dT4FpXcGGT3grCr5MD1V+qGXGZ2LZoX5Nmr6msa3Zr7W5ApTlxJSAm8JPUmQNUuBpKo0ix5cC4vzaxFZd5Au+C+6PeV+g2vhaVaV6OlagCUtmc+58gX8bcDTg9/v1mP8TJn3HDPvIrkrBlkH26frGUd6/OpkxU2ThODfZRozg3xCqjSGWH+G9NZHPdE0m6RfRZpaBTdNE5ddZbSLNA76B09jibO1DSKS4OosI+fyuiwXzNNGDGFVlSoLqb7/ybHs9w6gFaqQnf8RNa7Awejw6wrO3x7vDUaHUKArtqJ05dJNHfQq7WfD4WB8lGcH2cFwjHmaD7Oj8fgwT8eD4eA10vCAhofDcTp+NcxwOB6Nxwfp66PRID0ajQIy9WXTcKU9Tck+ARZI4L2cLyEwt6Ac9AdDTo5Ylp4pIY/K4KpKbRrYVbYenHCl4Wr2ND8BS0soF1BQKQF1W8ViOvBmxxx1voy1NWbBCjAXoO+azfCLxTkYC11q/foIQci8ZxKkDZeWnbVfHwG+SYRXPhzG/MTL80ObUCLm17Dff5pSp/oeSyWhreFQ4aKr0f9TPpG1xn7do8ew8d0FfJAFX6AHk2W1tSS34/4EVUmS7xZL3iq6J3Aefe16HNGSPKrSfYNyKRX/xBJaGcDU1H4NYqdaWYdrTZOfG3sHXs3I1L4XLwf5Lcl0sTKSBbaUjPp9Zq2j9S/e9YTRHUXyUmPtC2PVF5KwB8dnp3BHC1jF1guxPwOxr3bcfsamSkrSsAen2tV5rjLFhbIiO1POhS73hd2fgd3xM71Nd5uFprxt6mLHHi7E9uaHS0fwR3hyxLbfGzD3ZEOzGO+WlyD40YNgtOs2DhvbXpIbiK6ZfKHzx6azScSMfGF4rFAZFzyPPD8Q+3k3YbD3cWZwsx43nDNvkZrNocMKauF91b3GQwsdNomk/XHSvcL++XgR+kelcxPEW6DH4em3notwMyASwUCizQe9fq/PPmLQM9Tr98/2/OWxs5briPzGQU00KLxOqxKVZqW1Lfmo6Kmr9i2YiIIdOLkSy2WKji5t2TT87zhV4ZmLVA7T8rm5yia2H3jEstMjd7TYGCbdY1nzHsFDlnu0iq3mCGoSURBKssEdUejPqH3vgs9dy+6apjRJJ3ScZVT5je1PqgurXoX22fvzC46+dtAzC4kVhgHh3GT9M4LE7XB+FL4BfLuEerGBYrmMOy7MHemmEZ1jPH+LhucF/wKdJ/xv
+sidebar_class_name: "post api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+Upload a file to the Object Store.
+
+
+
+
+
+
+
+ Query Parameters
+
+
+
+
+
+
+
+ Body
+
+ required
+
+
+
+
+
+ The file to upload.
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File uploaded successfully.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Invalid request payload.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized - API key required
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden - Insufficient permissions
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File already exists with different content. Use ?force=true to overwrite.
+
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Error uploading file.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/post-node-file-deploy.api.mdx b/docs/docs/gen/api/post-node-file-deploy.api.mdx
new file mode 100644
index 00000000..c62c0f0f
--- /dev/null
+++ b/docs/docs/gen/api/post-node-file-deploy.api.mdx
@@ -0,0 +1,681 @@
+---
+id: post-node-file-deploy
+title: "Deploy a file from Object Store to the host"
+description: "Deploy a file from Object Store to the host"
+sidebar_label: "Deploy a file from Object Store to the host"
+hide_title: true
+hide_table_of_contents: true
+api: eJztWM1u20gMfpUBTwmgOE632YOBHtJtC2Sx2wZtih5iw6AlRppEmlFnqDiuIWAfYp9wn2TBGck/sXebFi3QQy+JNSI5nO8jKXKWYGtyyNqa8wxGcGE9v7YZvdIlvaC6tAtIgDH3MLoCeTH9Ew3mVJHh6dnF+dTI2sqGh0kCntLGaV7A6GoJzwkdubOGC7FwrUsazZ1mgkk7SaBGhxUxOR+EDVYEIyis5/AzAW1gBDVyAQk4+thoRxmM2DWUQEY+dbqWfWEEl+hyYoU5GVa9hUQ58uTuKFPONqxNru6wbEgdTNEsEjXFsjxMlHWqxBmVylNJKVunDm5pMQqih4OxgQTujyzW+ii1GeVkjuieHR5FYJZwh6XOkMX33smk0ubZSRLeTDn4Bm0CPi2oQtHhRS3ynp02OSRQafMHmVxwOmkFG7FEnp/bbCHyD0+fWsNkWF5hXZc6DQQc33hBY7m7kZ3dUMqQQO2ELtYUXI/L04j8rlfbGL/GipS9VlyQEiqVNuH3m2BEvWPraPD1YFV4/+zJ6Sm0bRI5/6xDL8izNuHkSjSUjQ5FwIOPfuGZqq/3KnhT2ewR8EjSqJpcpb0Xj0RLHdAgHyRqDMNfnz4dw+FAwsDODblHGgyyqvHkgmrubFN/kWrQCLpdzEyj6udM/BallUipf/76W43B4XwMki5jYKrqEpnGINiSaSpJcIdzKRjdO6kGX4y6NWSvnzmcq5UZoeAOnd8Xzg+qQKei7tBpnJXk1bwgozZPrrTf8R+zTIsJLC820kMyTfZe597VVsIkfWnaAnaSAGsuqWMhltG3MZuhbaNBX1vjYwo+GT6Rf3v4y4KqurEzhWlKNVM2gG+W+jd2NtXZ5+PgsiB1/qLP+9QRMmXiVIipVbF+lJ1YnrlAVrWzKXlPWTDbm0sLNDltejWztiQ0O+Y+FMQFuXUxmqNXmHKDZblQ8pFhMgN4yF936mTzK9Nv+h/MRa466p4Oh7tsnZsQyUqbuuFvSBE5Zx9RJ87UxnNPVNCNUNs0bZyLwUP3WNXxiKhLQd8qR+w03ZHyjNz4QERGjLr0j9h8lTqq01E4sw2vndi7bdaQbG2I59bdKtYVWcEulKmtaqsNU05ubzzFQ4rC1ianw6Fw1ZP5UqR2eDzZ5fG9wYYL6/Snb5ppP2n8jjT+sqd4WjfTWSbp/5PDH5/D0/0llcnJSVZ+/yTyxyay3e3OQ/uC8fN87Wy1NSmIq3Ie+RDLAERcWJlB6/gcJwA4lgnzeNl/rdtjMXac9bNpmO7C+Lgxeb6TUIhsb86fq9MXzDV0w1hoMYIQJN2PV9ZVyDCC3z9chv5BQuztehx72UPyYHxas9n5vh7uAvrr567/Xy90Xf16YbtV7/rqrgcWl7S5tuFEHR1nobFaD+fq7OJcNMj5SMbJYDgYSiQIvBWGlOnc/jKetjheQcp0z8d1idrIHo0r5V2E4QqEQkhgtNFyyVbBWOBxEtsxkV0uZ+jpvSvbVpY/NuQWkd2+pQ9XBZn28juD0TWW/uFtwHJdLODgbdf9HarvfEewF4puEc0iEFg28gQJ3NJi86qjnUgvTZhJXFwtu9fdCHZ0KUbW6jvlr016jbMwJ/yv7GQj1y7evLuUuO8uGbo4jdEmf4Orto53O3ILIWtLKNHkDeYiG21KSOJ2kj1IqnCqvWAsl1Hi0t6SadsVNizPAkzb/gvzTXnK
+sidebar_class_name: "post api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+Deploy a file from Object Store to the host
+
+
+
+
+
+
+
+ Path Parameters
+
+
+
+
+
+
+
+ Body
+
+ required
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ vars
+
+ object
+
+
+
+
+
+
+ Template variables when content_type is "template".
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File deploy job accepted.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Invalid input.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Internal error.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/post-node-file-status.api.mdx b/docs/docs/gen/api/post-node-file-status.api.mdx
new file mode 100644
index 00000000..35821209
--- /dev/null
+++ b/docs/docs/gen/api/post-node-file-status.api.mdx
@@ -0,0 +1,606 @@
+---
+id: post-node-file-status
+title: "Check deployment status of a file on the host"
+description: "Check deployment status of a file on the host"
+sidebar_label: "Check deployment status of a file on the host"
+hide_title: true
+hide_table_of_contents: true
+api: eJztV9tu4zYQ/RVinhJAjp202QcBffBud9EUvQSJF32wDYMWxxZjidSSoySuIKAf0S/slxRDSb7EbpMWW2AL5EmixLmdc0TNVGALdJK0NVcKYri2nn6yCj/oDG9JUukhApJLD/EY+MXsR2nkEnM0NBteX80MP9v48DCNwGNSOk1riMcVvEXp0A1LStnDQmcYO5QKpvU0gkI6mSOh82GvkTlCDKn1FG4j0AZiKCSlEIHDT6V2qCAmV2IECn3idMFhIYaRdEskIZdoSHQeIuHQo7tHJZwtSZuluJdZieJkJs06EjOZZaeRsE5kco6Z8JhhQtaJkxWu47D19GxiIILHnpWF7iVW4RJNDx/JyV6DSwX3MtNKEufeJRnl2nxzHoU3Mwq5QR2BT1LMJdvQuuD9npw2S4gg1+YHNEuG6bxmbNgTenpr1Zr3P60+sYbQEL+SRZHpJODfv/OMRnUYyM7vMCGIoHDMFmkMqQdsj6SzDy6rwa89YS7YQJAVSYrJ6uxfIwN1Xe9SOm4ymUZAmjJsYzYKvGmQYJNg4wtrfJP+xWDAl8NshQ+mnOBnQurOzmdaPY/VKEVx9a2wC0EpisShJFTizs7PWAAbbb/IT6NmSiWJwtkEvUcV3HbuXkYfu1rsUxisG4xeRn8AFMUfv/0uJqBNz69NMoFITEA5vSBUvLBOTCDX3muznEATI5UXl2+ej/GudI6Lvf1u2Lu4fNPhx2kLa4TSfnUGTzXTchLtHhntadHW9heCaiTUKurrYyK6MkG7QpuipM8oI3TOuufhGIqddQdGsG3kYJOEAVOcGT7KvGhKlDpjhVjhkJzG++13UHMEkjp7Ad9DpTTfyky0NkLObUnbJI6GVSVyaIP0YN1KkM7RMnY1g6d2Ja8N4RLdUaE2RbLBXpDLwYC56sh8z7sOeDw/5PGjkSWl1ulfG7BeafzyafzqyJlu3VwrheaVw/8Dh5fHj1RCx5Vs8n4l8ssmsj74S3PXJxQWmV3zCNBCwWDKzb+aK+IfMne1SKnluaJo1k2/An2eGvpV99eu+2za9928EVr2MBPsTBO3LIaG792ZYlN/SlRA22Hzeh42QdTefLAulwQxfP/LKPQRLLKbbY/9vgOl64k7ImueQhY2RGqBGoa2bDsIieH1FUTAOTcwnZ8NzgahP7OechnE3A43/xTBPfw3xRI+Ur/IpDYcpXTZNu8xMLgQQbzTFLHjvaYoeI/HUFVz6fGjy+qaH38q0a0b3O+l03LO9Y4rUNrzvYJ4ITP/dPiqth8ynNy0/dmp+I9HsqNQtA+lWTMhvBtigAhWuN6dLOsp9+IoFbpQX/P6XVNFb8ROtuYHR1MddRbDJMGC/nbvdOcruP75dsSKbGe6PHzB4OQDj7fyoUnVFs0kzUMfP6sgk2ZZyiXvbXyyfuW+/J/IPVR1FIyqanaM7ApNXW+wIV4zMHX9J25Nkv4=
+sidebar_class_name: "post api-method"
+info_path: gen/api/agent-management-api
+custom_edit_url: null
+---
+
+import ApiTabs from "@theme/ApiTabs";
+import DiscriminatorTabs from "@theme/DiscriminatorTabs";
+import MethodEndpoint from "@theme/ApiExplorer/MethodEndpoint";
+import SecuritySchemes from "@theme/ApiExplorer/SecuritySchemes";
+import MimeTabs from "@theme/MimeTabs";
+import ParamsItem from "@theme/ParamsItem";
+import ResponseSamples from "@theme/ResponseSamples";
+import SchemaItem from "@theme/SchemaItem";
+import SchemaTabs from "@theme/SchemaTabs";
+import Heading from "@theme/Heading";
+import OperationTabs from "@theme/OperationTabs";
+import TabItem from "@theme/TabItem";
+
+
+
+
+
+
+
+
+
+
+Check deployment status of a file on the host
+
+
+
+
+
+
+
+ Path Parameters
+
+
+
+
+
+
+
+ Body
+
+ required
+
+
+
+
+
+
+
+
+
+
+
+
+
+ File status.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Invalid input.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unauthorized.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Forbidden.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Internal error.
+
+
+
+
+
+
+
+
+
+
+ Schema
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/docs/gen/api/sidebar.ts b/docs/docs/gen/api/sidebar.ts
index 9309eedc..aa4cb467 100644
--- a/docs/docs/gen/api/sidebar.ts
+++ b/docs/docs/gen/api/sidebar.ts
@@ -84,6 +84,40 @@ const sidebar: SidebarsConfig = {
},
],
},
+ {
+ type: "category",
+ label: "File",
+ link: {
+ type: "doc",
+ id: "gen/api/file-management-api-file-operations",
+ },
+ items: [
+ {
+ type: "doc",
+ id: "gen/api/post-file",
+ label: "Upload a file",
+ className: "api-method post",
+ },
+ {
+ type: "doc",
+ id: "gen/api/get-files",
+ label: "List stored files",
+ className: "api-method get",
+ },
+ {
+ type: "doc",
+ id: "gen/api/get-file-by-name",
+ label: "Get file metadata",
+ className: "api-method get",
+ },
+ {
+ type: "doc",
+ id: "gen/api/delete-file-by-name",
+ label: "Delete a file",
+ className: "api-method delete",
+ },
+ ],
+ },
{
type: "category",
label: "Health",
@@ -202,6 +236,18 @@ const sidebar: SidebarsConfig = {
label: "Retrieve uptime",
className: "api-method get",
},
+ {
+ type: "doc",
+ id: "gen/api/post-node-file-deploy",
+ label: "Deploy a file from Object Store to the host",
+ className: "api-method post",
+ },
+ {
+ type: "doc",
+ id: "gen/api/post-node-file-status",
+ label: "Check deployment status of a file on the host",
+ className: "api-method post",
+ },
],
},
{
diff --git a/docs/docs/sidebar/architecture/architecture.md b/docs/docs/sidebar/architecture/architecture.md
index 9ef80d87..c9ddbd59 100644
--- a/docs/docs/sidebar/architecture/architecture.md
+++ b/docs/docs/sidebar/architecture/architecture.md
@@ -142,6 +142,7 @@ configure them — see the Features section:
load
- [Network Management](../features/network-management.md) — DNS, ping
- [Command Execution](../features/command-execution.md) — exec, shell
+- [File Management](../features/file-management.md) — upload, deploy, templates
- [Job System](../features/job-system.md) — async job processing and routing
- [Audit Logging](../features/audit-logging.md) — API audit trail and export
- [Health Checks](../features/health-checks.md) — liveness, readiness, status
diff --git a/docs/docs/sidebar/features/file-management.md b/docs/docs/sidebar/features/file-management.md
new file mode 100644
index 00000000..d8bab436
--- /dev/null
+++ b/docs/docs/sidebar/features/file-management.md
@@ -0,0 +1,163 @@
+---
+sidebar_position: 9
+---
+
+# File Management
+
+OSAPI can upload files to a central Object Store and deploy them to managed
+hosts with SHA-based idempotency. File operations run through the
+[job system](job-system.md), so the API server never writes to the filesystem
+directly -- agents handle all deployment.
+
+## What It Does
+
+| Operation | Description |
+| --------- | ------------------------------------------------------ |
+| Upload | Store a file (base64-encoded) in the NATS Object Store |
+| List | List all files stored in the Object Store |
+| Get | Retrieve metadata for a specific stored file |
+| Delete | Remove a file from the Object Store |
+| Deploy | Deploy a file from Object Store to agent filesystem |
+| Status | Check whether a deployed file is in-sync or drifted |
+
+**Upload / List / Get / Delete** manage files in the central NATS Object Store.
+Files are stored by name and tracked with SHA-256 checksums. These operations
+are synchronous REST calls -- they do not go through the job system.
+
+**Deploy** creates an asynchronous job that fetches the file from the Object
+Store and writes it to the target path on the agent's filesystem. Deploy
+supports optional file permissions (mode, owner, group) and Go template
+rendering.
+
+**Status** creates an asynchronous job that compares the current file on disk
+against its expected SHA-256 from the file-state KV bucket. It reports one of
+three states: `in-sync`, `drifted`, or `missing`.
+
+## How It Works
+
+### File Upload Flow
+
+1. The CLI (or SDK) computes a SHA-256 of the local file and calls
+ `GET /file/{name}` to check whether the Object Store already holds the same
+ content. If the SHA matches, the upload is skipped entirely (no bytes sent
+ over the network).
+2. If the file is new (404) or the SHA differs, the CLI sends the file via
+ multipart `POST /file`.
+3. On the server side, if a file with the same name already exists and the
+ content differs, the server rejects the upload with **409 Conflict** unless
+ `?force=true` is passed.
+4. If the content is identical, the server returns `changed: false` without
+ rewriting the object.
+5. With `--force`, both the SDK pre-check and the server-side digest guard are
+   bypassed -- the file is always written and `changed: true` is returned.
+
+### File Deploy Flow
+
+1. The CLI posts a deploy request specifying the Object Store file name, target
+ path, and optional permissions.
+2. The API server creates a job and publishes it to NATS.
+3. An agent picks up the job, fetches the file from Object Store, and computes
+ its SHA-256.
+4. The agent checks the file-state KV for a previous deploy. If the SHA matches,
+ the file is skipped (idempotent no-op).
+5. If the content differs, the agent writes the file to disk and updates the
+ file-state KV with the new SHA-256.
+6. The result (changed, SHA-256, path) is written back to NATS KV.
+
+You can target a specific host, broadcast to all hosts with `_all`, or route by
+label.
+
+### SHA-Based Idempotency
+
+Every deploy operation computes a SHA-256 of the file content and compares it
+against the previously deployed SHA stored in the file-state KV bucket. If the
+hashes match, the file is not rewritten. This makes repeated deploys safe and
+efficient -- only actual changes hit the filesystem.
+
+The file-state KV has no TTL, so deploy state persists indefinitely until
+explicitly removed.
+
+## Template Rendering
+
+When `content_type` is set to `template`, the file content is processed as a Go
+`text/template` before being written to disk. The template context provides
+three top-level fields:
+
+| Field | Description |
+| ----------- | -------------------------------------- |
+| `.Facts` | Agent's collected system facts (map) |
+| `.Vars` | User-supplied template variables (map) |
+| `.Hostname` | Target agent's hostname (string) |
+
+### Example Template
+
+A configuration file that adapts to each host:
+
+```text
+# Generated for {{ .Hostname }}
+listen_address = {{ .Vars.listen_address }}
+workers = {{ .Facts.cpu_count }}
+arch = {{ .Facts.architecture }}
+```
+
+Deploy it with template variables:
+
+```bash
+osapi client node file deploy \
+  --object app.conf.tmpl \
+ --path /etc/app/app.conf \
+ --content-type template \
+ --var listen_address=0.0.0.0:8080 \
+ --target _all
+```
+
+Each agent renders the template with its own facts and hostname, so the same
+template produces host-specific configuration across a fleet.
+
+## Configuration
+
+File management uses two NATS infrastructure components in addition to the
+general job infrastructure:
+
+- **Object Store** (`nats.objects`) -- stores uploaded file content. Configured
+ with bucket name, max size, storage backend, and chunk size.
+- **File State KV** (`nats.file_state`) -- tracks deploy state (SHA-256, path,
+ timestamps) per host. Has no TTL -- state persists until explicitly removed.
+
+See [Configuration](../usage/configuration.md) for the full reference.
+
+```yaml
+nats:
+ objects:
+ bucket: 'file-objects'
+ max_bytes: 104857600
+ storage: 'file'
+ replicas: 1
+ max_chunk_size: 262144
+
+ file_state:
+ bucket: 'file-state'
+ storage: 'file'
+ replicas: 1
+```
+
+## Permissions
+
+| Endpoint | Permission |
+| ----------------------------------- | ------------ |
+| `POST /file` (upload) | `file:write` |
+| `GET /file` (list) | `file:read` |
+| `GET /file/{name}` (get) | `file:read` |
+| `DELETE /file/{name}` (delete) | `file:write` |
+| `POST /node/{hostname}/file/deploy` | `file:write` |
+| `POST /node/{hostname}/file/status` | `file:read` |
+
+The `admin` and `write` roles include both `file:read` and `file:write`. The
+`read` role includes only `file:read`.
+
+## Related
+
+- [System Facts](system-facts.md) -- facts available in template context
+- [Job System](job-system.md) -- how async job processing works
+- [Authentication & RBAC](authentication.md) -- permissions and roles
+- [Architecture](../architecture/architecture.md) -- system design overview
diff --git a/docs/docs/sidebar/usage/cli/client/file/delete.md b/docs/docs/sidebar/usage/cli/client/file/delete.md
new file mode 100644
index 00000000..2b0b976e
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/file/delete.md
@@ -0,0 +1,25 @@
+# Delete
+
+Delete a file from the Object Store:
+
+```bash
+$ osapi client file delete --name app.conf
+
+ Name: app.conf
+ Deleted: true
+```
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client file delete --name app.conf --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| ------------ | --------------------------------------------------- | ------- |
+| `--name` | Name of the file in the Object Store (**required**) | |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/cli/client/file/file.mdx b/docs/docs/sidebar/usage/cli/client/file/file.mdx
new file mode 100644
index 00000000..7df603bd
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/file/file.mdx
@@ -0,0 +1,8 @@
+# File
+
+CLI for managing files in the OSAPI Object Store — upload, list, get metadata,
+and delete.
+
+import DocCardList from '@theme/DocCardList';
+
+
diff --git a/docs/docs/sidebar/usage/cli/client/file/get.md b/docs/docs/sidebar/usage/cli/client/file/get.md
new file mode 100644
index 00000000..8fa64eb1
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/file/get.md
@@ -0,0 +1,26 @@
+# Get
+
+Get metadata for a specific file in the Object Store:
+
+```bash
+$ osapi client file get --name app.conf
+
+ Name: app.conf
+ SHA256: a1b2c3d4e5f6...
+ Size: 1234
+```
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client file get --name app.conf --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| ------------ | --------------------------------------------------- | ------- |
+| `--name` | Name of the file in the Object Store (**required**) | |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/cli/client/file/list.md b/docs/docs/sidebar/usage/cli/client/file/list.md
new file mode 100644
index 00000000..7495f963
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/file/list.md
@@ -0,0 +1,35 @@
+# List
+
+List all files stored in the OSAPI Object Store:
+
+```bash
+$ osapi client file list
+
+ Files (3)
+ NAME SHA256 SIZE
+ app.conf a1b2c3d4e5f6... 1234
+ app.conf.tmpl f6e5d4c3b2a1... 567
+ nginx.conf 1a2b3c4d5e6f... 2048
+```
+
+When no files are stored:
+
+```bash
+$ osapi client file list
+
+ No files found.
+```
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client file list --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| ------------ | ------------------------ | ------- |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/cli/client/file/upload.md b/docs/docs/sidebar/usage/cli/client/file/upload.md
new file mode 100644
index 00000000..71ad8b51
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/file/upload.md
@@ -0,0 +1,46 @@
+# Upload
+
+Upload a local file to the OSAPI Object Store for later deployment:
+
+```bash
+$ osapi client file upload --name app.conf --file /tmp/app.conf
+
+ Name: app.conf
+ SHA256: a1b2c3d4e5f6...
+ Size: 1234
+ Changed: true
+ Content-Type: raw
+```
+
+Upload a template file:
+
+```bash
+$ osapi client file upload --name app.conf.tmpl --file /tmp/app.conf.tmpl \
+ --content-type template
+```
+
+Re-uploading the same content is a no-op (`Changed: false`). If the file already
+exists with different content, the upload is rejected with a 409 Conflict. Use
+`--force` to overwrite:
+
+```bash
+$ osapi client file upload --name app.conf --file /tmp/app.conf --force
+```
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client file upload --name app.conf --file /tmp/app.conf --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| ---------------- | ---------------------------------------------------- | ------- |
+| `--name` | Name for the file in the Object Store (**required**) | |
+| `--file` | Path to the local file to upload (**required**) | |
+| `--content-type` | File type: `raw` or `template` | `raw` |
+| `--force` | Force upload even if file exists with different data | |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/cli/client/node/file/deploy.md b/docs/docs/sidebar/usage/cli/client/node/file/deploy.md
new file mode 100644
index 00000000..f308ad84
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/node/file/deploy.md
@@ -0,0 +1,144 @@
+# Deploy
+
+Deploy a file from the Object Store to the target host's filesystem. SHA-256
+idempotency ensures unchanged files are not rewritten.
+
+```bash
+$ osapi client node file deploy --object app.conf --path /etc/app/app.conf
+
+ Job ID: 550e8400-e29b-41d4-a716-446655440000
+ Hostname: server1
+ Changed: true
+```
+
+Deploy with file permissions:
+
+```bash
+$ osapi client node file deploy \
+ --object app.conf \
+ --path /etc/app/app.conf \
+ --mode 0644 \
+ --owner root \
+ --group root
+```
+
+Deploy a template with variables. Each agent renders the template with its own
+facts and hostname:
+
+```bash
+$ osapi client node file deploy \
+ --object app.conf.tmpl \
+ --path /etc/app/app.conf \
+ --content-type template \
+ --var listen_address=0.0.0.0:8080 \
+ --var max_workers=16 \
+ --target _all
+```
+
+## Template Rendering
+
+When `--content-type template` is set, file content is processed as a Go
+[text/template](https://pkg.go.dev/text/template) before being written to disk.
+The template context provides three top-level fields:
+
+| Field | Type | Description |
+| ----------- | ---------------- | ------------------------------------------ |
+| `.Facts` | `map[string]any` | Agent's collected system facts |
+| `.Vars` | `map[string]any` | User-supplied variables from `--var` flags |
+| `.Hostname` | `string` | Target agent's hostname |
+
+### Available Facts
+
+Facts are collected automatically by each agent and include all fields from the
+agent's fact registration: `architecture`, `kernel_version`, `cpu_count`,
+`fqdn`, `service_mgr`, `package_mgr`, `primary_interface`, `interfaces`,
+`routes`, plus any custom facts. Access them with `index`:
+
+```text
+arch={{ index .Facts "architecture" }}
+cpus={{ index .Facts "cpu_count" }}
+fqdn={{ index .Facts "fqdn" }}
+```
+
+### Template Examples
+
+Simple variable substitution:
+
+```text
+listen = {{ .Vars.listen_address }}
+workers = {{ .Vars.max_workers }}
+```
+
+Conditionals:
+
+```text
+{{ if eq .Vars.env "prod" }}
+log_level = warn
+{{ else }}
+log_level = debug
+{{ end }}
+```
+
+Host-specific configuration using facts:
+
+```text
+# Generated for {{ .Hostname }}
+server_name = {{ .Hostname }}
+arch = {{ index .Facts "architecture" }}
+cpus = {{ index .Facts "cpu_count" }}
+```
+
+Iterating over a list variable (`--var` values are strings, so pass lists via
+the SDK or orchestrator):
+
+```text
+{{ range .Vars.servers }}
+upstream {{ . }};
+{{ end }}
+```
+
+When targeting all hosts, the CLI prompts for confirmation:
+
+```bash
+$ osapi client node file deploy --object app.conf --path /etc/app/app.conf --target _all
+
+ This will deploy the file to ALL hosts. Continue? [y/N] y
+
+ Job ID: 550e8400-e29b-41d4-a716-446655440000
+ Hostname: server1
+ Changed: true
+```
+
+Target by label to deploy to a group of servers:
+
+```bash
+$ osapi client node file deploy \
+ --object nginx.conf \
+ --path /etc/nginx/nginx.conf \
+ --target group:web
+```
+
+See [File Management](../../../../../features/file-management.md) for details on
+template rendering and SHA-based idempotency.
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client node file deploy --object app.conf --path /etc/app/app.conf --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| ---------------- | -------------------------------------------------------- | ------- |
+| `--object` | Name of the file in the Object Store (**required**) | |
+| `--path` | Destination path on the target filesystem (**required**) | |
+| `--content-type` | Content type: `raw` or `template` | `raw` |
+| `--mode` | File permission mode (e.g., `0644`) | |
+| `--owner` | File owner user | |
+| `--group` | File owner group | |
+| `--var` | Template variable as `key=value` (repeatable) | `[]` |
+| `-T, --target` | Target: `_any`, `_all`, hostname, or label (`group:web`) | `_any` |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/cli/client/node/file/file.mdx b/docs/docs/sidebar/usage/cli/client/node/file/file.mdx
new file mode 100644
index 00000000..aad6027c
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/node/file/file.mdx
@@ -0,0 +1,7 @@
+# File
+
+CLI for deploying files to target nodes and checking deployment status.
+
+import DocCardList from '@theme/DocCardList';
+
+<DocCardList />
diff --git a/docs/docs/sidebar/usage/cli/client/node/file/status.md b/docs/docs/sidebar/usage/cli/client/node/file/status.md
new file mode 100644
index 00000000..ca89faae
--- /dev/null
+++ b/docs/docs/sidebar/usage/cli/client/node/file/status.md
@@ -0,0 +1,53 @@
+# Status
+
+Check the deployment status of a file on the target host. Reports whether the
+file is `in-sync`, `drifted`, or `missing`.
+
+```bash
+$ osapi client node file status --path /etc/app/app.conf
+
+ Job ID: 550e8400-e29b-41d4-a716-446655440000
+ Hostname: server1
+ Path: /etc/app/app.conf
+ Status: in-sync
+ SHA256: a1b2c3d4e5f6...
+```
+
+When a file has been modified on disk:
+
+```bash
+$ osapi client node file status --path /etc/app/app.conf
+
+ Job ID: 550e8400-e29b-41d4-a716-446655440000
+ Hostname: server1
+ Path: /etc/app/app.conf
+ Status: drifted
+ SHA256: 9f8e7d6c5b4a...
+```
+
+When a file has not been deployed or was deleted:
+
+```bash
+$ osapi client node file status --path /etc/app/app.conf
+
+ Job ID: 550e8400-e29b-41d4-a716-446655440000
+ Hostname: server1
+ Path: /etc/app/app.conf
+ Status: missing
+```
+
+## JSON Output
+
+Use `--json` to get the full API response:
+
+```bash
+$ osapi client node file status --path /etc/app/app.conf --json
+```
+
+## Flags
+
+| Flag | Description | Default |
+| -------------- | -------------------------------------------------------- | ------- |
+| `--path` | Filesystem path to check (**required**) | |
+| `-T, --target` | Target: `_any`, `_all`, hostname, or label (`group:web`) | `_any` |
+| `-j, --json` | Output raw JSON response | |
diff --git a/docs/docs/sidebar/usage/configuration.md b/docs/docs/sidebar/usage/configuration.md
index 843b07ed..3a2ded4a 100644
--- a/docs/docs/sidebar/usage/configuration.md
+++ b/docs/docs/sidebar/usage/configuration.md
@@ -57,6 +57,14 @@ uppercased:
| `nats.state.bucket` | `OSAPI_NATS_STATE_BUCKET` |
| `nats.state.storage` | `OSAPI_NATS_STATE_STORAGE` |
| `nats.state.replicas` | `OSAPI_NATS_STATE_REPLICAS` |
+| `nats.objects.bucket` | `OSAPI_NATS_OBJECTS_BUCKET` |
+| `nats.objects.max_bytes` | `OSAPI_NATS_OBJECTS_MAX_BYTES` |
+| `nats.objects.storage` | `OSAPI_NATS_OBJECTS_STORAGE` |
+| `nats.objects.replicas` | `OSAPI_NATS_OBJECTS_REPLICAS` |
+| `nats.objects.max_chunk_size` | `OSAPI_NATS_OBJECTS_MAX_CHUNK_SIZE` |
+| `nats.file_state.bucket` | `OSAPI_NATS_FILE_STATE_BUCKET` |
+| `nats.file_state.storage` | `OSAPI_NATS_FILE_STATE_STORAGE` |
+| `nats.file_state.replicas` | `OSAPI_NATS_FILE_STATE_REPLICAS` |
| `telemetry.tracing.enabled` | `OSAPI_TELEMETRY_TRACING_ENABLED` |
| `telemetry.tracing.exporter` | `OSAPI_TELEMETRY_TRACING_EXPORTER` |
| `telemetry.tracing.otlp_endpoint` | `OSAPI_TELEMETRY_TRACING_OTLP_ENDPOINT` |
@@ -133,11 +141,11 @@ OSAPI uses fine-grained `resource:verb` permissions for access control. Each API
endpoint requires a specific permission. Built-in roles expand to a default set
of permissions:
-| Role | Permissions |
-| ------- | -------------------------------------------------------------------------------------------------------------------------------------------------- |
-| `admin` | `agent:read`, `agent:write`, `node:read`, `network:read`, `network:write`, `job:read`, `job:write`, `health:read`, `audit:read`, `command:execute` |
-| `write` | `agent:read`, `node:read`, `network:read`, `network:write`, `job:read`, `job:write`, `health:read` |
-| `read` | `agent:read`, `node:read`, `network:read`, `job:read`, `health:read` |
+| Role | Permissions |
+| ------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `admin` | `agent:read`, `agent:write`, `node:read`, `network:read`, `network:write`, `job:read`, `job:write`, `health:read`, `audit:read`, `command:execute`, `file:read`, `file:write` |
+| `write` | `agent:read`, `node:read`, `network:read`, `network:write`, `job:read`, `job:write`, `health:read`, `file:read`, `file:write` |
+| `read` | `agent:read`, `node:read`, `network:read`, `job:read`, `health:read`, `file:read` |
### Custom Roles
@@ -234,7 +242,7 @@ api:
# Custom roles with fine-grained permissions.
# Permissions: agent:read, agent:write, node:read, network:read,
# network:write, job:read, job:write, health:read,
- # audit:read, command:execute
+ # audit:read, command:execute, file:read, file:write
# roles:
# ops:
# permissions:
@@ -340,6 +348,29 @@ nats:
# Number of KV replicas.
replicas: 1
+ # ── Object Store (file uploads) ─────────────────────────
+ objects:
+ # Object Store bucket for uploaded file content.
+ bucket: 'file-objects'
+ # Maximum total size of the bucket in bytes.
+ max_bytes: 104857600 # 100 MiB
+ # Storage backend: "file" or "memory".
+ storage: 'file'
+ # Number of Object Store replicas.
+ replicas: 1
+ # Maximum chunk size for uploads in bytes.
+ max_chunk_size: 262144 # 256 KiB
+
+ # ── File state KV bucket ────────────────────────────────
+ file_state:
+ # KV bucket for file deploy state tracking.
+ # No TTL — state persists until explicitly removed.
+ bucket: 'file-state'
+ # Storage backend: "file" or "memory".
+ storage: 'file'
+ # Number of KV replicas.
+ replicas: 1
+
# ── Dead Letter Queue ─────────────────────────────────────
dlq:
# Maximum age of messages in the DLQ.
@@ -513,6 +544,24 @@ agent:
| `storage` | string | `"file"` or `"memory"` |
| `replicas` | int | Number of KV replicas |
+### `nats.objects`
+
+| Key | Type | Description |
+| ---------------- | ------ | ------------------------------------ |
+| `bucket` | string | Object Store bucket for file uploads |
+| `max_bytes` | int | Maximum bucket size in bytes |
+| `storage` | string | `"file"` or `"memory"` |
+| `replicas` | int | Number of Object Store replicas |
+| `max_chunk_size` | int | Maximum chunk size for uploads |
+
+### `nats.file_state`
+
+| Key | Type | Description |
+| ---------- | ------ | ---------------------------------------- |
+| `bucket` | string | KV bucket for file deploy state (no TTL) |
+| `storage` | string | `"file"` or `"memory"` |
+| `replicas` | int | Number of KV replicas |
+
### `nats.dlq`
| Key | Type | Description |
diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts
index 68fcf3d9..47690288 100644
--- a/docs/docusaurus.config.ts
+++ b/docs/docusaurus.config.ts
@@ -115,6 +115,11 @@ const config: Config = {
label: 'Command Execution',
docId: 'sidebar/features/command-execution'
},
+ {
+ type: 'doc',
+ label: 'File Management',
+ docId: 'sidebar/features/file-management'
+ },
{
type: 'doc',
label: 'Health Checks',
diff --git a/docs/plans/2026-03-06-file-deploy-template-design.md b/docs/plans/2026-03-06-file-deploy-template-design.md
new file mode 100644
index 00000000..e9cf012a
--- /dev/null
+++ b/docs/plans/2026-03-06-file-deploy-template-design.md
@@ -0,0 +1,275 @@
+# File Deploy & Template Rendering Design
+
+## Context
+
+OSAPI manages system configuration through async jobs. Current operations (DNS,
+disk, memory, commands) send small JSON payloads through NATS KV. File
+management — deploying config files, rendering templates with per-host facts —
+requires transferring larger blobs and tracking deployed state for idempotency.
+
+Ansible's approach transfers the full file every run to verify whether it
+changed. We want SHA-based idempotency: compute the hash of what should be on
+disk, compare against what was last deployed, and skip the transfer when nothing
+changed.
+
+## Goals
+
+- Upload files to a central store (NATS Object Store) via the REST API
+- Deploy files to agent hosts with mode, owner, and group control
+- Render Go `text/template` files agent-side using live facts + user vars
+- SHA-based idempotency — skip transfer when content hasn't changed
+- Report `changed: true/false` so orchestrator guards (`OnlyIfChanged`) work
+- **Shared primitive** — the Object Store layer is reusable by future providers
+ (firmware, packages, certs, scripts), not tied to the file provider
+
+## Design Decisions
+
+- **Approach B: single operation with `content_type` flag.** One `file.deploy`
+ operation. A `content_type` field (`raw` or `template`) controls whether the
+ agent renders content before writing. SHA is computed on the **rendered
+ output** for templates, so fact changes trigger redeployment.
+- **NATS Object Store** for blob storage. It handles chunking automatically (KV
+ has a ~1MB value limit). Files are uploaded once and pulled by agents on
+ demand.
+- **Dedicated `file-state` KV bucket** for SHA tracking. Keyed by
+  `<hostname>.<path>`. No TTL — deployed state persists until
+ explicitly removed. Separate from `agent-state` to keep concerns clean.
+ Visible to the API server for fleet-wide deployment status.
+- **Agent-side template rendering.** Raw Go template stored in Object Store.
+ Agent renders locally using its cached facts + user-supplied vars. Consistent
+ with how `@fact.*` resolution works today — each host gets its own output.
+- **Mode + owner/group in job params.** Agent sets permissions after writing.
+ Defaults to umask/current user when not specified.
+
+## Architecture
+
+### Shared Object Store Primitive
+
+The Object Store client is a **shared agent dependency** — injected at startup
+like `execManager`, `hostProvider`, or `factsKV`. Any provider can use it to
+pull blobs.
+
+```
+┌─────────────────────────────────┐
+│ Object Store │ ← shared NATS resource
+│ (file-objects bucket) │
+└──────────┬──────────────────────┘
+ │
+ ┌─────┴──────┐
+ │ Agent │
+ │ .objStore │ ← injected handle
+ └─────┬──────┘
+ │
+ ┌────────┼────────────┬───────────────┐
+ │ │ │ │
+file firmware package cert
+provider provider provider provider
+(now) (future) (future) (future)
+```
+
+Future providers that would consume the Object Store:
+
+| Operation         | Usage                                          |
+| ----------------- | ---------------------------------------------- |
+| `firmware.update` | Pull binary, run flash tool                    |
+| `package.install` | Pull `.deb`/`.rpm`, install via `dpkg`/`rpm`   |
+| `cert.deploy`     | Pull TLS cert/key, write with restricted perms |
+| `script.run`      | Pull script file, execute with args            |
+
+Each provider owns its domain logic but shares: Object Store download, SHA
+comparison, and state tracking from the `file-state` KV bucket.
+
+### Data Flow
+
+**Upload phase** (new REST endpoint):
+
+1. Client sends file content via `POST /file` with metadata (name)
+2. API server stores content in NATS Object Store (`file-objects`)
+3. Returns object reference: `{name, sha256, size}`
+
+**Deploy phase** (job system — `file.deploy` operation):
+
+1. Client creates job with `file.deploy` targeting host(s)
+2. Job data: object name, destination path, mode, owner, group, content_type,
+ optional template vars
+3. Agent pulls object from Object Store
+4. If `content_type: "template"` — renders with Go `text/template`
+5. Computes SHA of final content (rendered or raw)
+6. Checks `file-state` KV — if SHA matches, returns `changed: false`
+7. If different — writes file, sets perms, updates state KV, returns
+ `changed: true`
+
+**Status check** (read-only — `file.status` operation):
+
+1. Agent reads local file SHA, compares against `file-state` KV
+2. Reports: in-sync, drifted, or missing
+
+## Data Structures
+
+### NATS Configuration
+
+```yaml
+nats:
+ objects:
+ bucket: 'file-objects'
+ max_bytes: 524288000 # 500 MiB
+ storage: 'file'
+ replicas: 1
+
+ file_state:
+ bucket: 'file-state'
+ storage: 'file'
+ replicas: 1
+ # No TTL — deployed file state persists
+```
+
+### File State KV Entry
+
+Keyed by `<hostname>.<path>`:
+
+```json
+{
+ "object_name": "nginx.conf",
+ "path": "/etc/nginx/nginx.conf",
+ "sha256": "abc123...",
+ "mode": "0644",
+ "owner": "root",
+ "group": "root",
+ "deployed_at": "2026-03-06T...",
+ "content_type": "raw"
+}
+```
+
+### Job Request Data (`file.deploy`)
+
+```json
+{
+ "object_name": "nginx.conf",
+ "path": "/etc/nginx/nginx.conf",
+ "mode": "0644",
+ "owner": "root",
+ "group": "root",
+ "content_type": "template",
+ "vars": {
+ "worker_count": 4,
+ "upstream": "10.0.0.5"
+ }
+}
+```
+
+### Template Rendering Context
+
+```go
+type TemplateContext struct {
+ Facts *job.FactsRegistration
+ Vars map[string]any
+ Hostname string
+}
+```
+
+Example template:
+
+```
+worker_processes {{ .Vars.worker_count }};
+# Running on {{ .Hostname }} ({{ .Facts.Architecture }})
+server {{ .Vars.upstream }}:{{ if eq .Facts.Architecture "arm64" }}8081{{ else }}8080{{ end }};
+```
+
+## API Endpoints
+
+| Method   | Path           | Permission   | Description                 |
+| -------- | -------------- | ------------ | --------------------------- |
+| `POST`   | `/file`        | `file:write` | Upload file to Object Store |
+| `GET`    | `/file`        | `file:read`  | List stored objects         |
+| `GET`    | `/file/{name}` | `file:read`  | Get object metadata         |
+| `DELETE` | `/file/{name}` | `file:write` | Remove stored object        |
+
+Deploy and status go through the existing job system as `file.deploy` and
+`file.status` operations. No new job endpoints needed.
+
+### Permissions
+
+New permissions: `file:read`, `file:write`. Added to `admin` and `write`
+built-in roles.
+
+## Agent-Side Architecture
+
+The agent gets two new dependencies:
+
+- **`objectStore`** — NATS Object Store handle. Any provider can use it.
+- **`fileStateKV`** — dedicated KV for tracking deployed file SHAs.
+
+The file provider implements:
+
+- `Deploy(req) → (Result, error)` — pull from Object Store, optionally render
+ template, SHA compare, write file, set perms, update state
+- `Status(req) → (Result, error)` — read-only: compare local file SHA against
+ state KV
+
+The processor dispatch adds a `file` category alongside `node`, `network`,
+`command`.
+
+## SDK & Orchestrator Integration
+
+### SDK (`osapi-sdk`)
+
+New `FileService`:
+
+- `Upload(ctx, name, content)` — upload to Object Store
+- `List(ctx)` — list stored objects
+- `Get(ctx, name)` — get object metadata
+- `Delete(ctx, name)` — remove object
+
+Deploy uses existing `Job.Create()` with operation `file.deploy`.
+
+### Orchestrator (`osapi-orchestrator`)
+
+```go
+o := orchestrator.New(client)
+
+upload := o.FileUpload("nginx.conf", "./local/nginx.conf.tmpl")
+deploy := o.FileTemplate("_all", "nginx.conf", "/etc/nginx/nginx.conf",
+ map[string]any{"worker_count": 4},
+ orchestrator.WithMode("0644"),
+ orchestrator.WithOwner("root", "root"),
+).After(upload)
+
+reload := o.CommandExec("_all", "nginx", []string{"-s", "reload"}).
+ After(deploy).
+ OnlyIfChanged()
+```
+
+- `FileDeploy()` — raw file deploy step
+- `FileTemplate()` — deploy with `content_type: "template"`
+- `OnlyIfChanged` works naturally via `changed` response field
+- Template vars support `@fact.*` references (resolved agent-side)
+
+## Verification
+
+After implementation:
+
+```bash
+# Upload a file
+osapi client file upload --name nginx.conf --file ./nginx.conf
+
+# Deploy raw file
+osapi client node file deploy \
+ --object nginx.conf \
+ --path /etc/nginx/nginx.conf \
+ --mode 0644 --owner root --group root \
+ --target _all
+
+# Deploy template
+osapi client node file deploy \
+ --object nginx.conf.tmpl \
+ --path /etc/nginx/nginx.conf \
+ --content-type template \
+ --var worker_count=4 \
+ --mode 0644 --owner root --group root \
+ --target _all
+
+# Check status (idempotent re-run should show changed: false)
+osapi client node file status \
+ --path /etc/nginx/nginx.conf \
+ --target _all
+```
diff --git a/docs/plans/2026-03-06-file-deploy-template.md b/docs/plans/2026-03-06-file-deploy-template.md
new file mode 100644
index 00000000..1260446b
--- /dev/null
+++ b/docs/plans/2026-03-06-file-deploy-template.md
@@ -0,0 +1,1983 @@
+# File Deploy & Template Rendering Implementation Plan
+
+> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to
+> implement this plan task-by-task.
+
+**Goal:** Add file management (upload/list/get/delete via Object Store), file
+deployment with SHA-based idempotency, and Go template rendering with per-host
+facts.
+
+**Architecture:** NATS Object Store as shared blob storage, dedicated
+`file-state` KV for SHA tracking, single `file.deploy` job operation with
+`content_type` flag (raw/template), agent-side `text/template` rendering. Object
+Store is a shared primitive — future providers (firmware, packages, certs) reuse
+the same infrastructure.
+
+**Tech Stack:** Go 1.25, NATS JetStream Object Store, `text/template`,
+oapi-codegen, testify/suite, gomock.
+
+**Design doc:** `docs/plans/2026-03-06-file-deploy-template-design.md`
+
+---
+
+## Prerequisites
+
+### nats-client Object Store Support
+
+The `github.com/osapi-io/nats-client` package needs Object Store methods before
+this plan can start. Add to the nats-client repo:
+
+```go
+// In pkg/client/types.go or new objectstore.go
+func (c *Client) CreateOrUpdateObjectStore(
+ ctx context.Context,
+ cfg jetstream.ObjectStoreConfig,
+) (jetstream.ObjectStore, error)
+
+func (c *Client) ObjectStore(
+ ctx context.Context,
+ name string,
+) (jetstream.ObjectStore, error)
+```
+
+Then update `internal/messaging/types.go` in osapi to add:
+
+```go
+CreateOrUpdateObjectStore(
+ ctx context.Context,
+ cfg jetstream.ObjectStoreConfig,
+) (jetstream.ObjectStore, error)
+
+ObjectStore(
+ ctx context.Context,
+ name string,
+) (jetstream.ObjectStore, error)
+```
+
+This is a separate PR on the nats-client repo. Once merged, `go get` the new
+version before starting Task 1.
+
+---
+
+## Task 1: NATS Configuration for Object Store + File-State KV
+
+Add config structs, builder functions, and startup creation for the two new NATS
+resources.
+
+**Files:**
+
+- Modify: `internal/config/types.go`
+- Modify: `internal/cli/nats.go`
+- Modify: `internal/cli/nats_public_test.go`
+- Modify: `cmd/nats_helpers.go`
+- Modify: `internal/messaging/types.go`
+- Modify: `docs/docs/sidebar/usage/configuration.md`
+
+### Step 1: Add config structs
+
+In `internal/config/types.go`, add two new types and fields to `NATS`:
+
+```go
+// NATSObjects configuration for the NATS Object Store bucket.
+type NATSObjects struct {
+ // Bucket is the Object Store bucket name for file content.
+ Bucket string `mapstructure:"bucket"`
+ MaxBytes int64 `mapstructure:"max_bytes"`
+ Storage string `mapstructure:"storage"` // "file" or "memory"
+ Replicas int `mapstructure:"replicas"`
+}
+
+// NATSFileState configuration for the file deployment state KV bucket.
+// No TTL — deployed file state persists until explicitly removed.
+type NATSFileState struct {
+ // Bucket is the KV bucket name for file deployment SHA tracking.
+ Bucket string `mapstructure:"bucket"`
+ Storage string `mapstructure:"storage"` // "file" or "memory"
+ Replicas int `mapstructure:"replicas"`
+}
+```
+
+Add to `NATS` struct:
+
+```go
+type NATS struct {
+ // ... existing fields ...
+ Objects NATSObjects `mapstructure:"objects,omitempty"`
+ FileState NATSFileState `mapstructure:"file_state,omitempty"`
+}
+```
+
+### Step 2: Add NATSClient Object Store methods
+
+In `internal/messaging/types.go`, add to the `NATSClient` interface:
+
+```go
+// Object Store operations
+CreateOrUpdateObjectStore(
+ ctx context.Context,
+ cfg jetstream.ObjectStoreConfig,
+) (jetstream.ObjectStore, error)
+ObjectStore(
+ ctx context.Context,
+ name string,
+) (jetstream.ObjectStore, error)
+```
+
+### Step 3: Add builder functions
+
+In `internal/cli/nats.go`, add:
+
+```go
+// BuildObjectStoreConfig builds a jetstream.ObjectStoreConfig from
+// objects config values.
+func BuildObjectStoreConfig(
+ namespace string,
+ objectsCfg config.NATSObjects,
+) jetstream.ObjectStoreConfig {
+ bucket := job.ApplyNamespaceToInfraName(namespace, objectsCfg.Bucket)
+
+ return jetstream.ObjectStoreConfig{
+ Bucket: bucket,
+ MaxBytes: objectsCfg.MaxBytes,
+ Storage: ParseJetstreamStorageType(objectsCfg.Storage),
+ Replicas: objectsCfg.Replicas,
+ }
+}
+
+// BuildFileStateKVConfig builds a jetstream.KeyValueConfig from
+// file state config values. No TTL — deployed state persists.
+func BuildFileStateKVConfig(
+ namespace string,
+ fileStateCfg config.NATSFileState,
+) jetstream.KeyValueConfig {
+ bucket := job.ApplyNamespaceToInfraName(namespace, fileStateCfg.Bucket)
+
+ return jetstream.KeyValueConfig{
+ Bucket: bucket,
+ Storage: ParseJetstreamStorageType(fileStateCfg.Storage),
+ Replicas: fileStateCfg.Replicas,
+ }
+}
+```
+
+### Step 4: Add startup creation
+
+In `cmd/nats_helpers.go` `setupJetStream()`, add after the state KV block:
+
+```go
+// Create Object Store bucket for file content
+if appConfig.NATS.Objects.Bucket != "" {
+ objStoreConfig := cli.BuildObjectStoreConfig(namespace, appConfig.NATS.Objects)
+ if _, err := nc.CreateOrUpdateObjectStore(ctx, objStoreConfig); err != nil {
+ return fmt.Errorf("create Object Store bucket %s: %w", objStoreConfig.Bucket, err)
+ }
+}
+
+// Create file-state KV bucket for deployment SHA tracking
+if appConfig.NATS.FileState.Bucket != "" {
+ fileStateKVConfig := cli.BuildFileStateKVConfig(namespace, appConfig.NATS.FileState)
+ if _, err := nc.CreateOrUpdateKVBucketWithConfig(ctx, fileStateKVConfig); err != nil {
+ return fmt.Errorf("create file-state KV bucket %s: %w", fileStateKVConfig.Bucket, err)
+ }
+}
+```
+
+### Step 5: Add default config values
+
+Add to `osapi.yaml` and the configuration docs the new sections:
+
+```yaml
+nats:
+ objects:
+ bucket: 'file-objects'
+ max_bytes: 524288000 # 500 MiB
+ storage: 'file'
+ replicas: 1
+
+ file_state:
+ bucket: 'file-state'
+ storage: 'file'
+ replicas: 1
+```
+
+### Step 6: Run tests and verify
+
+```bash
+go build ./...
+just go::unit
+```
+
+### Step 7: Commit
+
+```bash
+git add internal/config/types.go internal/cli/nats.go \
+ internal/messaging/types.go cmd/nats_helpers.go
+git commit -m "feat(config): add Object Store and file-state KV config"
+```
+
+---
+
+## Task 2: Permissions — Add file:read and file:write
+
+Add file permissions to the auth system before creating API endpoints.
+
+**Files:**
+
+- Modify: `internal/authtoken/permissions.go`
+- Modify: `internal/authtoken/permissions_public_test.go`
+
+### Step 1: Add permission constants
+
+In `internal/authtoken/permissions.go`, add:
+
+```go
+const (
+ // ... existing ...
+ PermFileRead Permission = "file:read"
+ PermFileWrite Permission = "file:write"
+)
+```
+
+Add to `AllPermissions`:
+
+```go
+var AllPermissions = []Permission{
+ // ... existing ...
+ PermFileRead,
+ PermFileWrite,
+}
+```
+
+Add to `DefaultRolePermissions`:
+
+```go
+"admin": {
+ // ... existing ...
+ PermFileRead,
+ PermFileWrite,
+},
+"write": {
+ // ... existing ...
+ PermFileRead,
+ PermFileWrite,
+},
+"read": {
+ // ... existing ...
+ PermFileRead,
+},
+```
+
+### Step 2: Update permission tests
+
+Add test cases to the existing permissions test suite to verify the new
+permissions resolve correctly for admin, write, and read roles.
+
+### Step 3: Run tests
+
+```bash
+go test ./internal/authtoken/... -count=1 -v
+```
+
+### Step 4: Commit
+
+```bash
+git add internal/authtoken/permissions.go \
+ internal/authtoken/permissions_public_test.go
+git commit -m "feat(auth): add file:read and file:write permissions"
+```
+
+---
+
+## Task 3: File API Domain — OpenAPI Spec + Code Generation
+
+Create the `/file` REST API domain for Object Store management.
+
+**Files:**
+
+- Create: `internal/api/file/gen/api.yaml`
+- Create: `internal/api/file/gen/cfg.yaml`
+- Create: `internal/api/file/gen/generate.go`
+- Generated: `internal/api/file/gen/file.gen.go`
+
+### Step 1: Write OpenAPI spec
+
+Create `internal/api/file/gen/api.yaml`:
+
+```yaml
+openapi: '3.0.0'
+info:
+ title: File Management API
+ version: 1.0.0
+
+tags:
+ - name: file
+ x-displayName: File
+ description: Manage files in the Object Store.
+
+paths:
+ /file:
+ post:
+ operationId: PostFile
+ summary: Upload a file to Object Store
+ description: >
+ Stores file content in NATS Object Store. Returns the object reference
+ with SHA256 and size.
+ tags: [file]
+ security:
+ - BearerAuth:
+ - 'file:write'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileUploadRequest'
+ responses:
+ '201':
+ description: File uploaded successfully.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileUploadResponse'
+ '400':
+ description: Invalid input.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal server error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ get:
+ operationId: GetFiles
+ summary: List stored files
+ description: Returns metadata for all files in the Object Store.
+ tags: [file]
+ security:
+ - BearerAuth:
+ - 'file:read'
+ responses:
+ '200':
+ description: List of stored files.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileListResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal server error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ /file/{name}:
+ get:
+ operationId: GetFileByName
+ summary: Get file metadata
+ description: Returns metadata for a specific file in the Object Store.
+ tags: [file]
+ security:
+ - BearerAuth:
+ - 'file:read'
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File metadata.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileInfoResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal server error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ delete:
+ operationId: DeleteFile
+ summary: Delete a file from Object Store
+ description: Removes a file from the Object Store.
+ tags: [file]
+ security:
+ - BearerAuth:
+ - 'file:write'
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File deleted.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeleteResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal server error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+components:
+ securitySchemes:
+ BearerAuth:
+ type: http
+ scheme: bearer
+ bearerFormat: JWT
+
+ parameters:
+ FileName:
+ name: name
+ in: path
+ required: true
+ schema:
+ type: string
+ description: The name of the file in the Object Store.
+ # NOTE: path param x-oapi-codegen-extra-tags does not generate
+ # tags on RequestObject structs in strict-server mode.
+ # Validated manually in handler.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+
+ schemas:
+ FileUploadRequest:
+ type: object
+ properties:
+ name:
+ type: string
+ description: >
+ Name to store the file under in the Object Store.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ content:
+ type: string
+ format: byte
+ description: >
+ Base64-encoded file content.
+ x-oapi-codegen-extra-tags:
+ validate: required
+ required: [name, content]
+
+ FileUploadResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ sha256:
+ type: string
+ size:
+ type: integer
+ format: int64
+ required: [name, sha256, size]
+
+ FileListResponse:
+ type: object
+ properties:
+ files:
+ type: array
+ items:
+ $ref: '#/components/schemas/FileInfo'
+ required: [files]
+
+ FileInfo:
+ type: object
+ properties:
+ name:
+ type: string
+ sha256:
+ type: string
+ size:
+ type: integer
+ format: int64
+ required: [name, size]
+
+ FileInfoResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ sha256:
+ type: string
+ size:
+ type: integer
+ format: int64
+ required: [name, sha256, size]
+
+ FileDeleteResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ deleted:
+ type: boolean
+ required: [name, deleted]
+```
+
+### Step 2: Write codegen config
+
+Create `internal/api/file/gen/cfg.yaml`:
+
+```yaml
+package: gen
+generate:
+ strict-server: true
+ echo-server: true
+ models: true
+import-mapping:
+ ../../common/gen/api.yaml: github.com/retr0h/osapi/internal/api/common/gen
+output: file.gen.go
+```
+
+### Step 3: Write generate directive
+
+Create `internal/api/file/gen/generate.go`:
+
+```go
+package gen
+
+//go:generate go run github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen --config cfg.yaml api.yaml
+```
+
+### Step 4: Generate code
+
+```bash
+go generate ./internal/api/file/gen/...
+```
+
+### Step 5: Commit
+
+```bash
+git add internal/api/file/gen/
+git commit -m "feat(api): add file domain OpenAPI spec and codegen"
+```
+
+---
+
+## Task 4: File API Handler — Upload, List, Get, Delete
+
+Implement the file API handler with all four endpoints.
+
+**Files:**
+
+- Create: `internal/api/file/types.go`
+- Create: `internal/api/file/file.go`
+- Create: `internal/api/file/file_upload.go`
+- Create: `internal/api/file/file_list.go`
+- Create: `internal/api/file/file_get.go`
+- Create: `internal/api/file/file_delete.go`
+- Create: `internal/api/file/file_upload_public_test.go`
+- Create: `internal/api/file/file_list_public_test.go`
+- Create: `internal/api/file/file_get_public_test.go`
+- Create: `internal/api/file/file_delete_public_test.go`
+
+### Step 1: Write types.go
+
+```go
+package file
+
+import (
+ "context"
+ "log/slog"
+
+ "github.com/nats-io/nats.go/jetstream"
+)
+
+// ObjectStoreManager abstracts NATS Object Store operations for testing.
+type ObjectStoreManager interface {
+ PutBytes(
+ ctx context.Context,
+ name string,
+ data []byte,
+ ) (*jetstream.ObjectInfo, error)
+ GetBytes(
+ ctx context.Context,
+ name string,
+ ) ([]byte, error)
+ GetInfo(
+ ctx context.Context,
+ name string,
+ ) (*jetstream.ObjectInfo, error)
+ Delete(
+ ctx context.Context,
+ name string,
+ ) error
+ List(
+ ctx context.Context,
+ ) ([]*jetstream.ObjectInfo, error)
+}
+
+// File handles file management REST API endpoints.
+type File struct {
+ objStore ObjectStoreManager
+ logger *slog.Logger
+}
+```
+
+**Note:** The `ObjectStoreManager` interface wraps `jetstream.ObjectStore` so
+handlers can be tested with mocks. The actual `jetstream.ObjectStore` satisfies
+this interface. Verify that the `jetstream.ObjectStore` interface matches — the
+`List` method may return a lister instead of a slice; adapt accordingly.
+
+### Step 2: Write file.go factory
+
+```go
+package file
+
+import (
+ "log/slog"
+
+ gen "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+var _ gen.StrictServerInterface = (*File)(nil)
+
+// New creates a new File handler.
+func New(
+ logger *slog.Logger,
+ objStore ObjectStoreManager,
+) *File {
+ return &File{
+ objStore: objStore,
+ logger: logger,
+ }
+}
+```
+
+### Step 3: Write upload handler (file_upload.go)
+
+Decode base64 content from request body, store in Object Store, return
+reference. Use `validation.Struct(request.Body)` for input validation.
+
+### Step 4: Write failing tests for upload
+
+Create `file_upload_public_test.go` with table-driven suite:
+
+- when valid upload succeeds (201)
+- when name is empty (400, validation error)
+- when content is empty (400, validation error)
+- when Object Store put fails (500)
+
+Include `TestPostFileHTTP` and `TestPostFileRBACHTTP` methods.
+
+### Step 5: Implement remaining handlers
+
+Follow the same test-first pattern for list, get, delete:
+
+- `file_list.go` — iterate Object Store, return file info array
+- `file_get.go` — get info by name, return 404 if not found
+- `file_delete.go` — delete by name, return 404 if not found
+
+### Step 6: Run tests
+
+```bash
+go test ./internal/api/file/... -count=1 -v
+```
+
+### Step 7: Commit
+
+```bash
+git add internal/api/file/
+git commit -m "feat(api): implement file upload, list, get, delete handlers"
+```
+
+---
+
+## Task 5: File API Server Wiring
+
+Wire the file handler into the API server.
+
+**Files:**
+
+- Create: `internal/api/handler_file.go`
+- Create: `internal/api/handler_file_public_test.go`
+- Modify: `internal/api/types.go`
+- Modify: `internal/api/handler.go`
+- Modify: `cmd/api_helpers.go`
+
+### Step 1: Create handler_file.go
+
+Follow the pattern from `handler_node.go`. All file endpoints require
+authentication. The handler factory takes an `ObjectStoreManager`:
+
+```go
+func (s *Server) GetFileHandler(
+ objStore file.ObjectStoreManager,
+) []func(e *echo.Echo) {
+ var tokenManager TokenValidator = authtoken.New(s.logger)
+
+ fileHandler := file.New(s.logger, objStore)
+
+ strictHandler := fileGen.NewStrictHandler(
+ fileHandler,
+ []fileGen.StrictMiddlewareFunc{
+ func(handler strictecho.StrictEchoHandlerFunc, _ string) strictecho.StrictEchoHandlerFunc {
+ return scopeMiddleware(
+ handler,
+ tokenManager,
+ s.appConfig.API.Server.Security.SigningKey,
+ fileGen.BearerAuthScopes,
+ s.customRoles,
+ )
+ },
+ },
+ )
+
+ return []func(e *echo.Echo){
+ func(e *echo.Echo) {
+ fileGen.RegisterHandlers(e, strictHandler)
+ },
+ }
+}
+```
+
+### Step 2: Update types.go
+
+No new fields needed on `Server` — the Object Store is passed directly to
+`GetFileHandler()`.
+
+### Step 3: Update handler.go
+
+In `registerAPIHandlers()` (or equivalent), add:
+
+```go
+handlers = append(handlers, sm.GetFileHandler(objStore)...)
+```
+
+### Step 4: Update startup wiring
+
+In `cmd/api_helpers.go`, create the Object Store handle at startup and pass it
+to the file handler:
+
+```go
+// Create Object Store handle for file management API
+var objStore jetstream.ObjectStore
+if appConfig.NATS.Objects.Bucket != "" {
+ objStoreName := job.ApplyNamespaceToInfraName(namespace, appConfig.NATS.Objects.Bucket)
+ objStore, err = nc.ObjectStore(ctx, objStoreName)
+ // handle error
+}
+```
+
+### Step 5: Add handler test
+
+Create `handler_file_public_test.go` following the pattern of
+`handler_node_public_test.go`.
+
+### Step 6: Update combined OpenAPI spec
+
+Add the file spec to `internal/api/gen/api.yaml` merged spec.
+
+### Step 7: Run tests and verify
+
+```bash
+go build ./...
+go test ./internal/api/... -count=1 -v
+```
+
+### Step 8: Commit
+
+```bash
+git add internal/api/handler_file.go internal/api/handler_file_public_test.go \
+ internal/api/types.go internal/api/handler.go \
+ cmd/api_helpers.go internal/api/gen/api.yaml
+git commit -m "feat(api): wire file handler into API server"
+```
+
+---
+
+## Task 6: Job Types + File Provider Interface
+
+Define operation constants, request/response types, and the file provider
+interface.
+
+**Files:**
+
+- Modify: `internal/job/types.go`
+- Create: `internal/provider/file/types.go`
+- Create: `internal/provider/file/mocks/types.gen.go`
+- Create: `internal/provider/file/mocks/mocks.go`
+
+### Step 1: Add operation constants
+
+In `internal/job/types.go`:
+
+```go
+// File operations
+const (
+ OperationFileDeployExecute = "file.deploy.execute"
+ OperationFileStatusGet = "file.status.get"
+)
+```
+
+### Step 2: Define file state type
+
+In `internal/job/types.go`, add the file state KV entry structure:
+
+```go
+// FileState represents a deployed file's state in the file-state KV.
+// Keyed by ..
+type FileState struct {
+ ObjectName string `json:"object_name"`
+ Path string `json:"path"`
+ SHA256 string `json:"sha256"`
+ Mode string `json:"mode,omitempty"`
+ Owner string `json:"owner,omitempty"`
+ Group string `json:"group,omitempty"`
+ DeployedAt string `json:"deployed_at"`
+ ContentType string `json:"content_type"`
+}
+```
+
+### Step 3: Define provider interface
+
+Create `internal/provider/file/types.go`:
+
+```go
+package file
+
+import "context"
+
+// DeployRequest contains parameters for deploying a file to disk.
+type DeployRequest struct {
+ ObjectName string `json:"object_name"`
+ Path string `json:"path"`
+ Mode string `json:"mode,omitempty"`
+ Owner string `json:"owner,omitempty"`
+ Group string `json:"group,omitempty"`
+ ContentType string `json:"content_type"` // "raw" or "template"
+ Vars map[string]any `json:"vars,omitempty"`
+}
+
+// DeployResult contains the result of a file deploy operation.
+type DeployResult struct {
+ Changed bool `json:"changed"`
+ SHA256 string `json:"sha256"`
+ Path string `json:"path"`
+}
+
+// StatusRequest contains parameters for checking file status.
+type StatusRequest struct {
+ Path string `json:"path"`
+}
+
+// StatusResult contains the result of a file status check.
+type StatusResult struct {
+ Path string `json:"path"`
+ Status string `json:"status"` // "in-sync", "drifted", "missing"
+ SHA256 string `json:"sha256,omitempty"`
+}
+
+// Provider defines the interface for file operations.
+type Provider interface {
+ Deploy(
+ ctx context.Context,
+ req DeployRequest,
+ ) (*DeployResult, error)
+ Status(
+ ctx context.Context,
+ req StatusRequest,
+ ) (*StatusResult, error)
+}
+```
+
+### Step 4: Generate mocks
+
+Create `internal/provider/file/mocks/mocks.go`:
+
+```go
+package mocks
+
+//go:generate mockgen -source=../types.go -destination=types.gen.go -package=mocks
+```
+
+Run:
+
+```bash
+go generate ./internal/provider/file/mocks/...
+```
+
+### Step 5: Commit
+
+```bash
+git add internal/job/types.go internal/provider/file/
+git commit -m "feat(file): add job operation constants and provider interface"
+```
+
+---
+
+## Task 7: File Provider Implementation — Deploy with SHA Idempotency
+
+Implement the core deploy logic: pull from Object Store, SHA compare, write
+file, set permissions, update state KV.
+
+**Files:**
+
+- Create: `internal/provider/file/provider.go`
+- Create: `internal/provider/file/deploy.go`
+- Create: `internal/provider/file/deploy_public_test.go`
+- Create: `internal/provider/file/status.go`
+- Create: `internal/provider/file/status_public_test.go`
+
+### Step 1: Write provider constructor
+
+Create `internal/provider/file/provider.go`:
+
+```go
+package file
+
+import (
+ "context"
+ "log/slog"
+
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/spf13/afero"
+
+ "github.com/retr0h/osapi/internal/job"
+)
+
+// FileProvider implements file deploy and status operations.
+type FileProvider struct {
+ logger *slog.Logger
+ fs afero.Fs
+ objStore jetstream.ObjectStore
+ stateKV jetstream.KeyValue
+ hostname string
+ cachedFacts *job.FactsRegistration
+}
+
+// New creates a new FileProvider.
+func New(
+ logger *slog.Logger,
+ fs afero.Fs,
+ objStore jetstream.ObjectStore,
+ stateKV jetstream.KeyValue,
+ hostname string,
+ cachedFacts *job.FactsRegistration,
+) *FileProvider {
+ return &FileProvider{
+ logger: logger,
+ fs: fs,
+ objStore: objStore,
+ stateKV: stateKV,
+ hostname: hostname,
+ cachedFacts: cachedFacts,
+ }
+}
+```
+
+**Note:** The provider uses `afero.Fs` for filesystem abstraction (testable
+without writing real files). The `objStore` and `stateKV` are NATS JetStream
+interfaces — mock them in tests.
+
+### Step 2: Write failing deploy tests
+
+Create `deploy_public_test.go` with table-driven cases:
+
+| Case | Setup | Expected |
+| ------------------------------- | --------------------------------------------------------- | ------------------------------ |
+| when deploy succeeds (new file) | Mock: objStore returns content, stateKV has no entry | changed: true, file written |
+| when deploy succeeds (changed) | Mock: objStore returns content, stateKV has different SHA | changed: true, file written |
+| when deploy skips (unchanged) | Mock: objStore returns content, stateKV has same SHA | changed: false, no write |
+| when Object Store get fails | Mock: objStore returns error | error |
+| when file write fails | Mock: fs write fails | error |
+| when state KV put fails | Mock: stateKV put fails | error |
+| when mode is set | Mock: success | file written with correct mode |
+
+### Step 3: Implement deploy
+
+Create `deploy.go`. Core logic:
+
+1. Pull content from Object Store: `objStore.GetBytes(ctx, req.ObjectName)`
+2. If `content_type == "template"`, render (delegate to Task 8)
+3. Compute SHA256 of final content
+4. Build state key: `hostname + "." + sha256(req.Path)`
+5. Check `stateKV.Get(ctx, stateKey)` — if SHA matches, return
+ `{changed: false}`
+6. Write file using `afero.WriteFile(fs, req.Path, content, mode)`
+7. If owner/group set, `fs.Chown` (skip if not root or on macOS)
+8. Update stateKV with new `FileState`
+9. Return `{changed: true, sha256: sha}`
+
+```go
+func (p *FileProvider) Deploy(
+ ctx context.Context,
+ req DeployRequest,
+) (*DeployResult, error) {
+ // 1. Pull content from Object Store
+ content, err := p.objStore.GetBytes(ctx, req.ObjectName)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get object %q: %w", req.ObjectName, err)
+ }
+
+ // 2. Template rendering (if applicable)
+ if req.ContentType == "template" {
+ content, err = p.renderTemplate(content, req.Vars)
+ if err != nil {
+ return nil, fmt.Errorf("failed to render template: %w", err)
+ }
+ }
+
+ // 3. Compute SHA of final content
+ sha := computeSHA256(content)
+
+ // 4. Check state for idempotency
+ stateKey := buildStateKey(p.hostname, req.Path)
+ existing, _ := p.stateKV.Get(ctx, stateKey)
+ if existing != nil {
+ var state job.FileState
+ if json.Unmarshal(existing.Value(), &state) == nil && state.SHA256 == sha {
+ return &DeployResult{Changed: false, SHA256: sha, Path: req.Path}, nil
+ }
+ }
+
+ // 5. Write file
+ mode := parseFileMode(req.Mode)
+ if err := afero.WriteFile(p.fs, req.Path, content, mode); err != nil {
+ return nil, fmt.Errorf("failed to write file %q: %w", req.Path, err)
+ }
+
+ // 6. Update state KV
+ state := job.FileState{
+ ObjectName: req.ObjectName,
+ Path: req.Path,
+ SHA256: sha,
+ Mode: req.Mode,
+ Owner: req.Owner,
+ Group: req.Group,
+ DeployedAt: time.Now().UTC().Format(time.RFC3339),
+ ContentType: req.ContentType,
+ }
+ stateBytes, _ := json.Marshal(state)
+ if _, err := p.stateKV.Put(ctx, stateKey, stateBytes); err != nil {
+ return nil, fmt.Errorf("failed to update file state: %w", err)
+ }
+
+ return &DeployResult{Changed: true, SHA256: sha, Path: req.Path}, nil
+}
+```
+
+### Step 4: Implement helper functions
+
+```go
+func computeSHA256(data []byte) string {
+ h := sha256.Sum256(data)
+ return hex.EncodeToString(h[:])
+}
+
+func buildStateKey(hostname, path string) string {
+ pathHash := computeSHA256([]byte(path))
+ return hostname + "." + pathHash
+}
+
+func parseFileMode(mode string) os.FileMode {
+ if mode == "" {
+ return 0o644
+ }
+ m, err := strconv.ParseUint(mode, 8, 32)
+ if err != nil {
+ return 0o644
+ }
+ return os.FileMode(m)
+}
+```
+
+### Step 5: Write failing status tests
+
+Create `status_public_test.go`:
+
+| Case | Setup | Expected |
+| ------------------- | ----------------------------------- | ----------------- |
+| when file in sync | Local SHA matches state KV SHA | status: "in-sync" |
+| when file drifted | Local SHA differs from state KV SHA | status: "drifted" |
+| when file missing | File doesn't exist on disk | status: "missing" |
+| when no state entry | stateKV has no entry for path | status: "missing" |
+
+### Step 6: Implement status
+
+```go
+func (p *FileProvider) Status(
+ ctx context.Context,
+ req StatusRequest,
+) (*StatusResult, error) {
+ stateKey := buildStateKey(p.hostname, req.Path)
+
+ entry, err := p.stateKV.Get(ctx, stateKey)
+ if err != nil {
+ return &StatusResult{Path: req.Path, Status: "missing"}, nil
+ }
+
+ var state job.FileState
+ if err := json.Unmarshal(entry.Value(), &state); err != nil {
+ return nil, fmt.Errorf("failed to parse file state: %w", err)
+ }
+
+ // Check if file exists on disk
+ data, err := afero.ReadFile(p.fs, req.Path)
+ if err != nil {
+ return &StatusResult{Path: req.Path, Status: "missing"}, nil
+ }
+
+ localSHA := computeSHA256(data)
+ if localSHA == state.SHA256 {
+ return &StatusResult{Path: req.Path, Status: "in-sync", SHA256: localSHA}, nil
+ }
+
+ return &StatusResult{Path: req.Path, Status: "drifted", SHA256: localSHA}, nil
+}
+```
+
+### Step 7: Run tests
+
+```bash
+go test ./internal/provider/file/... -count=1 -v
+```
+
+### Step 8: Commit
+
+```bash
+git add internal/provider/file/
+git commit -m "feat(file): implement deploy with SHA idempotency and status check"
+```
+
+---
+
+## Task 8: Template Rendering
+
+Add Go `text/template` rendering support to the file provider.
+
+**Files:**
+
+- Create: `internal/provider/file/template.go`
+- Create: `internal/provider/file/template_public_test.go`
+
+### Step 1: Define template context
+
+In `template.go`:
+
+```go
+// TemplateContext is the data available to Go templates during rendering.
+type TemplateContext struct {
+ Facts *job.FactsRegistration
+ Vars map[string]any
+ Hostname string
+}
+```
+
+### Step 2: Write failing template tests
+
+Create `template_public_test.go`:
+
+| Case | Template | Vars/Facts | Expected |
+| ---------------------------- | ------------------------------------------------------------------ | ------------------------------- | ------------------ |
+| when simple var substitution | `server {{ .Vars.host }}` | `{"host":"10.0.0.1"}` | `server 10.0.0.1` |
+| when fact reference | `arch: {{ .Facts.Architecture }}` | Facts with Architecture="amd64" | `arch: amd64` |
+| when conditional | `{{ if eq .Facts.Architecture "arm64" }}arm{{ else }}x86{{ end }}` | Architecture="amd64" | `x86` |
+| when hostname | `# {{ .Hostname }}` | hostname="web-01" | `# web-01` |
+| when invalid template syntax | `{{ .Invalid` | — | error |
+| when nil facts | `{{ .Hostname }}` | nil facts | uses hostname only |
+
+### Step 3: Implement renderTemplate
+
+```go
+func (p *FileProvider) renderTemplate(
+ rawTemplate []byte,
+ vars map[string]any,
+) ([]byte, error) {
+ tmpl, err := template.New("file").Parse(string(rawTemplate))
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse template: %w", err)
+ }
+
+ ctx := TemplateContext{
+ Facts: p.cachedFacts,
+ Vars: vars,
+ Hostname: p.hostname,
+ }
+
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, ctx); err != nil {
+ return nil, fmt.Errorf("failed to execute template: %w", err)
+ }
+
+ return buf.Bytes(), nil
+}
+```
+
+### Step 4: Run tests
+
+```bash
+go test ./internal/provider/file/... -count=1 -v
+```
+
+### Step 5: Commit
+
+```bash
+git add internal/provider/file/template.go \
+ internal/provider/file/template_public_test.go
+git commit -m "feat(file): add Go text/template rendering with facts and vars"
+```
+
+---
+
+## Task 9: Agent Wiring + Processor Dispatch
+
+Add Object Store, file-state KV, and file provider to the agent. Add `file`
+category to the processor dispatcher.
+
+**Files:**
+
+- Modify: `internal/agent/types.go`
+- Modify: `internal/agent/agent.go` (New constructor)
+- Create: `internal/agent/processor_file.go`
+- Create: `internal/agent/processor_file_test.go`
+- Modify: `internal/agent/processor.go`
+- Modify: `internal/agent/processor_test.go`
+- Modify: `cmd/agent_helpers.go`
+- Modify: `cmd/api_helpers.go`
+
+### Step 1: Update Agent struct
+
+In `internal/agent/types.go`, add:
+
+```go
+import (
+ // ... existing ...
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
+)
+
+type Agent struct {
+ // ... existing fields ...
+
+ // File provider for file deploy/status operations
+ fileProvider fileProv.Provider
+
+ // Object Store handle (shared primitive for future providers)
+ objStore jetstream.ObjectStore
+
+ // File-state KV for SHA tracking
+ fileStateKV jetstream.KeyValue
+}
+```
+
+### Step 2: Update constructor
+
+In `internal/agent/agent.go`, add parameters to `New()`:
+
+```go
+func New(
+ // ... existing params ...
+ fileProvider fileProv.Provider,
+ objStore jetstream.ObjectStore,
+ fileStateKV jetstream.KeyValue,
+) *Agent {
+```
+
+### Step 3: Create processor_file.go
+
+```go
+func (a *Agent) processFileOperation(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+	// Operations are "file.<op>.<verb>" (e.g. "file.deploy.execute"); the outer
+	// dispatcher already matched segment 0 ("file"), so switch on segment 1.
+	baseOperation := strings.Split(jobRequest.Operation, ".")[1]
+
+ switch baseOperation {
+ case "deploy":
+ return a.processFileDeploy(jobRequest)
+ case "status":
+ return a.processFileStatus(jobRequest)
+ default:
+ return nil, fmt.Errorf("unsupported file operation: %s", jobRequest.Operation)
+ }
+}
+
+func (a *Agent) processFileDeploy(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+ var req fileProv.DeployRequest
+ if err := json.Unmarshal(jobRequest.Data, &req); err != nil {
+ return nil, fmt.Errorf("failed to parse file deploy data: %w", err)
+ }
+
+ result, err := a.fileProvider.Deploy(context.Background(), req)
+ if err != nil {
+ return nil, fmt.Errorf("file deploy failed: %w", err)
+ }
+
+ return json.Marshal(result)
+}
+
+func (a *Agent) processFileStatus(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+ var req fileProv.StatusRequest
+ if err := json.Unmarshal(jobRequest.Data, &req); err != nil {
+ return nil, fmt.Errorf("failed to parse file status data: %w", err)
+ }
+
+ result, err := a.fileProvider.Status(context.Background(), req)
+ if err != nil {
+ return nil, fmt.Errorf("file status failed: %w", err)
+ }
+
+ return json.Marshal(result)
+}
+```
+
+### Step 4: Update processor.go dispatch
+
+Add to `processJobOperation()`:
+
+```go
+case "file":
+ return a.processFileOperation(jobRequest)
+```
+
+### Step 5: Write processor tests
+
+Add test cases to `processor_test.go` for the file category, and create
+`processor_file_test.go` for the file sub-dispatch.
+
+### Step 6: Update startup wiring
+
+In `cmd/agent_helpers.go`:
+
+```go
+// Create Object Store handle
+var objStore jetstream.ObjectStore
+if appConfig.NATS.Objects.Bucket != "" {
+ objStoreName := job.ApplyNamespaceToInfraName(namespace, appConfig.NATS.Objects.Bucket)
+ objStore, _ = nc.ObjectStore(ctx, objStoreName)
+}
+
+// Create file-state KV
+var fileStateKV jetstream.KeyValue
+if appConfig.NATS.FileState.Bucket != "" {
+ fileStateKVConfig := cli.BuildFileStateKVConfig(namespace, appConfig.NATS.FileState)
+ fileStateKV, _ = nc.CreateOrUpdateKVBucketWithConfig(ctx, fileStateKVConfig)
+}
+
+// Create file provider (after agent hostname is resolved)
+fileProvider := fileProv.New(log, appFs, objStore, fileStateKV, hostname, nil)
+
+a := agent.New(
+ // ... existing args ...
+ fileProvider,
+ objStore,
+ fileStateKV,
+)
+```
+
+**Note:** The file provider's `cachedFacts` is initially nil and gets updated
+when facts are collected. Add a method or field update in the facts collection
+loop to keep the file provider's facts current.
+
+### Step 7: Update all existing tests that call agent.New()
+
+Every test that constructs an `Agent` needs the new parameters. Pass `nil` for
+file provider, objStore, and fileStateKV in tests that don't exercise file
+operations.
+
+### Step 8: Run tests
+
+```bash
+go build ./...
+go test ./internal/agent/... -count=1 -v
+```
+
+### Step 9: Commit
+
+```bash
+git add internal/agent/ cmd/agent_helpers.go cmd/api_helpers.go
+git commit -m "feat(agent): wire file provider and Object Store into agent"
+```
+
+---
+
+## Task 10: Job Client Methods for File Deploy/Status
+
+Add convenience methods to the job client for triggering file operations.
+
+**Files:**
+
+- Modify: `internal/job/client/types.go` (JobClient interface)
+- Create: `internal/job/client/file.go`
+- Create: `internal/job/client/file_public_test.go`
+- Modify: `internal/job/mocks/job_client.gen.go` (regenerate)
+
+### Step 1: Add interface methods
+
+In `internal/job/client/types.go`, add to `JobClient`:
+
+```go
+// File operations
+ModifyFileDeploy(
+ ctx context.Context,
+ hostname string,
+ objectName string,
+ path string,
+ contentType string,
+ mode string,
+ owner string,
+ group string,
+ vars map[string]any,
+) (string, string, bool, error)
+
+QueryFileStatus(
+ ctx context.Context,
+ hostname string,
+ path string,
+) (string, *file.StatusResult, error)
+```
+
+### Step 2: Write failing tests
+
+Test the job creation, subject routing, and response parsing.
+
+### Step 3: Implement methods
+
+Follow the pattern of `ModifyNetworkDNS` and `QueryNodeStatus`:
+
+```go
+func (c *Client) ModifyFileDeploy(
+ ctx context.Context,
+ hostname string,
+ objectName string,
+ path string,
+ contentType string,
+ mode string,
+ owner string,
+ group string,
+ vars map[string]any,
+) (string, string, bool, error) {
+ data, _ := json.Marshal(file.DeployRequest{
+ ObjectName: objectName,
+ Path: path,
+ Mode: mode,
+ Owner: owner,
+ Group: group,
+ ContentType: contentType,
+ Vars: vars,
+ })
+
+ req := &job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: job.OperationFileDeployExecute,
+ Data: json.RawMessage(data),
+ }
+
+ subject := job.BuildSubjectFromTarget(job.JobsModifyPrefix, hostname)
+ jobID, resp, err := c.publishAndWait(ctx, subject, req)
+ if err != nil {
+ return "", "", false, err
+ }
+
+ changed := resp.Changed != nil && *resp.Changed
+ return jobID, resp.Hostname, changed, nil
+}
+```
+
+### Step 4: Regenerate mocks
+
+```bash
+go generate ./internal/job/mocks/...
+```
+
+### Step 5: Run tests
+
+```bash
+go test ./internal/job/client/... -count=1 -v
+```
+
+### Step 6: Commit
+
+```bash
+git add internal/job/client/ internal/job/mocks/
+git commit -m "feat(job): add file deploy and status job client methods"
+```
+
+---
+
+## Task 11: Node API Endpoints for File Deploy/Status
+
+Add REST endpoints for triggering file deploy and status through the node
+domain.
+
+**Files:**
+
+- Modify: `internal/api/node/gen/api.yaml`
+- Regenerate: `internal/api/node/gen/node.gen.go`
+- Create: `internal/api/node/file_deploy_post.go`
+- Create: `internal/api/node/file_deploy_post_public_test.go`
+- Create: `internal/api/node/file_status_post.go`
+- Create: `internal/api/node/file_status_post_public_test.go`
+
+### Step 1: Add to node OpenAPI spec
+
+Add paths and schemas to `internal/api/node/gen/api.yaml`:
+
+```yaml
+/node/{hostname}/file/deploy:
+ post:
+ operationId: PostNodeFileDeploy
+ summary: Deploy a file from Object Store to the host
+ security:
+ - BearerAuth:
+ - 'file:write'
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployRequest'
+ responses:
+ '202':
+ description: File deploy job accepted.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployResponse'
+ '400':
+ description: Invalid input.
+ '500':
+ description: Internal error.
+
+/node/{hostname}/file/status:
+ post:
+ operationId: PostNodeFileStatus
+ summary: Check deployment status of a file on the host
+ security:
+ - BearerAuth:
+ - 'file:read'
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusRequest'
+ responses:
+ '200':
+ description: File status.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusResponse'
+ '400':
+ description: Invalid input.
+ '500':
+ description: Internal error.
+```
+
+Add schemas (the response schemas `FileDeployResponse` and `FileStatusResponse`
+referenced by the paths above must also be defined; the request schemas are):
+
+```yaml
+FileDeployRequest:
+ type: object
+ properties:
+ object_name:
+ type: string
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ path:
+ type: string
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ mode:
+ type: string
+ owner:
+ type: string
+ group:
+ type: string
+ content_type:
+ type: string
+ enum: [raw, template]
+ x-oapi-codegen-extra-tags:
+ validate: required,oneof=raw template
+ vars:
+ type: object
+ additionalProperties: true
+ required: [object_name, path, content_type]
+
+FileStatusRequest:
+ type: object
+ properties:
+ path:
+ type: string
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ required: [path]
+```
+
+### Step 2: Regenerate
+
+```bash
+go generate ./internal/api/node/gen/...
+```
+
+### Step 3: Implement handlers
+
+Follow the pattern of `network_dns_put_by_interface.go`. Each handler:
+
+1. Validates hostname
+2. Validates request body
+3. Calls the job client method
+4. Returns the response
+
+### Step 4: Write tests
+
+Table-driven tests with HTTP wiring and RBAC tests for each endpoint.
+
+### Step 5: Run tests
+
+```bash
+go test ./internal/api/node/... -count=1 -v
+```
+
+### Step 6: Commit
+
+```bash
+git add internal/api/node/
+git commit -m "feat(api): add node file deploy and status endpoints"
+```
+
+---
+
+## Task 12: CLI Commands
+
+Add CLI commands for file management and file deployment.
+
+**Files:**
+
+- Create: `cmd/client_file.go` — parent command
+- Create: `cmd/client_file_upload.go`
+- Create: `cmd/client_file_list.go`
+- Create: `cmd/client_file_get.go`
+- Create: `cmd/client_file_delete.go`
+- Create: `cmd/client_node_file.go` — parent under node
+- Create: `cmd/client_node_file_deploy.go`
+- Create: `cmd/client_node_file_status.go`
+
+### Step 1: File management commands
+
+`osapi client file upload`:
+
+```
+--name Name for the file in Object Store (required)
+--file Path to local file to upload (required)
+```
+
+`osapi client file list` — no extra flags
+
+`osapi client file get --name <name>` — show metadata
+
+`osapi client file delete --name <name>` — remove from Object Store
+
+### Step 2: Node file commands
+
+`osapi client node file deploy`:
+
+```
+--object Object name in Object Store (required)
+--path Destination path on host (required)
+--content-type "raw" or "template" (default: "raw")
+--mode File mode (e.g., "0644")
+--owner File owner
+--group File group
+--var Template var (key=value, repeatable)
+-T, --target Target host (default: _any)
+-j, --json Raw JSON output
+```
+
+`osapi client node file status`:
+
+```
+--path File path to check (required)
+-T, --target Target host (default: _any)
+-j, --json Raw JSON output
+```
+
+### Step 3: Implement commands
+
+Follow the pattern of `cmd/client_node_command_exec.go`. Read local file, base64
+encode, call SDK upload. For deploy, call SDK deploy. Handle all response codes
+in switch block.
+
+### Step 4: Test manually
+
+```bash
+go build ./... && ./osapi client file upload --help
+./osapi client node file deploy --help
+```
+
+### Step 5: Commit
+
+```bash
+git add cmd/client_file*.go cmd/client_node_file*.go
+git commit -m "feat(cli): add file upload/list/get/delete and deploy/status commands"
+```
+
+---
+
+## Task 13: SDK Integration
+
+Update the `osapi-sdk` to support the new file endpoints.
+
+**Files (in osapi-sdk repo):**
+
+- Copy: `pkg/osapi/gen/file/api.yaml` (from osapi)
+- Create: `pkg/osapi/file.go` — FileService
+- Modify: `.gilt.yml` — add file spec overlay
+- Regenerate client code
+
+### Step 1: Add file API spec to SDK
+
+Copy `internal/api/file/gen/api.yaml` → `pkg/osapi/gen/file/api.yaml`.
+
+### Step 2: Update gilt overlay
+
+Add file domain to `.gilt.yml` so `just generate` pulls the spec.
+
+### Step 3: Create FileService
+
+```go
+type FileService struct {
+ client *Client
+}
+
+func (s *FileService) Upload(ctx context.Context, name string, content []byte) (*FileInfo, error)
+func (s *FileService) List(ctx context.Context) ([]FileInfo, error)
+func (s *FileService) Get(ctx context.Context, name string) (*FileInfo, error)
+func (s *FileService) Delete(ctx context.Context, name string) error
+```
+
+Deploy/status use the existing job system through `NodeService` or as separate
+methods.
+
+### Step 4: Regenerate and test
+
+```bash
+just generate
+go test ./...
+```
+
+### Step 5: Commit and push SDK
+
+Separate PR on osapi-sdk repo.
+
+---
+
+## Task 14: Orchestrator Integration
+
+Add file operations to `osapi-orchestrator`.
+
+**Files (in osapi-orchestrator repo):**
+
+- Create: `pkg/orchestrator/file.go`
+- Create: example `examples/file-deploy/main.go`
+
+### Step 1: Add orchestrator steps
+
+```go
+func (o *Orchestrator) FileUpload(name, localPath string) *Step
+func (o *Orchestrator) FileDeploy(target, objectName, destPath string, opts ...FileOption) *Step
+func (o *Orchestrator) FileTemplate(target, objectName, destPath string, vars map[string]any, opts ...FileOption) *Step
+```
+
+`FileOption` funcs:
+
+```go
+func WithMode(mode string) FileOption
+func WithOwner(owner, group string) FileOption
+```
+
+### Step 2: OnlyIfChanged integration
+
+`FileDeploy` and `FileTemplate` return `changed: true/false` in the result, so
+`OnlyIfChanged()` guards work naturally:
+
+```go
+upload := o.FileUpload("nginx.conf", "./local/nginx.conf.tmpl")
+deploy := o.FileTemplate("_all", "nginx.conf", "/etc/nginx/nginx.conf",
+ map[string]any{"worker_count": 4},
+ WithMode("0644"),
+ WithOwner("root", "root"),
+).After(upload)
+
+reload := o.CommandExec("_all", "nginx", []string{"-s", "reload"}).
+ After(deploy).
+ OnlyIfChanged()
+```
+
+### Step 3: Commit
+
+Separate PR on osapi-orchestrator repo.
+
+---
+
+## Task 15: Documentation
+
+Update docs for the new feature.
+
+**Files:**
+
+- Create: `docs/docs/sidebar/features/file-management.md`
+- Create: `docs/docs/sidebar/usage/cli/client/file/file.md`
+- Create: `docs/docs/sidebar/usage/cli/client/file/upload.md`
+- Create: `docs/docs/sidebar/usage/cli/client/file/list.md`
+- Create: `docs/docs/sidebar/usage/cli/client/file/get.md`
+- Create: `docs/docs/sidebar/usage/cli/client/file/delete.md`
+- Create: `docs/docs/sidebar/usage/cli/client/node/file/file.md`
+- Create: `docs/docs/sidebar/usage/cli/client/node/file/deploy.md`
+- Create: `docs/docs/sidebar/usage/cli/client/node/file/status.md`
+- Modify: `docs/docusaurus.config.ts` — add to Features dropdown
+- Modify: `docs/docs/sidebar/usage/configuration.md` — add new config
+- Modify: `docs/docs/sidebar/architecture/system-architecture.md` — add
+ endpoints
+
+### Step 1: Feature page
+
+Create `file-management.md` covering:
+
+- What it manages (file deployment with SHA idempotency)
+- How it works (Object Store + file-state KV)
+- Template rendering with facts
+- Permissions (`file:read`, `file:write`)
+- Links to CLI and API docs
+
+### Step 2: CLI docs
+
+One page per command with usage examples, flags table, and `--json` output.
+
+### Step 3: Config docs
+
+Add `nats.objects` and `nats.file_state` sections with env vars:
+
+| Config Key | Env Var |
+| -------------------------- | -------------------------------- |
+| `nats.objects.bucket` | `OSAPI_NATS_OBJECTS_BUCKET` |
+| `nats.objects.max_bytes` | `OSAPI_NATS_OBJECTS_MAX_BYTES` |
+| `nats.objects.storage` | `OSAPI_NATS_OBJECTS_STORAGE` |
+| `nats.objects.replicas` | `OSAPI_NATS_OBJECTS_REPLICAS` |
+| `nats.file_state.bucket` | `OSAPI_NATS_FILE_STATE_BUCKET` |
+| `nats.file_state.storage` | `OSAPI_NATS_FILE_STATE_STORAGE` |
+| `nats.file_state.replicas` | `OSAPI_NATS_FILE_STATE_REPLICAS` |
+
+### Step 4: Commit
+
+```bash
+git add docs/
+git commit -m "docs: add file management feature documentation"
+```
+
+---
+
+## Shared Primitive: Object Store for Future Providers
+
+The Object Store and file-state KV infrastructure built in this plan is designed
+as a **shared primitive**. The agent's `objStore` handle is injected at startup
+and available to any provider. Future providers that would consume this
+infrastructure:
+
+| Provider | Operation | Usage |
+| ----------------- | --------------------------- | ------------------------------- |
+| `firmware.update` | Pull binary, run flash tool | Object Store for firmware blobs |
+| `package.install` | Pull `.deb`/`.rpm`, install | Object Store for packages |
+| `cert.deploy` | Pull TLS cert/key | Object Store + restricted perms |
+| `script.run` | Pull script, execute | Object Store for scripts |
+
+Each provider reuses: Object Store download, SHA comparison, and state tracking
+from the `file-state` KV bucket. No new infrastructure needed.
+
+---
+
+## Verification
+
+After all tasks complete:
+
+```bash
+# Full test suite
+just test
+
+# Manual verification
+osapi client file upload --name nginx.conf --file ./nginx.conf
+osapi client file list
+osapi client file get --name nginx.conf
+osapi client node file deploy \
+ --object nginx.conf --path /etc/nginx/nginx.conf \
+ --mode 0644 --owner root --group root --target _all
+osapi client node file status --path /etc/nginx/nginx.conf --target _all
+
+# Idempotency check (second run should show changed: false)
+osapi client node file deploy \
+ --object nginx.conf --path /etc/nginx/nginx.conf \
+ --mode 0644 --target _all
+```
diff --git a/docs/plans/2026-03-06-multipart-file-upload.md b/docs/plans/2026-03-06-multipart-file-upload.md
new file mode 100644
index 00000000..a8b9418c
--- /dev/null
+++ b/docs/plans/2026-03-06-multipart-file-upload.md
@@ -0,0 +1,179 @@
+# Multipart File Upload with Streaming
+
+> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to
+> implement this plan task-by-task.
+
+**Goal:** Migrate the file upload endpoint from JSON with base64-encoded content
+to `multipart/form-data` with streaming to NATS Object Store. Add a
+`content_type` metadata field to track file purpose (raw vs template). Increase
+Object Store bucket size for large file support.
+
+**Architecture:** Two-pass from temp file — Go's `ParseMultipartForm(32 MiB)`
+spools large files to disk. First pass computes SHA-256 for idempotency check;
+second pass streams to NATS via `Put(io.Reader)`. Memory is bounded at ~32 MiB
+regardless of file size. Content type stored as NATS object header on upload;
+deploy reads it from stored metadata.
+
+**Tech Stack:** Go 1.25, NATS JetStream Object Store, oapi-codegen
+`multipart/form-data`, testify/suite, gomock.
+
+**Design doc:** N/A — plan originated from conversation.
+
+---
+
+## Step 1: Add `Put` to ObjectStoreManager Interface
+
+**File:** `internal/api/file/types.go`
+
+Add streaming `Put` method alongside existing `PutBytes`:
+
+```go
+Put(
+ ctx context.Context,
+ meta *jetstream.ObjectMeta,
+ reader io.Reader,
+ opts ...jetstream.ObjectOpt,
+) (*jetstream.ObjectInfo, error)
+```
+
+Add `io` to imports. Keep `PutBytes` — it's still used elsewhere.
+
+Regenerate mock:
+
+```bash
+go generate ./internal/api/file/mocks/...
+```
+
+---
+
+## Step 2: Update OpenAPI Spec
+
+**File:** `internal/api/file/gen/api.yaml`
+
+### 2a: Change POST /file request body to multipart/form-data
+
+Replace `application/json` + `FileUploadRequest` with:
+
+```yaml
+requestBody:
+ description: The file to upload.
+ required: true
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file in the Object Store.
+ example: 'nginx.conf'
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy. "raw" writes bytes
+ as-is; "template" renders with Go text/template and agent facts.
+ default: raw
+ enum:
+ - raw
+ - template
+ file:
+ type: string
+ format: binary
+ description: The file content.
+ required:
+ - name
+ - file
+```
+
+### 2b: Add `content_type` to response schemas
+
+Add `content_type` string field to `FileUploadResponse`, `FileInfo`, and
+`FileInfoResponse`. Add to their `required` arrays.
+
+### 2c: Regenerate
+
+```bash
+go generate ./internal/api/file/gen/...
+```
+
+---
+
+## Step 3: Rewrite Upload Handler
+
+**File:** `internal/api/file/file_upload.go`
+
+Replace JSON-based handler with multipart streaming:
+
+1. Extract form fields (name, content_type) from multipart body
+2. Validate name manually (multipart fields don't use struct tags)
+3. Open multipart file as `io.ReadSeeker`
+4. First pass: compute SHA-256 via `io.Copy(hash, file)`
+5. Idempotency check against existing digest
+6. Second pass: `file.Seek(0, io.SeekStart)` then stream to NATS via `Put(meta, file)`
+7. Store content_type as `Osapi-Content-Type` NATS header on the object
+
+If oapi-codegen strict-server doesn't parse multipart correctly, fall back to
+custom Echo handler registered in `handler_file.go`.
+
+---
+
+## Step 4: Add `content_type` to Get/List Handlers
+
+**Files:** `internal/api/file/file_get.go`, `internal/api/file/file_list.go`
+
+Read `Osapi-Content-Type` from NATS object headers and include in responses.
+
+---
+
+## Step 5: Increase Object Store Bucket Size
+
+**Files:** `configs/osapi.yaml`, `configs/osapi.local.yaml`
+
+Change `max_bytes` from `104857600` (100 MiB) to `10737418240` (10 GiB).
+
+---
+
+## Step 6: Update Tests
+
+**File:** `internal/api/file/file_upload_public_test.go`
+
+- Rewrite `TestPostFile` for multipart request objects
+- Rewrite `TestPostFileHTTP` to send `multipart/form-data`
+- Rewrite `TestPostFileRBACHTTP` similarly
+- Update file_get and file_list tests to assert `content_type`
+- Add mock expectations for `Put` (streaming) instead of `PutBytes`
+
+---
+
+## Step 7: Update CLI
+
+**File:** `cmd/client_file_upload.go`
+
+- Add `--content-type` flag (default `raw`)
+- Stream file from disk via `os.Open` instead of `os.ReadFile`
+- Pass content_type to SDK `Upload` call
+- Show `Content-Type` in output
+
+---
+
+## Step 8: Update SDK
+
+**Files in `osapi-sdk`:**
+
+- Copy updated `api.yaml` to SDK, regenerate with `redocly join` + `go generate`
+- Add `ContentType` field to `FileUpload`, `FileItem`, `FileMetadata` types
+- Change `Upload` method to accept `io.Reader` and `contentType` parameter
+- Build multipart request body in SDK
+
+---
+
+## Verification
+
+```bash
+go generate ./internal/api/file/gen/...
+go generate ./internal/api/file/mocks/...
+go build ./...
+go test ./internal/api/file/... -count=1 -v
+just go::unit
+just go::vet
+```
diff --git a/go.mod b/go.mod
index b8ce9c40..915686bf 100644
--- a/go.mod
+++ b/go.mod
@@ -16,9 +16,9 @@ require (
github.com/nats-io/nats-server/v2 v2.12.4
github.com/nats-io/nats.go v1.49.0
github.com/oapi-codegen/runtime v1.2.0
- github.com/osapi-io/nats-client v0.0.0-20260222233639-d0822e0a4b86
+ github.com/osapi-io/nats-client v0.0.0-20260306210421-d68b2a0f287b
github.com/osapi-io/nats-server v0.0.0-20260216201410-1f33dfc63848
- github.com/osapi-io/osapi-sdk v0.0.0-20260306055249-0916698b04ef
+ github.com/osapi-io/osapi-sdk v0.0.0-20260307055727-ba9d92f92610
github.com/prometheus-community/pro-bing v0.8.0
github.com/prometheus/client_golang v1.23.2
github.com/samber/slog-echo v1.21.0
@@ -59,7 +59,7 @@ require (
github.com/Djarvur/go-err113 v0.1.1 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
- github.com/MirrexOne/unqueryvet v1.5.3 // indirect
+ github.com/MirrexOne/unqueryvet v1.5.4 // indirect
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
@@ -68,7 +68,7 @@ require (
github.com/alecthomas/kingpin/v2 v2.4.0 // indirect
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
- github.com/alexkohler/prealloc v1.0.2 // indirect
+ github.com/alexkohler/prealloc v1.1.0 // indirect
github.com/alfatraining/structtag v1.0.0 // indirect
github.com/alingse/asasalint v0.0.11 // indirect
github.com/alingse/nilnesserr v0.2.0 // indirect
@@ -144,7 +144,7 @@ require (
github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
github.com/golangci/go-printf-func-name v0.1.1 // indirect
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
- github.com/golangci/golangci-lint/v2 v2.10.1 // indirect
+ github.com/golangci/golangci-lint/v2 v2.11.1 // indirect
github.com/golangci/golines v0.15.0 // indirect
github.com/golangci/misspell v0.8.0 // indirect
github.com/golangci/plugin-module-register v0.1.2 // indirect
@@ -170,7 +170,7 @@ require (
github.com/julz/importas v0.2.0 // indirect
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
- github.com/kisielk/errcheck v1.9.0 // indirect
+ github.com/kisielk/errcheck v1.10.0 // indirect
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/klauspost/compress v1.18.4 // indirect
github.com/kulti/thelper v0.7.1 // indirect
@@ -195,7 +195,7 @@ require (
github.com/matoous/godox v1.1.0 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
- github.com/mgechev/revive v1.14.0 // indirect
+ github.com/mgechev/revive v1.15.0 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/minio/highwayhash v1.0.4-0.20251030100505-070ab1a87a76 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
@@ -242,13 +242,13 @@ require (
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
- github.com/securego/gosec/v2 v2.23.0 // indirect
+ github.com/securego/gosec/v2 v2.24.7 // indirect
github.com/segmentio/golines v0.13.0 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sirupsen/logrus v1.9.4 // indirect
github.com/sivchari/containedctx v1.0.3 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
- github.com/sonatard/noctx v0.4.0 // indirect
+ github.com/sonatard/noctx v0.5.0 // indirect
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect
github.com/sourcegraph/go-diff v0.7.0 // indirect
github.com/speakeasy-api/jsonpath v0.6.0 // indirect
@@ -268,7 +268,7 @@ require (
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
github.com/ultraware/funlen v0.2.0 // indirect
github.com/ultraware/whitespace v0.2.0 // indirect
- github.com/uudashr/gocognit v1.2.0 // indirect
+ github.com/uudashr/gocognit v1.2.1 // indirect
github.com/uudashr/iface v1.4.1 // indirect
github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect
github.com/woodsbury/decimal128 v1.3.0 // indirect
@@ -324,7 +324,7 @@ require (
github.com/valyala/fasttemplate v1.2.2 // indirect
golang.org/x/crypto v0.48.0 // indirect
golang.org/x/mod v0.33.0 // indirect
- golang.org/x/net v0.50.0 // indirect
+ golang.org/x/net v0.51.0 // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.34.0 // indirect
golang.org/x/tools v0.42.0 // indirect
diff --git a/go.sum b/go.sum
index fd632e25..c3bd1a96 100644
--- a/go.sum
+++ b/go.sum
@@ -86,8 +86,8 @@ github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lpr
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
-github.com/MirrexOne/unqueryvet v1.5.3 h1:LpT3rsH+IY3cQddWF9bg4C7jsbASdGnrOSofY8IPEiw=
-github.com/MirrexOne/unqueryvet v1.5.3/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
+github.com/MirrexOne/unqueryvet v1.5.4 h1:38QOxShO7JmMWT+eCdDMbcUgGCOeJphVkzzRgyLJgsQ=
+github.com/MirrexOne/unqueryvet v1.5.4/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
@@ -115,8 +115,8 @@ github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b h1:mimo19zliBX/vS
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b/go.mod h1:fvzegU4vN3H1qMT+8wDmzjAcDONcgo2/SZ/TyfdUOFs=
github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ=
github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q=
-github.com/alexkohler/prealloc v1.0.2 h1:MPo8cIkGkZytq7WNH9UHv3DIX1mPz1RatPXnZb0zHWQ=
-github.com/alexkohler/prealloc v1.0.2/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
+github.com/alexkohler/prealloc v1.1.0 h1:cKGRBqlXw5iyQGLYhrXrDlcHxugXpTq4tQ5c91wkf8M=
+github.com/alexkohler/prealloc v1.1.0/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc=
github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus=
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
@@ -420,8 +420,8 @@ github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarog
github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
-github.com/golangci/golangci-lint/v2 v2.10.1 h1:flhw5Px6ojbLyEFzXvJn5B2HEdkkRlkhE1SnmCbQBiE=
-github.com/golangci/golangci-lint/v2 v2.10.1/go.mod h1:dBsrOk6zj0vDhlTv+IiJGqkDokR24IVTS7W3EVfPTQY=
+github.com/golangci/golangci-lint/v2 v2.11.1 h1:aGbjflzzKNIdOoq/NawrhFjYpkNY4WzPSeIp2zBbzG8=
+github.com/golangci/golangci-lint/v2 v2.11.1/go.mod h1:wexdFBIQNhHNhDe1oqzlGFE5dYUqlfccWJKWjoWF1GI=
github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0=
github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10=
github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg=
@@ -574,8 +574,8 @@ github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRk
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
-github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M=
-github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
+github.com/kisielk/errcheck v1.10.0 h1:Lvs/YAHP24YKg08LA8oDw2z9fJVme090RAXd90S+rrw=
+github.com/kisielk/errcheck v1.10.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
@@ -671,8 +671,8 @@ github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRC
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/mgechev/revive v1.14.0 h1:CC2Ulb3kV7JFYt+izwORoS3VT/+Plb8BvslI/l1yZsc=
-github.com/mgechev/revive v1.14.0/go.mod h1:MvnujelCZBZCaoDv5B3foPo6WWgULSSFxvfxp7GsPfo=
+github.com/mgechev/revive v1.15.0 h1:vJ0HzSBzfNyPbHKolgiFjHxLek9KUijhqh42yGoqZ8Q=
+github.com/mgechev/revive v1.15.0/go.mod h1:LlAKO3QQe9OJ0pVZzI2GPa8CbXGZ/9lNpCGvK4T/a8A=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
@@ -751,12 +751,12 @@ github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAl
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
github.com/onsi/gomega v1.39.1 h1:1IJLAad4zjPn2PsnhH70V4DKRFlrCzGBNrNaru+Vf28=
github.com/onsi/gomega v1.39.1/go.mod h1:hL6yVALoTOxeWudERyfppUcZXjMwIMLnuSfruD2lcfg=
-github.com/osapi-io/nats-client v0.0.0-20260222233639-d0822e0a4b86 h1:ML0fdgr0M4i6ZNXVEjKZaFMiZgVAMRuruVHF7KFA1Zs=
-github.com/osapi-io/nats-client v0.0.0-20260222233639-d0822e0a4b86/go.mod h1:TQqODOjF2JuAOFrLtm1ItsMzPPAizKfHo+grOMuPDyE=
+github.com/osapi-io/nats-client v0.0.0-20260306210421-d68b2a0f287b h1:d68ZLQLxJWtpqkAjg/xliqvFTf9ZOKtcdZ/3gp5Bgz4=
+github.com/osapi-io/nats-client v0.0.0-20260306210421-d68b2a0f287b/go.mod h1:66M9jRN03gZezKNttR17FCRZyLdF7E0BvBLitfrJl38=
github.com/osapi-io/nats-server v0.0.0-20260216201410-1f33dfc63848 h1:ELW1sTVBn5JIc17mHgd5fhpO3/7btaxJpxykG2Fe0U4=
github.com/osapi-io/nats-server v0.0.0-20260216201410-1f33dfc63848/go.mod h1:4rzeY9jiJF/+Ej4WNwqK5HQ2sflZrEs60GxQpg3Iya8=
-github.com/osapi-io/osapi-sdk v0.0.0-20260306055249-0916698b04ef h1:F0+X0uOVGuHIaui62KTmyhZRBIeL0PXurEPevZXGmDU=
-github.com/osapi-io/osapi-sdk v0.0.0-20260306055249-0916698b04ef/go.mod h1:gL9oHgIkG+VMazSIXO4Nvwd3IXEuzRvuXstGiphSycc=
+github.com/osapi-io/osapi-sdk v0.0.0-20260307055727-ba9d92f92610 h1:79ExRL8H8JsmIqi178benv+jwH28EU12N/HjZ+hiO3c=
+github.com/osapi-io/osapi-sdk v0.0.0-20260307055727-ba9d92f92610/go.mod h1:i9g4jaIL6NVo9MRpz33lAEnY4L7u6aO97/5hN4W3hGE=
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
@@ -860,8 +860,8 @@ github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84d
github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ=
github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
-github.com/securego/gosec/v2 v2.23.0 h1:h4TtF64qFzvnkqvsHC/knT7YC5fqyOCItlVR8+ptEBo=
-github.com/securego/gosec/v2 v2.23.0/go.mod h1:qRHEgXLFuYUDkI2T7W7NJAmOkxVhkR0x9xyHOIcMNZ0=
+github.com/securego/gosec/v2 v2.24.7 h1:3k5yJnrhT1TTdsG0ZsnenlfCcT+7Y/+zeCPHbL7QAn8=
+github.com/securego/gosec/v2 v2.24.7/go.mod h1:AdDJbjcG/XxFgVv7pW19vMNYlFM6+Q6Qy3t6lWAUcEY=
github.com/segmentio/golines v0.13.0 h1:GfbpsxoF4eYuEZD3mxrlsN/XD30m6nOO4QLQj2JIa90=
github.com/segmentio/golines v0.13.0/go.mod h1:MMEi38dnJiyxqFZqFOqN14QMzWHzj/i0+L9Q2MsVr64=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
@@ -880,8 +880,8 @@ github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+W
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
-github.com/sonatard/noctx v0.4.0 h1:7MC/5Gg4SQ4lhLYR6mvOP6mQVSxCrdyiExo7atBs27o=
-github.com/sonatard/noctx v0.4.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
+github.com/sonatard/noctx v0.5.0 h1:e/jdaqAsuWVOKQ0P6NWiIdDNHmHT5SwuuSfojFjzwrw=
+github.com/sonatard/noctx v0.5.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U=
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
@@ -963,8 +963,8 @@ github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLk
github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
-github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA=
-github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU=
+github.com/uudashr/gocognit v1.2.1 h1:CSJynt5txTnORn/DkhiB4mZjwPuifyASC8/6Q0I/QS4=
+github.com/uudashr/gocognit v1.2.1/go.mod h1:acaubQc6xYlXFEMb9nWX2dYBzJ/bIjEkc1zzvyIZg5Q=
github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU=
github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
@@ -1179,8 +1179,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
-golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
-golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
+golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
+golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
diff --git a/internal/agent/agent.go b/internal/agent/agent.go
index 5aa73330..486067c8 100644
--- a/internal/agent/agent.go
+++ b/internal/agent/agent.go
@@ -28,7 +28,9 @@ import (
"github.com/retr0h/osapi/internal/config"
"github.com/retr0h/osapi/internal/job/client"
+ "github.com/retr0h/osapi/internal/provider"
"github.com/retr0h/osapi/internal/provider/command"
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
"github.com/retr0h/osapi/internal/provider/network/dns"
"github.com/retr0h/osapi/internal/provider/network/netinfo"
"github.com/retr0h/osapi/internal/provider/network/ping"
@@ -53,10 +55,11 @@ func New(
pingProvider ping.Provider,
netinfoProvider netinfo.Provider,
commandProvider command.Provider,
+ fileProvider fileProv.Provider,
registryKV jetstream.KeyValue,
factsKV jetstream.KeyValue,
) *Agent {
- return &Agent{
+ a := &Agent{
logger: logger,
appConfig: appConfig,
appFs: appFs,
@@ -70,7 +73,25 @@ func New(
pingProvider: pingProvider,
netinfoProvider: netinfoProvider,
commandProvider: commandProvider,
+ fileProvider: fileProvider,
registryKV: registryKV,
factsKV: factsKV,
}
+
+ // Wire agent facts into all providers so they can access the latest
+ // facts at execution time (e.g., for template rendering).
+ provider.WireProviderFacts(
+ a.GetFacts,
+ hostProvider,
+ diskProvider,
+ memProvider,
+ loadProvider,
+ dnsProvider,
+ pingProvider,
+ netinfoProvider,
+ commandProvider,
+ fileProvider,
+ )
+
+ return a
}
diff --git a/internal/agent/agent_public_test.go b/internal/agent/agent_public_test.go
index 055cf616..14dad938 100644
--- a/internal/agent/agent_public_test.go
+++ b/internal/agent/agent_public_test.go
@@ -109,6 +109,7 @@ func (s *AgentPublicTestSuite) TestNew() {
commandMocks.NewDefaultMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
s.NotNil(a)
@@ -151,6 +152,7 @@ func (s *AgentPublicTestSuite) TestStart() {
commandMocks.NewDefaultMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
stopFunc: func(a *agent.Agent) {
@@ -200,6 +202,7 @@ func (s *AgentPublicTestSuite) TestStart() {
commandMocks.NewDefaultMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
// Schedule cleanup after Stop returns
diff --git a/internal/agent/consumer_test.go b/internal/agent/consumer_test.go
index 038c95e4..3ef1a9e4 100644
--- a/internal/agent/consumer_test.go
+++ b/internal/agent/consumer_test.go
@@ -104,6 +104,7 @@ func (s *ConsumerTestSuite) SetupTest() {
commandMock,
nil,
nil,
+ nil,
)
}
diff --git a/internal/agent/drain_test.go b/internal/agent/drain_test.go
index 4d746a4f..d436e72c 100644
--- a/internal/agent/drain_test.go
+++ b/internal/agent/drain_test.go
@@ -78,6 +78,7 @@ func (s *DrainTestSuite) SetupTest() {
pingMocks.NewDefaultMockProvider(s.mockCtrl),
netinfoMocks.NewDefaultMockProvider(s.mockCtrl),
commandMocks.NewDefaultMockProvider(s.mockCtrl),
+ nil,
s.mockKV,
nil,
)
diff --git a/internal/agent/facts.go b/internal/agent/facts.go
index 2506e39f..eb3448b3 100644
--- a/internal/agent/facts.go
+++ b/internal/agent/facts.go
@@ -152,6 +152,28 @@ func (a *Agent) writeFacts(
}
}
+// GetFacts returns the agent's current facts as a flat map suitable for
+// template rendering. Returns nil if facts haven't been collected yet.
+// Uses JSON round-trip so the map automatically includes all fields
+// from FactsRegistration without hardcoding field names.
+func (a *Agent) GetFacts() map[string]any {
+ if a.cachedFacts == nil {
+ return nil
+ }
+
+ data, err := marshalJSON(a.cachedFacts)
+ if err != nil {
+ return nil
+ }
+
+ var result map[string]any
+ if err := unmarshalJSON(data, &result); err != nil {
+ return nil
+ }
+
+ return result
+}
+
// factsKey returns the KV key for an agent's facts entry.
func factsKey(
hostname string,
diff --git a/internal/agent/facts_test.go b/internal/agent/facts_test.go
index a4decfa4..d0540f10 100644
--- a/internal/agent/facts_test.go
+++ b/internal/agent/facts_test.go
@@ -85,6 +85,7 @@ func (s *FactsTestSuite) SetupTest() {
s.mockNetinfo,
commandMocks.NewDefaultMockProvider(s.mockCtrl),
nil,
+ nil,
s.mockFactsKV,
)
}
@@ -92,6 +93,7 @@ func (s *FactsTestSuite) SetupTest() {
func (s *FactsTestSuite) TearDownTest() {
s.mockCtrl.Finish()
marshalJSON = json.Marshal
+ unmarshalJSON = json.Unmarshal
factsInterval = 60 * time.Second
}
@@ -245,6 +247,84 @@ func (s *FactsTestSuite) TestStartFactsRefresh() {
}
}
+func (s *FactsTestSuite) TestGetFacts() {
+ tests := []struct {
+ name string
+ setupFunc func()
+ teardownFunc func()
+ validateFunc func(result map[string]any)
+ }{
+ {
+ name: "when cachedFacts is nil returns nil",
+ setupFunc: func() {},
+ validateFunc: func(result map[string]any) {
+ s.Nil(result)
+ },
+ },
+ {
+ name: "when cachedFacts populated returns fact map",
+ setupFunc: func() {
+ s.agent.cachedFacts = &job.FactsRegistration{
+ Architecture: "amd64",
+ CPUCount: 4,
+ FQDN: "test.local",
+ }
+ },
+ validateFunc: func(result map[string]any) {
+ s.Require().NotNil(result)
+ s.Equal("amd64", result["architecture"])
+ s.Equal(float64(4), result["cpu_count"])
+ s.Equal("test.local", result["fqdn"])
+ },
+ },
+ {
+ name: "when marshal fails returns nil",
+ setupFunc: func() {
+ s.agent.cachedFacts = &job.FactsRegistration{
+ Architecture: "amd64",
+ }
+ marshalJSON = func(_ interface{}) ([]byte, error) {
+ return nil, fmt.Errorf("marshal failure")
+ }
+ },
+ teardownFunc: func() {
+ marshalJSON = json.Marshal
+ },
+ validateFunc: func(result map[string]any) {
+ s.Nil(result)
+ },
+ },
+ {
+ name: "when unmarshal fails returns nil",
+ setupFunc: func() {
+ s.agent.cachedFacts = &job.FactsRegistration{
+ Architecture: "amd64",
+ }
+ unmarshalJSON = func(_ []byte, _ interface{}) error {
+ return fmt.Errorf("unmarshal failure")
+ }
+ },
+ teardownFunc: func() {
+ unmarshalJSON = json.Unmarshal
+ },
+ validateFunc: func(result map[string]any) {
+ s.Nil(result)
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ tt.setupFunc()
+ if tt.teardownFunc != nil {
+ defer tt.teardownFunc()
+ }
+ result := s.agent.GetFacts()
+ tt.validateFunc(result)
+ })
+ }
+}
+
func (s *FactsTestSuite) TestFactsKey() {
tests := []struct {
name string
diff --git a/internal/agent/handler_test.go b/internal/agent/handler_test.go
index ccde0457..c5d8dba2 100644
--- a/internal/agent/handler_test.go
+++ b/internal/agent/handler_test.go
@@ -36,6 +36,7 @@ import (
"github.com/retr0h/osapi/internal/job"
"github.com/retr0h/osapi/internal/job/mocks"
commandMocks "github.com/retr0h/osapi/internal/provider/command/mocks"
+ fileMocks "github.com/retr0h/osapi/internal/provider/file/mocks"
"github.com/retr0h/osapi/internal/provider/network/dns"
dnsMocks "github.com/retr0h/osapi/internal/provider/network/dns/mocks"
netinfoMocks "github.com/retr0h/osapi/internal/provider/network/netinfo/mocks"
@@ -100,6 +101,7 @@ func (s *HandlerTestSuite) SetupTest() {
netinfoMock := netinfoMocks.NewDefaultMockProvider(s.mockCtrl)
commandMock := commandMocks.NewDefaultMockProvider(s.mockCtrl)
+ fMock := fileMocks.NewDefaultMockProvider(s.mockCtrl)
s.agent = New(
appFs,
@@ -115,6 +117,7 @@ func (s *HandlerTestSuite) SetupTest() {
pingMock,
netinfoMock,
commandMock,
+ fMock,
nil,
nil,
)
diff --git a/internal/agent/heartbeat.go b/internal/agent/heartbeat.go
index ecbb437a..d2e9275f 100644
--- a/internal/agent/heartbeat.go
+++ b/internal/agent/heartbeat.go
@@ -38,6 +38,9 @@ var heartbeatInterval = 10 * time.Second
// marshalJSON is a package-level variable for testing the marshal error path.
var marshalJSON = json.Marshal
+// unmarshalJSON is a package-level variable for testing the unmarshal error path.
+var unmarshalJSON = json.Unmarshal
+
// startHeartbeat writes the initial registration, spawns a goroutine that
// refreshes the entry on a ticker, and deregisters on ctx.Done().
func (a *Agent) startHeartbeat(
diff --git a/internal/agent/heartbeat_public_test.go b/internal/agent/heartbeat_public_test.go
index 93db8d22..0869274f 100644
--- a/internal/agent/heartbeat_public_test.go
+++ b/internal/agent/heartbeat_public_test.go
@@ -143,6 +143,7 @@ func (s *HeartbeatPublicTestSuite) TestStartWithHeartbeat() {
pingMocks.NewDefaultMockProvider(s.mockCtrl),
netinfoMocks.NewDefaultMockProvider(s.mockCtrl),
commandMocks.NewDefaultMockProvider(s.mockCtrl),
+ nil,
s.mockKV,
nil,
)
@@ -184,6 +185,7 @@ func (s *HeartbeatPublicTestSuite) TestStartWithHeartbeat() {
commandMocks.NewDefaultMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
stopFunc: func(a *agent.Agent) {
diff --git a/internal/agent/heartbeat_test.go b/internal/agent/heartbeat_test.go
index fb64492f..718242c6 100644
--- a/internal/agent/heartbeat_test.go
+++ b/internal/agent/heartbeat_test.go
@@ -81,6 +81,7 @@ func (s *HeartbeatTestSuite) SetupTest() {
pingMocks.NewDefaultMockProvider(s.mockCtrl),
netinfoMocks.NewDefaultMockProvider(s.mockCtrl),
commandMocks.NewDefaultMockProvider(s.mockCtrl),
+ nil,
s.mockKV,
nil,
)
diff --git a/internal/agent/processor.go b/internal/agent/processor.go
index 719db046..30ad3a5c 100644
--- a/internal/agent/processor.go
+++ b/internal/agent/processor.go
@@ -52,6 +52,8 @@ func (a *Agent) processJobOperation(
return a.processNetworkOperation(jobRequest)
case "command":
return a.processCommandOperation(jobRequest)
+ case "file":
+ return a.processFileOperation(jobRequest)
default:
return nil, fmt.Errorf("unsupported job category: %s", jobRequest.Category)
}
diff --git a/internal/agent/processor_command_test.go b/internal/agent/processor_command_test.go
index e0da3093..34cb791e 100644
--- a/internal/agent/processor_command_test.go
+++ b/internal/agent/processor_command_test.go
@@ -79,6 +79,7 @@ func (s *ProcessorCommandTestSuite) newAgentWithCommandMock(
cmdMock,
nil,
nil,
+ nil,
)
}
diff --git a/internal/agent/processor_file.go b/internal/agent/processor_file.go
new file mode 100644
index 00000000..4c11cb9d
--- /dev/null
+++ b/internal/agent/processor_file.go
@@ -0,0 +1,91 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package agent
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/retr0h/osapi/internal/job"
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
+)
+
+// processFileOperation handles file-related operations.
+func (a *Agent) processFileOperation(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+ if a.fileProvider == nil {
+ return nil, fmt.Errorf("file provider not configured")
+ }
+
+ // Extract base operation from dotted operation (e.g., "deploy.execute" -> "deploy")
+ baseOperation := strings.Split(jobRequest.Operation, ".")[0]
+
+ switch baseOperation {
+ case "deploy":
+ return a.processFileDeploy(jobRequest)
+ case "status":
+ return a.processFileStatus(jobRequest)
+ default:
+ return nil, fmt.Errorf("unsupported file operation: %s", jobRequest.Operation)
+ }
+}
+
+// processFileDeploy handles file deploy operations.
+func (a *Agent) processFileDeploy(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+ var req fileProv.DeployRequest
+ if err := json.Unmarshal(jobRequest.Data, &req); err != nil {
+ return nil, fmt.Errorf("failed to parse file deploy data: %w", err)
+ }
+
+ result, err := a.fileProvider.Deploy(context.Background(), req)
+ if err != nil {
+ return nil, fmt.Errorf("file deploy failed: %w", err)
+ }
+
+ return json.Marshal(result)
+}
+
+// processFileStatus handles file status operations.
+func (a *Agent) processFileStatus(
+ jobRequest job.Request,
+) (json.RawMessage, error) {
+ var req fileProv.StatusRequest
+ if err := json.Unmarshal(jobRequest.Data, &req); err != nil {
+ return nil, fmt.Errorf("failed to parse file status data: %w", err)
+ }
+
+ result, err := a.fileProvider.Status(context.Background(), req)
+ if err != nil {
+ return nil, fmt.Errorf("file status failed: %w", err)
+ }
+
+ return json.Marshal(result)
+}
+
+// getFileProvider returns the file provider.
+func (a *Agent) getFileProvider() fileProv.Provider {
+ return a.fileProvider
+}
diff --git a/internal/agent/processor_file_test.go b/internal/agent/processor_file_test.go
new file mode 100644
index 00000000..5cb5882d
--- /dev/null
+++ b/internal/agent/processor_file_test.go
@@ -0,0 +1,303 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package agent
+
+import (
+ "encoding/json"
+ "errors"
+ "log/slog"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/config"
+ "github.com/retr0h/osapi/internal/job"
+ "github.com/retr0h/osapi/internal/job/mocks"
+ commandMocks "github.com/retr0h/osapi/internal/provider/command/mocks"
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
+ fileMocks "github.com/retr0h/osapi/internal/provider/file/mocks"
+ dnsMocks "github.com/retr0h/osapi/internal/provider/network/dns/mocks"
+ netinfoMocks "github.com/retr0h/osapi/internal/provider/network/netinfo/mocks"
+ pingMocks "github.com/retr0h/osapi/internal/provider/network/ping/mocks"
+ diskMocks "github.com/retr0h/osapi/internal/provider/node/disk/mocks"
+ hostMocks "github.com/retr0h/osapi/internal/provider/node/host/mocks"
+ loadMocks "github.com/retr0h/osapi/internal/provider/node/load/mocks"
+ memMocks "github.com/retr0h/osapi/internal/provider/node/mem/mocks"
+)
+
+type ProcessorFileTestSuite struct {
+ suite.Suite
+
+ mockCtrl *gomock.Controller
+ mockJobClient *mocks.MockJobClient
+}
+
+func (s *ProcessorFileTestSuite) SetupTest() {
+ s.mockCtrl = gomock.NewController(s.T())
+ s.mockJobClient = mocks.NewMockJobClient(s.mockCtrl)
+}
+
+func (s *ProcessorFileTestSuite) TearDownTest() {
+ s.mockCtrl.Finish()
+}
+
+func (s *ProcessorFileTestSuite) newAgentWithFileMock(
+ fileMock fileProv.Provider,
+) *Agent {
+ return New(
+ afero.NewMemMapFs(),
+ config.Config{},
+ slog.Default(),
+ s.mockJobClient,
+ "test-stream",
+ hostMocks.NewPlainMockProvider(s.mockCtrl),
+ diskMocks.NewPlainMockProvider(s.mockCtrl),
+ memMocks.NewPlainMockProvider(s.mockCtrl),
+ loadMocks.NewPlainMockProvider(s.mockCtrl),
+ dnsMocks.NewPlainMockProvider(s.mockCtrl),
+ pingMocks.NewPlainMockProvider(s.mockCtrl),
+ netinfoMocks.NewPlainMockProvider(s.mockCtrl),
+ commandMocks.NewPlainMockProvider(s.mockCtrl),
+ fileMock,
+ nil,
+ nil,
+ )
+}
+
+func (s *ProcessorFileTestSuite) TestProcessFileOperation() {
+ tests := []struct {
+ name string
+ jobRequest job.Request
+ setupMock func(*fileMocks.MockProvider)
+ expectError bool
+ errorMsg string
+ validate func(json.RawMessage)
+ }{
+ {
+ name: "successful deploy operation",
+ jobRequest: job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "deploy.execute",
+ Data: json.RawMessage(
+ `{"object_name":"app.conf","path":"/etc/app/app.conf","mode":"0644","content_type":"raw"}`,
+ ),
+ },
+ setupMock: func(m *fileMocks.MockProvider) {
+ m.EXPECT().
+ Deploy(gomock.Any(), fileProv.DeployRequest{
+ ObjectName: "app.conf",
+ Path: "/etc/app/app.conf",
+ Mode: "0644",
+ ContentType: "raw",
+ }).
+ Return(&fileProv.DeployResult{
+ Changed: true,
+ SHA256: "abc123def456",
+ Path: "/etc/app/app.conf",
+ }, nil)
+ },
+ validate: func(result json.RawMessage) {
+ var r fileProv.DeployResult
+ err := json.Unmarshal(result, &r)
+ s.NoError(err)
+ s.True(r.Changed)
+ s.Equal("abc123def456", r.SHA256)
+ s.Equal("/etc/app/app.conf", r.Path)
+ },
+ },
+ {
+ name: "successful status operation",
+ jobRequest: job.Request{
+ Type: job.TypeQuery,
+ Category: "file",
+ Operation: "status.get",
+ Data: json.RawMessage(`{"path":"/etc/app/app.conf"}`),
+ },
+ setupMock: func(m *fileMocks.MockProvider) {
+ m.EXPECT().
+ Status(gomock.Any(), fileProv.StatusRequest{
+ Path: "/etc/app/app.conf",
+ }).
+ Return(&fileProv.StatusResult{
+ Path: "/etc/app/app.conf",
+ Status: "in-sync",
+ SHA256: "abc123def456",
+ }, nil)
+ },
+ validate: func(result json.RawMessage) {
+ var r fileProv.StatusResult
+ err := json.Unmarshal(result, &r)
+ s.NoError(err)
+ s.Equal("in-sync", r.Status)
+ s.Equal("/etc/app/app.conf", r.Path)
+ s.Equal("abc123def456", r.SHA256)
+ },
+ },
+ {
+ name: "unsupported file operation",
+ jobRequest: job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "unknown.execute",
+ Data: json.RawMessage(`{}`),
+ },
+ setupMock: func(_ *fileMocks.MockProvider) {},
+ expectError: true,
+ errorMsg: "unsupported file operation",
+ },
+ {
+ name: "deploy with invalid JSON data",
+ jobRequest: job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "deploy.execute",
+ Data: json.RawMessage(`invalid json`),
+ },
+ setupMock: func(_ *fileMocks.MockProvider) {},
+ expectError: true,
+ errorMsg: "failed to parse file deploy data",
+ },
+ {
+ name: "status with invalid JSON data",
+ jobRequest: job.Request{
+ Type: job.TypeQuery,
+ Category: "file",
+ Operation: "status.get",
+ Data: json.RawMessage(`invalid json`),
+ },
+ setupMock: func(_ *fileMocks.MockProvider) {},
+ expectError: true,
+ errorMsg: "failed to parse file status data",
+ },
+ {
+ name: "deploy provider error",
+ jobRequest: job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "deploy.execute",
+ Data: json.RawMessage(
+ `{"object_name":"app.conf","path":"/etc/app/app.conf","content_type":"raw"}`,
+ ),
+ },
+ setupMock: func(m *fileMocks.MockProvider) {
+ m.EXPECT().
+ Deploy(gomock.Any(), gomock.Any()).
+ Return(nil, errors.New("object not found"))
+ },
+ expectError: true,
+ errorMsg: "file deploy failed",
+ },
+ {
+ name: "status provider error",
+ jobRequest: job.Request{
+ Type: job.TypeQuery,
+ Category: "file",
+ Operation: "status.get",
+ Data: json.RawMessage(`{"path":"/etc/app/app.conf"}`),
+ },
+ setupMock: func(m *fileMocks.MockProvider) {
+ m.EXPECT().
+ Status(gomock.Any(), gomock.Any()).
+ Return(nil, errors.New("state KV unavailable"))
+ },
+ expectError: true,
+ errorMsg: "file status failed",
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ fMock := fileMocks.NewMockProvider(s.mockCtrl)
+ tt.setupMock(fMock)
+
+ a := s.newAgentWithFileMock(fMock)
+ result, err := a.processFileOperation(tt.jobRequest)
+
+ if tt.expectError {
+ s.Error(err)
+ s.Contains(err.Error(), tt.errorMsg)
+ s.Nil(result)
+ } else {
+ s.NoError(err)
+ s.NotNil(result)
+ if tt.validate != nil {
+ tt.validate(result)
+ }
+ }
+ })
+ }
+}
+
+func (s *ProcessorFileTestSuite) TestProcessFileOperationNilProvider() {
+ tests := []struct {
+ name string
+ errorMsg string
+ }{
+ {
+ name: "returns error when file provider is nil",
+ errorMsg: "file provider not configured",
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ a := s.newAgentWithFileMock(nil)
+ result, err := a.processFileOperation(job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "deploy.execute",
+ Data: json.RawMessage(`{}`),
+ })
+
+ s.Error(err)
+ s.Contains(err.Error(), tt.errorMsg)
+ s.Nil(result)
+ })
+ }
+}
+
+func (s *ProcessorFileTestSuite) TestGetFileProvider() {
+ tests := []struct {
+ name string
+ }{
+ {
+ name: "returns file provider",
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ fMock := fileMocks.NewPlainMockProvider(s.mockCtrl)
+ a := s.newAgentWithFileMock(fMock)
+
+ provider := a.getFileProvider()
+
+ s.NotNil(provider)
+ })
+ }
+}
+
+func TestProcessorFileTestSuite(t *testing.T) {
+ suite.Run(t, new(ProcessorFileTestSuite))
+}
diff --git a/internal/agent/processor_test.go b/internal/agent/processor_test.go
index 291209e7..edeae56c 100644
--- a/internal/agent/processor_test.go
+++ b/internal/agent/processor_test.go
@@ -35,6 +35,7 @@ import (
"github.com/retr0h/osapi/internal/job"
"github.com/retr0h/osapi/internal/job/mocks"
commandMocks "github.com/retr0h/osapi/internal/provider/command/mocks"
+ fileMocks "github.com/retr0h/osapi/internal/provider/file/mocks"
"github.com/retr0h/osapi/internal/provider/network/dns"
dnsMocks "github.com/retr0h/osapi/internal/provider/network/dns/mocks"
netinfoMocks "github.com/retr0h/osapi/internal/provider/network/netinfo/mocks"
@@ -104,6 +105,7 @@ func (s *ProcessorTestSuite) SetupTest() {
netinfoMock := netinfoMocks.NewDefaultMockProvider(s.mockCtrl)
commandMock := commandMocks.NewDefaultMockProvider(s.mockCtrl)
+ fMock := fileMocks.NewDefaultMockProvider(s.mockCtrl)
s.agent = New(
appFs,
@@ -119,6 +121,7 @@ func (s *ProcessorTestSuite) SetupTest() {
pingMock,
netinfoMock,
commandMock,
+ fMock,
nil,
nil,
)
@@ -338,6 +341,40 @@ func (s *ProcessorTestSuite) TestProcessJobOperation() {
s.Contains(response, "stdout")
},
},
+ {
+ name: "successful file deploy operation",
+ jobRequest: job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: "deploy.execute",
+ Data: json.RawMessage(
+ `{"object_name":"app.conf","path":"/etc/mock/file.conf","content_type":"raw"}`,
+ ),
+ },
+ expectError: false,
+ validate: func(result json.RawMessage) {
+ var response map[string]interface{}
+ err := json.Unmarshal(result, &response)
+ s.NoError(err)
+ s.Equal(true, response["changed"])
+ },
+ },
+ {
+ name: "successful file status operation",
+ jobRequest: job.Request{
+ Type: job.TypeQuery,
+ Category: "file",
+ Operation: "status.get",
+ Data: json.RawMessage(`{"path":"/etc/mock/file.conf"}`),
+ },
+ expectError: false,
+ validate: func(result json.RawMessage) {
+ var response map[string]interface{}
+ err := json.Unmarshal(result, &response)
+ s.NoError(err)
+ s.Equal("in-sync", response["status"])
+ },
+ },
{
name: "unsupported job category",
jobRequest: job.Request{
@@ -626,6 +663,10 @@ func (s *ProcessorTestSuite) TestProviderFactoryMethods() {
name: "getCommandProvider",
getProvider: func() interface{} { return s.agent.getCommandProvider() },
},
+ {
+ name: "getFileProvider",
+ getProvider: func() interface{} { return s.agent.getFileProvider() },
+ },
}
for _, tt := range tests {
@@ -666,6 +707,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -694,6 +736,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -720,6 +763,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -746,6 +790,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -772,6 +817,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -798,6 +844,7 @@ func (s *ProcessorTestSuite) TestSystemOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -858,6 +905,7 @@ func (s *ProcessorTestSuite) TestNetworkOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -888,6 +936,7 @@ func (s *ProcessorTestSuite) TestNetworkOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
@@ -916,6 +965,7 @@ func (s *ProcessorTestSuite) TestNetworkOperationErrors() {
commandMocks.NewPlainMockProvider(s.mockCtrl),
nil,
nil,
+ nil,
)
},
},
diff --git a/internal/agent/types.go b/internal/agent/types.go
index 00a7cc9b..27648c57 100644
--- a/internal/agent/types.go
+++ b/internal/agent/types.go
@@ -33,6 +33,7 @@ import (
"github.com/retr0h/osapi/internal/job"
"github.com/retr0h/osapi/internal/job/client"
"github.com/retr0h/osapi/internal/provider/command"
+ fileProv "github.com/retr0h/osapi/internal/provider/file"
"github.com/retr0h/osapi/internal/provider/network/dns"
"github.com/retr0h/osapi/internal/provider/network/netinfo"
"github.com/retr0h/osapi/internal/provider/network/ping"
@@ -66,6 +67,9 @@ type Agent struct {
// Command provider
commandProvider command.Provider
+ // File provider
+ fileProvider fileProv.Provider
+
// Registry KV for heartbeat registration
registryKV jetstream.KeyValue
diff --git a/internal/api/agent/agent_drain_public_test.go b/internal/api/agent/agent_drain_public_test.go
index c804d14d..8bd310ae 100644
--- a/internal/api/agent/agent_drain_public_test.go
+++ b/internal/api/agent/agent_drain_public_test.go
@@ -201,7 +201,7 @@ func (s *AgentDrainPublicTestSuite) TestDrainAgent() {
}
}
-func (s *AgentDrainPublicTestSuite) TestDrainAgentValidationHTTP() {
+func (s *AgentDrainPublicTestSuite) TestDrainAgentHTTP() {
tests := []struct {
name string
hostname string
diff --git a/internal/api/agent/agent_get_public_test.go b/internal/api/agent/agent_get_public_test.go
index 41c3e848..516dd805 100644
--- a/internal/api/agent/agent_get_public_test.go
+++ b/internal/api/agent/agent_get_public_test.go
@@ -138,7 +138,7 @@ func (s *AgentGetPublicTestSuite) TestGetAgentDetails() {
}
}
-func (s *AgentGetPublicTestSuite) TestGetAgentDetailsValidationHTTP() {
+func (s *AgentGetPublicTestSuite) TestGetAgentDetailsHTTP() {
tests := []struct {
name string
hostname string
diff --git a/internal/api/agent/agent_list_public_test.go b/internal/api/agent/agent_list_public_test.go
index ee554506..4818b557 100644
--- a/internal/api/agent/agent_list_public_test.go
+++ b/internal/api/agent/agent_list_public_test.go
@@ -326,7 +326,7 @@ func (s *AgentListPublicTestSuite) TestGetAgent() {
}
}
-func (s *AgentListPublicTestSuite) TestGetAgentValidationHTTP() {
+func (s *AgentListPublicTestSuite) TestGetAgentHTTP() {
tests := []struct {
name string
setupJobMock func() *jobmocks.MockJobClient
diff --git a/internal/api/agent/agent_undrain_public_test.go b/internal/api/agent/agent_undrain_public_test.go
index 21079b13..8050aacb 100644
--- a/internal/api/agent/agent_undrain_public_test.go
+++ b/internal/api/agent/agent_undrain_public_test.go
@@ -215,7 +215,7 @@ func (s *AgentUndrainPublicTestSuite) TestUndrainAgent() {
}
}
-func (s *AgentUndrainPublicTestSuite) TestUndrainAgentValidationHTTP() {
+func (s *AgentUndrainPublicTestSuite) TestUndrainAgentHTTP() {
tests := []struct {
name string
hostname string
diff --git a/internal/api/audit/audit_list_public_test.go b/internal/api/audit/audit_list_public_test.go
index ec9d8149..8b4cabe5 100644
--- a/internal/api/audit/audit_list_public_test.go
+++ b/internal/api/audit/audit_list_public_test.go
@@ -202,7 +202,7 @@ func (s *AuditListPublicTestSuite) TestGetAuditLogs() {
}
}
-func (s *AuditListPublicTestSuite) TestGetAuditLogsHTTP() {
+func (s *AuditListPublicTestSuite) TestGetAuditLogsValidationHTTP() {
tests := []struct {
name string
query string
diff --git a/internal/api/file/file.go b/internal/api/file/file.go
new file mode 100644
index 00000000..f5611db3
--- /dev/null
+++ b/internal/api/file/file.go
@@ -0,0 +1,42 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package file provides file management API handlers.
+package file
+
+import (
+ "log/slog"
+
+ "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+// ensure that we've conformed to the `StrictServerInterface` with a compile-time check
+var _ gen.StrictServerInterface = (*File)(nil)
+
+// New factory to create a new instance.
+func New(
+ logger *slog.Logger,
+ objStore ObjectStoreManager,
+) *File {
+ return &File{
+ objStore: objStore,
+ logger: logger,
+ }
+}
diff --git a/internal/api/file/file_delete.go b/internal/api/file/file_delete.go
new file mode 100644
index 00000000..7ffc88f5
--- /dev/null
+++ b/internal/api/file/file_delete.go
@@ -0,0 +1,74 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log/slog"
+
+ "github.com/nats-io/nats.go/jetstream"
+
+ "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+// DeleteFileByName deletes a file from the Object Store.
+func (f *File) DeleteFileByName(
+ ctx context.Context,
+ request gen.DeleteFileByNameRequestObject,
+) (gen.DeleteFileByNameResponseObject, error) {
+ if errMsg, ok := validateFileName(request.Name); !ok {
+ return gen.DeleteFileByName400JSONResponse{Error: &errMsg}, nil
+ }
+
+ f.logger.Debug("file delete",
+ slog.String("name", request.Name),
+ )
+
+ // Check if the file exists before attempting deletion.
+ _, err := f.objStore.GetInfo(ctx, request.Name)
+ if err != nil {
+ if errors.Is(err, jetstream.ErrObjectNotFound) {
+ errMsg := fmt.Sprintf("file not found: %s", request.Name)
+ return gen.DeleteFileByName404JSONResponse{
+ Error: &errMsg,
+ }, nil
+ }
+
+ errMsg := fmt.Sprintf("failed to get file info: %s", err.Error())
+ return gen.DeleteFileByName500JSONResponse{
+ Error: &errMsg,
+ }, nil
+ }
+
+ if err := f.objStore.Delete(ctx, request.Name); err != nil {
+ errMsg := fmt.Sprintf("failed to delete file: %s", err.Error())
+ return gen.DeleteFileByName500JSONResponse{
+ Error: &errMsg,
+ }, nil
+ }
+
+ return gen.DeleteFileByName200JSONResponse{
+ Name: request.Name,
+ Deleted: true,
+ }, nil
+}
diff --git a/internal/api/file/file_delete_public_test.go b/internal/api/file/file_delete_public_test.go
new file mode 100644
index 00000000..900a9f46
--- /dev/null
+++ b/internal/api/file/file_delete_public_test.go
@@ -0,0 +1,367 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apifile "github.com/retr0h/osapi/internal/api/file"
+ "github.com/retr0h/osapi/internal/api/file/gen"
+ "github.com/retr0h/osapi/internal/api/file/mocks"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+)
+
+// FileDeletePublicTestSuite holds the shared fixtures for the
+// DeleteFileByName handler tests.
+type FileDeletePublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl     *gomock.Controller
+	mockObjStore *mocks.MockObjectStoreManager
+	handler      *apifile.File
+	ctx          context.Context
+	appConfig    config.Config
+	logger       *slog.Logger
+}
+
+// SetupTest creates a fresh mock controller, object store mock, handler,
+// context, config, and logger before each test.
+func (s *FileDeletePublicTestSuite) SetupTest() {
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockObjStore = mocks.NewMockObjectStoreManager(s.mockCtrl)
+	s.handler = apifile.New(slog.Default(), s.mockObjStore)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+}
+
+// TearDownTest asserts that every expected mock call was made.
+func (s *FileDeletePublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish()
+}
+
+// TestDeleteFileByName exercises the handler directly (no HTTP layer):
+// success, validation failure, missing object, GetInfo error, and
+// Delete error.
+func (s *FileDeletePublicTestSuite) TestDeleteFileByName() {
+	tests := []struct {
+		name         string
+		request      gen.DeleteFileByNameRequestObject
+		setupMock    func()
+		validateFunc func(resp gen.DeleteFileByNameResponseObject)
+	}{
+		{
+			name:    "success",
+			request: gen.DeleteFileByNameRequestObject{Name: "nginx.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       1024,
+					}, nil)
+				s.mockObjStore.EXPECT().
+					Delete(gomock.Any(), "nginx.conf").
+					Return(nil)
+			},
+			validateFunc: func(resp gen.DeleteFileByNameResponseObject) {
+				r, ok := resp.(gen.DeleteFileByName200JSONResponse)
+				s.True(ok)
+				s.Equal("nginx.conf", r.Name)
+				s.True(r.Deleted)
+			},
+		},
+		{
+			name:    "validation error name too long",
+			request: gen.DeleteFileByNameRequestObject{Name: strings.Repeat("a", 256)},
+			setupMock: func() {
+				// No mock calls expected; validation rejects before reaching obj store.
+			},
+			validateFunc: func(resp gen.DeleteFileByNameResponseObject) {
+				_, ok := resp.(gen.DeleteFileByName400JSONResponse)
+				s.True(ok)
+			},
+		},
+		{
+			name:    "not found",
+			request: gen.DeleteFileByNameRequestObject{Name: "missing.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "missing.conf").
+					Return(nil, jetstream.ErrObjectNotFound)
+			},
+			validateFunc: func(resp gen.DeleteFileByNameResponseObject) {
+				r, ok := resp.(gen.DeleteFileByName404JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "file not found")
+			},
+		},
+		{
+			name:    "get info error",
+			request: gen.DeleteFileByNameRequestObject{Name: "nginx.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+			},
+			validateFunc: func(resp gen.DeleteFileByNameResponseObject) {
+				_, ok := resp.(gen.DeleteFileByName500JSONResponse)
+				s.True(ok)
+			},
+		},
+		{
+			name:    "delete error",
+			request: gen.DeleteFileByNameRequestObject{Name: "nginx.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       1024,
+					}, nil)
+				s.mockObjStore.EXPECT().
+					Delete(gomock.Any(), "nginx.conf").
+					Return(assert.AnError)
+			},
+			validateFunc: func(resp gen.DeleteFileByNameResponseObject) {
+				_, ok := resp.(gen.DeleteFileByName500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.DeleteFileByName(s.ctx, tt.request)
+			s.NoError(err)
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+// TestDeleteFileByNameValidationHTTP drives the handler through the Echo
+// HTTP stack (no auth configured) and asserts status codes and body
+// fragments.
+func (s *FileDeletePublicTestSuite) TestDeleteFileByNameValidationHTTP() {
+	tests := []struct {
+		name         string
+		path         string
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when name too long returns 400",
+			path: "/file/" + strings.Repeat("a", 256),
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`},
+		},
+		{
+			name: "when delete Ok",
+			path: "/file/nginx.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       1024,
+					}, nil)
+				mock.EXPECT().
+					Delete(gomock.Any(), "nginx.conf").
+					Return(nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"name":"nginx.conf"`, `"deleted":true`},
+		},
+		{
+			name: "when not found",
+			path: "/file/missing.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "missing.conf").
+					Return(nil, jetstream.ErrObjectNotFound)
+				return mock
+			},
+			wantCode:     http.StatusNotFound,
+			wantContains: []string{"file not found"},
+		},
+		{
+			name: "when delete error",
+			path: "/file/nginx.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       1024,
+					}, nil)
+				mock.EXPECT().
+					Delete(gomock.Any(), "nginx.conf").
+					Return(assert.AnError)
+				return mock
+			},
+			wantCode:     http.StatusInternalServerError,
+			wantContains: []string{"failed to delete file"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			fileHandler := apifile.New(s.logger, objMock)
+			strictHandler := gen.NewStrictHandler(fileHandler, nil)
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			req := httptest.NewRequest(http.MethodDelete, tc.path, nil)
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// rbacDeleteTestSigningKey is the JWT signing key used only by the RBAC
+// tests below.
+const rbacDeleteTestSigningKey = "test-signing-key-for-file-delete-rbac"
+
+// TestDeleteFileByNameRBACHTTP verifies auth behavior end-to-end: 401
+// without a token, 403 with only file:read, and 200 with file:write.
+func (s *FileDeletePublicTestSuite) TestDeleteFileByNameRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacDeleteTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"file:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:write returns 200",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacDeleteTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:write"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       1024,
+					}, nil)
+				mock.EXPECT().
+					Delete(gomock.Any(), "nginx.conf").
+					Return(nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"name":"nginx.conf"`, `"deleted":true`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacDeleteTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetFileHandler(objMock)
+			server.RegisterHandlers(handlers)
+
+			req := httptest.NewRequest(http.MethodDelete, "/file/nginx.conf", nil)
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// TestFileDeletePublicTestSuite wires the suite into go test.
+func TestFileDeletePublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileDeletePublicTestSuite))
+}
diff --git a/internal/api/file/file_get.go b/internal/api/file/file_get.go
new file mode 100644
index 00000000..d133b220
--- /dev/null
+++ b/internal/api/file/file_get.go
@@ -0,0 +1,81 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "log/slog"
+ "strings"
+
+ "github.com/nats-io/nats.go/jetstream"
+
+ "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+// GetFileByName gets metadata for a specific file in the Object Store.
+//
+// It returns 400 when the name fails validation, 404 when the object
+// does not exist, 500 on store errors, and 200 with name, SHA-256 (hex),
+// size, and content type on success.
+func (f *File) GetFileByName(
+	ctx context.Context,
+	request gen.GetFileByNameRequestObject,
+) (gen.GetFileByNameResponseObject, error) {
+	if errMsg, ok := validateFileName(request.Name); !ok {
+		return gen.GetFileByName400JSONResponse{Error: &errMsg}, nil
+	}
+
+	f.logger.Debug("file get",
+		slog.String("name", request.Name),
+	)
+
+	info, err := f.objStore.GetInfo(ctx, request.Name)
+	if err != nil {
+		if errors.Is(err, jetstream.ErrObjectNotFound) {
+			errMsg := fmt.Sprintf("file not found: %s", request.Name)
+			return gen.GetFileByName404JSONResponse{
+				Error: &errMsg,
+			}, nil
+		}
+
+		errMsg := fmt.Sprintf("failed to get file info: %s", err.Error())
+		return gen.GetFileByName500JSONResponse{
+			Error: &errMsg,
+		}, nil
+	}
+
+	// JetStream digests look like "SHA-256=<base64url>"; convert to lowercase
+	// hex for the API. If decoding fails, the raw (trimmed) digest string is
+	// returned as-is rather than failing the request.
+	digestB64 := strings.TrimPrefix(info.Digest, "SHA-256=")
+	sha256Hex := digestB64
+	if digestBytes, err := base64.URLEncoding.DecodeString(digestB64); err == nil {
+		sha256Hex = fmt.Sprintf("%x", digestBytes)
+	}
+
+	// Content type is stored as a custom object header on upload.
+	contentType := ""
+	if info.Headers != nil {
+		contentType = info.Headers.Get("Osapi-Content-Type")
+	}
+
+	return gen.GetFileByName200JSONResponse{
+		Name:        info.Name,
+		Sha256:      sha256Hex,
+		Size:        int(info.Size),
+		ContentType: contentType,
+	}, nil
+}
diff --git a/internal/api/file/file_get_public_test.go b/internal/api/file/file_get_public_test.go
new file mode 100644
index 00000000..4ffb770b
--- /dev/null
+++ b/internal/api/file/file_get_public_test.go
@@ -0,0 +1,362 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apifile "github.com/retr0h/osapi/internal/api/file"
+ "github.com/retr0h/osapi/internal/api/file/gen"
+ "github.com/retr0h/osapi/internal/api/file/mocks"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+)
+
+// FileGetPublicTestSuite holds the shared fixtures for the
+// GetFileByName handler tests.
+type FileGetPublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl     *gomock.Controller
+	mockObjStore *mocks.MockObjectStoreManager
+	handler      *apifile.File
+	ctx          context.Context
+	appConfig    config.Config
+	logger       *slog.Logger
+}
+
+// SetupTest creates a fresh mock controller, object store mock, handler,
+// context, config, and logger before each test.
+func (s *FileGetPublicTestSuite) SetupTest() {
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockObjStore = mocks.NewMockObjectStoreManager(s.mockCtrl)
+	s.handler = apifile.New(slog.Default(), s.mockObjStore)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+}
+
+// TearDownTest asserts that every expected mock call was made.
+func (s *FileGetPublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish()
+}
+
+// TestGetFileByName exercises the handler directly (no HTTP layer):
+// success (including digest-to-hex conversion), validation failure,
+// missing object, and store error.
+func (s *FileGetPublicTestSuite) TestGetFileByName() {
+	tests := []struct {
+		name         string
+		request      gen.GetFileByNameRequestObject
+		setupMock    func()
+		validateFunc func(resp gen.GetFileByNameResponseObject)
+	}{
+		{
+			name:    "success",
+			request: gen.GetFileByNameRequestObject{Name: "nginx.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{
+							Name: "nginx.conf",
+							Headers: nats.Header{
+								"Osapi-Content-Type": []string{"raw"},
+							},
+						},
+						Size:   1024,
+						Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+					}, nil)
+			},
+			validateFunc: func(resp gen.GetFileByNameResponseObject) {
+				r, ok := resp.(gen.GetFileByName200JSONResponse)
+				s.True(ok)
+				s.Equal("nginx.conf", r.Name)
+				// Expected hex is the SHA-256 of the empty string, matching the
+				// base64url digest above.
+				s.Equal(
+					"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+					r.Sha256,
+				)
+				s.Equal(1024, r.Size)
+				s.Equal("raw", r.ContentType)
+			},
+		},
+		{
+			name:    "validation error name too long",
+			request: gen.GetFileByNameRequestObject{Name: strings.Repeat("a", 256)},
+			setupMock: func() {
+				// No mock calls expected; validation rejects before reaching obj store.
+			},
+			validateFunc: func(resp gen.GetFileByNameResponseObject) {
+				_, ok := resp.(gen.GetFileByName400JSONResponse)
+				s.True(ok)
+			},
+		},
+		{
+			name:    "not found",
+			request: gen.GetFileByNameRequestObject{Name: "missing.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "missing.conf").
+					Return(nil, jetstream.ErrObjectNotFound)
+			},
+			validateFunc: func(resp gen.GetFileByNameResponseObject) {
+				r, ok := resp.(gen.GetFileByName404JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "file not found")
+			},
+		},
+		{
+			name:    "object store error",
+			request: gen.GetFileByNameRequestObject{Name: "nginx.conf"},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+			},
+			validateFunc: func(resp gen.GetFileByNameResponseObject) {
+				_, ok := resp.(gen.GetFileByName500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.GetFileByName(s.ctx, tt.request)
+			s.NoError(err)
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+// TestGetFileByNameValidationHTTP drives the handler through the Echo
+// HTTP stack (no auth configured) and asserts status codes and body
+// fragments.
+func (s *FileGetPublicTestSuite) TestGetFileByNameValidationHTTP() {
+	tests := []struct {
+		name         string
+		path         string
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when name too long returns 400",
+			path: "/file/" + strings.Repeat("a", 256),
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`},
+		},
+		{
+			name: "when get Ok",
+			path: "/file/nginx.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{
+							Name: "nginx.conf",
+							Headers: nats.Header{
+								"Osapi-Content-Type": []string{"raw"},
+							},
+						},
+						Size:   1024,
+						Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+					}, nil)
+				return mock
+			},
+			wantCode: http.StatusOK,
+			wantContains: []string{
+				`"name":"nginx.conf"`,
+				`"sha256":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"`,
+				`"size":1024`,
+				`"content_type":"raw"`,
+			},
+		},
+		{
+			name: "when not found",
+			path: "/file/missing.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "missing.conf").
+					Return(nil, jetstream.ErrObjectNotFound)
+				return mock
+			},
+			wantCode:     http.StatusNotFound,
+			wantContains: []string{"file not found"},
+		},
+		{
+			name: "when object store error",
+			path: "/file/nginx.conf",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+				return mock
+			},
+			wantCode:     http.StatusInternalServerError,
+			wantContains: []string{"failed to get file info"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			fileHandler := apifile.New(s.logger, objMock)
+			strictHandler := gen.NewStrictHandler(fileHandler, nil)
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// rbacGetTestSigningKey is the JWT signing key used only by the RBAC
+// tests below.
+const rbacGetTestSigningKey = "test-signing-key-for-file-get-rbac"
+
+// TestGetFileByNameRBACHTTP verifies auth behavior end-to-end: 401
+// without a token, 403 with only node:read, and 200 with file:read.
+func (s *FileGetPublicTestSuite) TestGetFileByNameRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacGetTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"node:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:read returns 200",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacGetTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{
+							Name: "nginx.conf",
+							Headers: nats.Header{
+								"Osapi-Content-Type": []string{"raw"},
+							},
+						},
+						Size:   1024,
+						Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+					}, nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"name":"nginx.conf"`, `"sha256"`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacGetTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetFileHandler(objMock)
+			server.RegisterHandlers(handlers)
+
+			req := httptest.NewRequest(http.MethodGet, "/file/nginx.conf", nil)
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// TestFileGetPublicTestSuite wires the suite into go test.
+func TestFileGetPublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileGetPublicTestSuite))
+}
diff --git a/internal/api/file/file_list.go b/internal/api/file/file_list.go
new file mode 100644
index 00000000..b14c08ed
--- /dev/null
+++ b/internal/api/file/file_list.go
@@ -0,0 +1,86 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/nats-io/nats.go/jetstream"
+
+ "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+// GetFiles lists all files stored in the Object Store.
+//
+// It returns 200 with the visible (non-deleted) files and their count;
+// an empty store is reported as an empty list, not an error. Store
+// failures map to 500.
+func (f *File) GetFiles(
+	ctx context.Context,
+	_ gen.GetFilesRequestObject,
+) (gen.GetFilesResponseObject, error) {
+	f.logger.Debug("file list")
+
+	objects, err := f.objStore.List(ctx)
+	if err != nil {
+		// An empty bucket surfaces as ErrNoObjectsFound; treat it as a
+		// successful empty listing.
+		if errors.Is(err, jetstream.ErrNoObjectsFound) {
+			return gen.GetFiles200JSONResponse{
+				Files: []gen.FileInfo{},
+				Total: 0,
+			}, nil
+		}
+
+		errMsg := fmt.Sprintf("failed to list files: %s", err.Error())
+		return gen.GetFiles500JSONResponse{
+			Error: &errMsg,
+		}, nil
+	}
+
+	files := make([]gen.FileInfo, 0, len(objects))
+	for _, obj := range objects {
+		// Tombstoned objects remain in the listing; hide them from clients.
+		if obj.Deleted {
+			continue
+		}
+
+		// JetStream digests look like "SHA-256=<base64url>"; convert to
+		// lowercase hex. On decode failure the raw (trimmed) digest is kept.
+		digestB64 := strings.TrimPrefix(obj.Digest, "SHA-256=")
+		sha256Hex := digestB64
+		if digestBytes, err := base64.URLEncoding.DecodeString(digestB64); err == nil {
+			sha256Hex = fmt.Sprintf("%x", digestBytes)
+		}
+
+		// Content type is stored as a custom object header on upload.
+		contentType := ""
+		if obj.Headers != nil {
+			contentType = obj.Headers.Get("Osapi-Content-Type")
+		}
+
+		files = append(files, gen.FileInfo{
+			Name:        obj.Name,
+			Sha256:      sha256Hex,
+			Size:        int(obj.Size),
+			ContentType: contentType,
+		})
+	}
+
+	return gen.GetFiles200JSONResponse{
+		Files: files,
+		Total: len(files),
+	}, nil
+}
diff --git a/internal/api/file/file_list_public_test.go b/internal/api/file/file_list_public_test.go
new file mode 100644
index 00000000..f9e71d88
--- /dev/null
+++ b/internal/api/file/file_list_public_test.go
@@ -0,0 +1,370 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apifile "github.com/retr0h/osapi/internal/api/file"
+ "github.com/retr0h/osapi/internal/api/file/gen"
+ "github.com/retr0h/osapi/internal/api/file/mocks"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+)
+
+// FileListPublicTestSuite holds the shared fixtures for the GetFiles
+// handler tests.
+type FileListPublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl     *gomock.Controller
+	mockObjStore *mocks.MockObjectStoreManager
+	handler      *apifile.File
+	ctx          context.Context
+	appConfig    config.Config
+	logger       *slog.Logger
+}
+
+// SetupTest creates a fresh mock controller, object store mock, handler,
+// context, config, and logger before each test.
+func (s *FileListPublicTestSuite) SetupTest() {
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockObjStore = mocks.NewMockObjectStoreManager(s.mockCtrl)
+	s.handler = apifile.New(slog.Default(), s.mockObjStore)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+}
+
+// TearDownTest asserts that every expected mock call was made.
+func (s *FileListPublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish()
+}
+
+// TestGetFiles exercises the handler directly (no HTTP layer): populated
+// store, empty store, ErrNoObjectsFound mapping, deleted-object
+// filtering, and store error.
+func (s *FileListPublicTestSuite) TestGetFiles() {
+	tests := []struct {
+		name         string
+		setupMock    func()
+		validateFunc func(resp gen.GetFilesResponseObject)
+	}{
+		{
+			name: "success with files",
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					List(gomock.Any()).
+					Return([]*jetstream.ObjectInfo{
+						{
+							ObjectMeta: jetstream.ObjectMeta{
+								Name: "nginx.conf",
+								Headers: nats.Header{
+									"Osapi-Content-Type": []string{"raw"},
+								},
+							},
+							Size:   1024,
+							Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+						},
+						{
+							ObjectMeta: jetstream.ObjectMeta{
+								Name: "app.yaml",
+								Headers: nats.Header{
+									"Osapi-Content-Type": []string{"template"},
+								},
+							},
+							Size:   512,
+							Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+						},
+					}, nil)
+			},
+			validateFunc: func(resp gen.GetFilesResponseObject) {
+				r, ok := resp.(gen.GetFiles200JSONResponse)
+				s.True(ok)
+				s.Equal(2, r.Total)
+				s.Len(r.Files, 2)
+				s.Equal("nginx.conf", r.Files[0].Name)
+				// Expected hex is the SHA-256 of the empty string, matching
+				// the base64url digest above.
+				s.Equal(
+					"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+					r.Files[0].Sha256,
+				)
+				s.Equal(1024, r.Files[0].Size)
+				s.Equal("raw", r.Files[0].ContentType)
+				s.Equal("template", r.Files[1].ContentType)
+			},
+		},
+		{
+			name: "success with empty store",
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					List(gomock.Any()).
+					Return([]*jetstream.ObjectInfo{}, nil)
+			},
+			validateFunc: func(resp gen.GetFilesResponseObject) {
+				r, ok := resp.(gen.GetFiles200JSONResponse)
+				s.True(ok)
+				s.Equal(0, r.Total)
+				s.Empty(r.Files)
+			},
+		},
+		{
+			name: "when ErrNoObjectsFound returns empty list",
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					List(gomock.Any()).
+					Return(nil, jetstream.ErrNoObjectsFound)
+			},
+			validateFunc: func(resp gen.GetFilesResponseObject) {
+				r, ok := resp.(gen.GetFiles200JSONResponse)
+				s.True(ok)
+				s.Equal(0, r.Total)
+				s.Empty(r.Files)
+			},
+		},
+		{
+			name: "filters deleted objects",
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					List(gomock.Any()).
+					Return([]*jetstream.ObjectInfo{
+						{
+							ObjectMeta: jetstream.ObjectMeta{
+								Name: "active.conf",
+								Headers: nats.Header{
+									"Osapi-Content-Type": []string{"raw"},
+								},
+							},
+							Size:   100,
+							Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+						},
+						{
+							ObjectMeta: jetstream.ObjectMeta{Name: "deleted.conf"},
+							Size:       200,
+							Digest:     "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+							Deleted:    true,
+						},
+					}, nil)
+			},
+			validateFunc: func(resp gen.GetFilesResponseObject) {
+				r, ok := resp.(gen.GetFiles200JSONResponse)
+				s.True(ok)
+				s.Equal(1, r.Total)
+				s.Len(r.Files, 1)
+				s.Equal("active.conf", r.Files[0].Name)
+			},
+		},
+		{
+			name: "object store error",
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					List(gomock.Any()).
+					Return(nil, assert.AnError)
+			},
+			validateFunc: func(resp gen.GetFilesResponseObject) {
+				_, ok := resp.(gen.GetFiles500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.GetFiles(s.ctx, gen.GetFilesRequestObject{})
+			s.NoError(err)
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+// TestGetFilesHTTP drives the handler through the Echo HTTP stack (no
+// auth configured) and asserts status codes and body fragments.
+func (s *FileListPublicTestSuite) TestGetFilesHTTP() {
+	tests := []struct {
+		name         string
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when list Ok",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					List(gomock.Any()).
+					Return([]*jetstream.ObjectInfo{
+						{
+							ObjectMeta: jetstream.ObjectMeta{
+								Name: "nginx.conf",
+								Headers: nats.Header{
+									"Osapi-Content-Type": []string{"raw"},
+								},
+							},
+							Size:   1024,
+							Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+						},
+					}, nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"files"`, `"nginx.conf"`, `"total":1`, `"content_type":"raw"`},
+		},
+		{
+			name: "when object store error",
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					List(gomock.Any()).
+					Return(nil, assert.AnError)
+				return mock
+			},
+			wantCode:     http.StatusInternalServerError,
+			wantContains: []string{"failed to list files"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			fileHandler := apifile.New(s.logger, objMock)
+			strictHandler := gen.NewStrictHandler(fileHandler, nil)
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			req := httptest.NewRequest(http.MethodGet, "/file", nil)
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// rbacListTestSigningKey is the JWT signing key shared by the server config
+// and the generated tokens in the file-list RBAC tests.
+const rbacListTestSigningKey = "test-signing-key-for-file-list-rbac"
+
+// TestGetFilesRBACHTTP verifies auth enforcement on GET /file: a missing
+// bearer token yields 401, a token without the file:read permission yields
+// 403, and a token carrying file:read reaches the handler and returns 200.
+func (s *FileListPublicTestSuite) TestGetFilesRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacListTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"node:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:read returns 200",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacListTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					List(gomock.Any()).
+					Return([]*jetstream.ObjectInfo{}, nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"files"`, `"total":0`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			// Stand up a server with the signing key configured so the auth
+			// middleware actually validates tokens.
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacListTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetFileHandler(objMock)
+			server.RegisterHandlers(handlers)
+
+			req := httptest.NewRequest(http.MethodGet, "/file", nil)
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// TestFileListPublicTestSuite is the testify entry point for the suite.
+func TestFileListPublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileListPublicTestSuite))
+}
diff --git a/internal/api/file/file_upload.go b/internal/api/file/file_upload.go
new file mode 100644
index 00000000..a6f9cb83
--- /dev/null
+++ b/internal/api/file/file_upload.go
@@ -0,0 +1,159 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "log/slog"
+
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+
+ "github.com/retr0h/osapi/internal/api/file/gen"
+)
+
+// PostFile uploads a file to the Object Store via multipart/form-data.
+//
+// The upload is idempotent by content: the SHA-256 of the payload is compared
+// against the stored object's digest (JetStream's "SHA-256=<base64url>" form),
+// and an identical file is acknowledged without rewriting (Changed=false).
+// Differing content without ?force=true returns 409; force always writes.
+func (f *File) PostFile(
+	ctx context.Context,
+	request gen.PostFileRequestObject,
+) (gen.PostFileResponseObject, error) {
+	name, contentType, fileData, errResp := f.parseMultipart(request)
+	if errResp != nil {
+		return errResp, nil
+	}
+
+	f.logger.Debug("file upload",
+		slog.String("name", name),
+		slog.String("content_type", contentType),
+		slog.Int("size", len(fileData)),
+	)
+
+	// Hex digest goes in the API response; the base64url form matches the
+	// digest format JetStream stores in ObjectInfo.Digest.
+	hash := sha256.Sum256(fileData)
+	sha256Hex := fmt.Sprintf("%x", hash)
+	newDigest := "SHA-256=" + base64.URLEncoding.EncodeToString(hash[:])
+
+	force := request.Params.Force != nil && *request.Params.Force
+
+	// Unless forced, check if the Object Store already has this file.
+	// NOTE(review): any GetInfo error is treated as "object absent", so a
+	// transient store error here would skip the conflict check and
+	// overwrite — confirm acceptable, or distinguish not-found errors.
+	if !force {
+		existing, err := f.objStore.GetInfo(ctx, name)
+		if err == nil && existing != nil {
+			if existing.Digest == newDigest {
+				// Same content — skip the write.
+				return gen.PostFile201JSONResponse{
+					Name:        name,
+					Sha256:      sha256Hex,
+					Size:        len(fileData),
+					Changed:     false,
+					ContentType: contentType,
+				}, nil
+			}
+
+			// Different content — reject without force.
+			errMsg := fmt.Sprintf(
+				"file %s already exists with different content; use force to overwrite",
+				name,
+			)
+			return gen.PostFile409JSONResponse{Error: &errMsg}, nil
+		}
+	}
+
+	// Persist the payload; the custom header preserves the logical content
+	// type ("raw" or "template") alongside the object.
+	meta := jetstream.ObjectMeta{
+		Name: name,
+		Headers: nats.Header{
+			"Osapi-Content-Type": []string{contentType},
+		},
+	}
+	_, err := f.objStore.Put(ctx, meta, bytes.NewReader(fileData))
+	if err != nil {
+		errMsg := fmt.Sprintf("failed to store file: %s", err.Error())
+		return gen.PostFile500JSONResponse{
+			Error: &errMsg,
+		}, nil
+	}
+
+	return gen.PostFile201JSONResponse{
+		Name:        name,
+		Sha256:      sha256Hex,
+		Size:        len(fileData),
+		Changed:     true,
+		ContentType: contentType,
+	}, nil
+}
+
+// parseMultipart reads the multipart body and extracts the "name",
+// "content_type", and "file" parts.
+//
+// It returns the validated name, the content type ("raw" or "template",
+// defaulting to "raw" when the part is absent), and the raw file bytes.
+// On any read or validation failure the fourth return value is a non-nil
+// 400 response and the other values are zero.
+func (f *File) parseMultipart(
+	request gen.PostFileRequestObject,
+) (string, string, []byte, gen.PostFileResponseObject) {
+	var name string
+	var contentType string
+	var fileData []byte
+
+	for {
+		part, err := request.Body.NextPart()
+		if err == io.EOF {
+			break
+		}
+		if err != nil {
+			errMsg := fmt.Sprintf("failed to read multipart: %s", err.Error())
+			return "", "", nil, gen.PostFile400JSONResponse{Error: &errMsg}
+		}
+
+		// A truncated or corrupt part body must surface as a 400 rather
+		// than being silently treated as empty content.
+		b, readErr := io.ReadAll(part)
+		_ = part.Close()
+		if readErr != nil {
+			errMsg := fmt.Sprintf("failed to read multipart: %s", readErr.Error())
+			return "", "", nil, gen.PostFile400JSONResponse{Error: &errMsg}
+		}
+
+		switch part.FormName() {
+		case "name":
+			name = string(b)
+		case "content_type":
+			contentType = string(b)
+		case "file":
+			fileData = b
+		}
+	}
+
+	if contentType == "" {
+		contentType = "raw"
+	}
+
+	if name == "" || len(name) > 255 {
+		errMsg := "name is required and must be 1-255 characters"
+		return "", "", nil, gen.PostFile400JSONResponse{Error: &errMsg}
+	}
+
+	if len(fileData) == 0 {
+		errMsg := "file is required"
+		return "", "", nil, gen.PostFile400JSONResponse{Error: &errMsg}
+	}
+
+	if contentType != "raw" && contentType != "template" {
+		errMsg := "content_type must be raw or template"
+		return "", "", nil, gen.PostFile400JSONResponse{Error: &errMsg}
+	}
+
+	return name, contentType, fileData, nil
+}
diff --git a/internal/api/file/file_upload_public_test.go b/internal/api/file/file_upload_public_test.go
new file mode 100644
index 00000000..110223fa
--- /dev/null
+++ b/internal/api/file/file_upload_public_test.go
@@ -0,0 +1,689 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "log/slog"
+ "mime/multipart"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/nats-io/nats.go"
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apifile "github.com/retr0h/osapi/internal/api/file"
+ "github.com/retr0h/osapi/internal/api/file/gen"
+ "github.com/retr0h/osapi/internal/api/file/mocks"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+)
+
+// FileUploadPublicTestSuite exercises the PostFile handler both directly and
+// through the full HTTP/RBAC stack, backed by a mocked Object Store.
+type FileUploadPublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl     *gomock.Controller
+	mockObjStore *mocks.MockObjectStoreManager
+	handler      *apifile.File
+	ctx          context.Context
+	appConfig    config.Config
+	logger       *slog.Logger
+}
+
+// SetupTest wires fresh mocks and the handler before every test. The suite
+// logger is created first so the handler under test shares it, instead of
+// silently logging through the process-global slog.Default().
+func (s *FileUploadPublicTestSuite) SetupTest() {
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockObjStore = mocks.NewMockObjectStoreManager(s.mockCtrl)
+	s.handler = apifile.New(s.logger, s.mockObjStore)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+}
+
+// TearDownTest asserts every registered gomock expectation was satisfied.
+func (s *FileUploadPublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish()
+}
+
+// boolPtr returns a pointer to v, for populating optional bool params.
+func boolPtr(v bool) *bool { return &v }
+
+// makeMultipartReader builds a multipart.Reader for testing. Pass empty
+// contentType to omit the content_type field. Pass nil data to omit the
+// file part entirely.
+//
+// NOTE: this doc comment previously sat above boolPtr, which godoc would
+// have attached to the wrong declaration; it now documents this function.
+func makeMultipartReader(
+	name string,
+	contentType string,
+	data []byte,
+) *multipart.Reader {
+	body := &bytes.Buffer{}
+	writer := multipart.NewWriter(body)
+
+	if name != "" {
+		_ = writer.WriteField("name", name)
+	}
+	if contentType != "" {
+		_ = writer.WriteField("content_type", contentType)
+	}
+	if data != nil {
+		part, _ := writer.CreateFormFile("file", "upload")
+		_, _ = part.Write(data)
+	}
+
+	_ = writer.Close()
+
+	return multipart.NewReader(body, writer.Boundary())
+}
+
+// makeBrokenMultipartReader returns a multipart.Reader that delivers a valid
+// "name" part first, then produces a non-EOF error on the next NextPart()
+// call. This exercises parseMultipart's non-EOF error branch (the
+// "failed to read multipart" 400 response).
+func makeBrokenMultipartReader() *multipart.Reader {
+	boundary := "testboundary"
+
+	// Build a valid first part (name field) followed by a corrupt second part
+	// header that will cause NextPart() to return a non-EOF error.
+	raw := "--" + boundary + "\r\n" +
+		"Content-Disposition: form-data; name=\"name\"\r\n\r\n" +
+		"test.conf\r\n" +
+		"--" + boundary + "\r\n" +
+		"Malformed-No-Blank-Line\r\n"
+
+	return multipart.NewReader(bytes.NewReader([]byte(raw)), boundary)
+}
+
+// makeMultipartBody builds a multipart body and returns the body and
+// content-type header for HTTP tests.
+func makeMultipartBody(
+ name string,
+ contentType string,
+ data []byte,
+) (*bytes.Buffer, string) {
+ body := &bytes.Buffer{}
+ writer := multipart.NewWriter(body)
+
+ if name != "" {
+ _ = writer.WriteField("name", name)
+ }
+ if contentType != "" {
+ _ = writer.WriteField("content_type", contentType)
+ }
+ if data != nil {
+ part, _ := writer.CreateFormFile("file", "upload")
+ _, _ = part.Write(data)
+ }
+
+ _ = writer.Close()
+
+ return body, writer.FormDataContentType()
+}
+
+// TestPostFile drives the PostFile handler directly (no HTTP layer) through
+// create, template, default content_type, idempotent-skip, 409 conflict,
+// force-overwrite, and validation/store-error paths.
+func (s *FileUploadPublicTestSuite) TestPostFile() {
+	fileContent := []byte("server { listen 80; }")
+
+	tests := []struct {
+		name         string
+		request      gen.PostFileRequestObject
+		setupMock    func()
+		validateFunc func(resp gen.PostFileResponseObject)
+	}{
+		{
+			name: "when new file",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					DoAndReturn(func(
+						_ context.Context,
+						meta jetstream.ObjectMeta,
+						_ io.Reader,
+					) (*jetstream.ObjectInfo, error) {
+						s.Equal("nginx.conf", meta.Name)
+						s.Equal("raw", meta.Headers.Get("Osapi-Content-Type"))
+						return &jetstream.ObjectInfo{
+							ObjectMeta: meta,
+							Size:       uint64(len(fileContent)),
+						}, nil
+					})
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.Equal("nginx.conf", r.Name)
+				s.Equal(len(fileContent), r.Size)
+				s.NotEmpty(r.Sha256)
+				s.True(r.Changed)
+				s.Equal("raw", r.ContentType)
+			},
+		},
+		{
+			name: "when template file",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("tmpl.conf", "template", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "tmpl.conf").
+					Return(nil, assert.AnError)
+
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					DoAndReturn(func(
+						_ context.Context,
+						meta jetstream.ObjectMeta,
+						_ io.Reader,
+					) (*jetstream.ObjectInfo, error) {
+						s.Equal("template", meta.Headers.Get("Osapi-Content-Type"))
+						return &jetstream.ObjectInfo{
+							ObjectMeta: meta,
+							Size:       uint64(len(fileContent)),
+						}, nil
+					})
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.Equal("tmpl.conf", r.Name)
+				s.True(r.Changed)
+				s.Equal("template", r.ContentType)
+			},
+		},
+		{
+			name: "when content_type defaults to raw",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("f.txt", "", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "f.txt").
+					Return(nil, assert.AnError)
+
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "f.txt"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.Equal("raw", r.ContentType)
+			},
+		},
+		{
+			// Stored digest below is the SHA-256 of fileContent, so the
+			// handler must skip the write and report Changed=false.
+			name: "when unchanged content",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{
+							Name: "nginx.conf",
+							Headers: nats.Header{
+								"Osapi-Content-Type": []string{"raw"},
+							},
+						},
+						Size:   uint64(len(fileContent)),
+						Digest: "SHA-256=udwh0KiTQXw0wAbA6MMre9G3vJSOnF4MeW7eBweZr0g=",
+					}, nil)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.Equal("nginx.conf", r.Name)
+				s.Equal(len(fileContent), r.Size)
+				s.False(r.Changed)
+				s.Equal("raw", r.ContentType)
+			},
+		},
+		{
+			name: "when different content without force returns 409",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{
+							Name: "nginx.conf",
+							Headers: nats.Header{
+								"Osapi-Content-Type": []string{"raw"},
+							},
+						},
+						Size:   100,
+						Digest: "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+					}, nil)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile409JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "already exists with different content")
+			},
+		},
+		{
+			// With force=true GetInfo must never be called — only Put.
+			name: "when force upload bypasses digest check",
+			request: gen.PostFileRequestObject{
+				Params: gen.PostFileParams{Force: boolPtr(true)},
+				Body:   makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.Equal("nginx.conf", r.Name)
+				s.True(r.Changed)
+			},
+		},
+		{
+			name: "when force upload same content still writes",
+			request: gen.PostFileRequestObject{
+				Params: gen.PostFileParams{Force: boolPtr(true)},
+				Body:   makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile201JSONResponse)
+				s.True(ok)
+				s.True(r.Changed)
+			},
+		},
+		{
+			name: "when multipart read error returns 400",
+			request: gen.PostFileRequestObject{
+				Body: makeBrokenMultipartReader(),
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "failed to read multipart")
+			},
+		},
+		{
+			name: "validation error empty name",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("", "raw", fileContent),
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "name is required")
+			},
+		},
+		{
+			name: "validation error empty file",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("test.txt", "raw", nil),
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "file is required")
+			},
+		},
+		{
+			name: "validation error invalid content_type",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("test.txt", "invalid", fileContent),
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				r, ok := resp.(gen.PostFile400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "content_type must be raw or template")
+			},
+		},
+		{
+			name: "object store error",
+			request: gen.PostFileRequestObject{
+				Body: makeMultipartReader("nginx.conf", "raw", fileContent),
+			},
+			setupMock: func() {
+				s.mockObjStore.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+
+				s.mockObjStore.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(nil, assert.AnError)
+			},
+			validateFunc: func(resp gen.PostFileResponseObject) {
+				_, ok := resp.(gen.PostFile500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.PostFile(s.ctx, tt.request)
+			s.NoError(err)
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+// TestPostFileValidationHTTP exercises POST /file through the Echo stack
+// (no auth middleware): success, validation 400s, digest conflict 409,
+// force bypass, malformed query param, and store-error 500.
+func (s *FileUploadPublicTestSuite) TestPostFileValidationHTTP() {
+	fileContent := []byte("server { listen 80; }")
+
+	tests := []struct {
+		name         string
+		path         string
+		buildBody    func() (*bytes.Buffer, string)
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when upload Ok",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("nginx.conf", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+				mock.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+				return mock
+			},
+			wantCode: http.StatusCreated,
+			wantContains: []string{
+				`"name":"nginx.conf"`,
+				`"sha256"`,
+				`"size"`,
+				`"changed":true`,
+				`"content_type":"raw"`,
+			},
+		},
+		{
+			name: "when validation error",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{"name is required"},
+		},
+		{
+			name: "when different content without force returns 409",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("nginx.conf", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       100,
+						Digest:     "SHA-256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU=",
+					}, nil)
+				return mock
+			},
+			wantCode:     http.StatusConflict,
+			wantContains: []string{"already exists with different content"},
+		},
+		{
+			name: "when force upload bypasses digest check",
+			path: "/file?force=true",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("nginx.conf", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+				return mock
+			},
+			wantCode:     http.StatusCreated,
+			wantContains: []string{`"changed":true`},
+		},
+		{
+			// The generated strict server rejects a non-bool force before
+			// the handler runs, so no mock expectations are registered.
+			name: "when invalid force param returns 400",
+			path: "/file?force=notabool",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("nginx.conf", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{"Invalid format for parameter force"},
+		},
+		{
+			name: "when object store error",
+			buildBody: func() (*bytes.Buffer, string) {
+				return makeMultipartBody("nginx.conf", "raw", fileContent)
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+				mock.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(nil, assert.AnError)
+				return mock
+			},
+			wantCode:     http.StatusInternalServerError,
+			wantContains: []string{"failed to store file"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			fileHandler := apifile.New(s.logger, objMock)
+			strictHandler := gen.NewStrictHandler(fileHandler, nil)
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			body, ct := tc.buildBody()
+
+			// Default to the bare /file path unless the case overrides it.
+			path := tc.path
+			if path == "" {
+				path = "/file"
+			}
+
+			req := httptest.NewRequest(
+				http.MethodPost,
+				path,
+				body,
+			)
+			req.Header.Set("Content-Type", ct)
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// rbacUploadTestSigningKey is the JWT signing key shared by the server
+// config and the generated tokens in the file-upload RBAC tests.
+const rbacUploadTestSigningKey = "test-signing-key-for-file-upload-rbac"
+
+// TestPostFileRBACHTTP verifies auth enforcement on POST /file: a missing
+// bearer token yields 401, a token without file:write yields 403, and a
+// token carrying file:write reaches the handler and returns 201.
+func (s *FileUploadPublicTestSuite) TestPostFileRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+	fileContent := []byte("server { listen 80; }")
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupMock    func() *mocks.MockObjectStoreManager
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacUploadTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"node:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				return mocks.NewMockObjectStoreManager(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:write returns 201",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacUploadTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:write"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupMock: func() *mocks.MockObjectStoreManager {
+				mock := mocks.NewMockObjectStoreManager(s.mockCtrl)
+				mock.EXPECT().
+					GetInfo(gomock.Any(), "nginx.conf").
+					Return(nil, assert.AnError)
+				mock.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(&jetstream.ObjectInfo{
+						ObjectMeta: jetstream.ObjectMeta{Name: "nginx.conf"},
+						Size:       uint64(len(fileContent)),
+					}, nil)
+				return mock
+			},
+			wantCode:     http.StatusCreated,
+			wantContains: []string{`"name":"nginx.conf"`, `"sha256"`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			objMock := tc.setupMock()
+
+			// Stand up a server with the signing key configured so the auth
+			// middleware actually validates tokens.
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacUploadTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetFileHandler(objMock)
+			server.RegisterHandlers(handlers)
+
+			body, ct := makeMultipartBody("nginx.conf", "raw", fileContent)
+			req := httptest.NewRequest(
+				http.MethodPost,
+				"/file",
+				body,
+			)
+			req.Header.Set("Content-Type", ct)
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+// TestFileUploadPublicTestSuite is the testify entry point for the suite.
+func TestFileUploadPublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileUploadPublicTestSuite))
+}
diff --git a/internal/api/file/gen/api.yaml b/internal/api/file/gen/api.yaml
new file mode 100644
index 00000000..50637b35
--- /dev/null
+++ b/internal/api/file/gen/api.yaml
@@ -0,0 +1,413 @@
+# Copyright (c) 2026 John Dewey
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+---
+openapi: 3.0.0
+info:
+ title: File Management API
+ version: 1.0.0
+tags:
+ - name: file_operations
+ x-displayName: File
+ description: Object Store file management operations.
+
+paths:
+ /file:
+ post:
+ summary: Upload a file
+ description: Upload a file to the Object Store.
+ tags:
+ - file_operations
+ operationId: PostFile
+ security:
+ - BearerAuth:
+ - file:write
+ parameters:
+ - name: force
+ in: query
+ required: false
+ # x-oapi-codegen-extra-tags:
+ # validate: omitempty
+ # NOTE: The tags above are intentionally commented out. The
+ # only param is an optional bool — validate:"omitempty" can
+ # never fail, so validation.Struct(Params) would be dead
+ # code. The middleware already type-validates the bool before
+ # the handler runs. Uncomment if a future param needs a
+ # stricter validate tag.
+ description: >
+ When true, bypass the digest check and always write the
+ file. Returns changed=true regardless of whether the
+ content differs from the existing object.
+ schema:
+ type: boolean
+ default: false
+ requestBody:
+ description: The file to upload.
+ required: true
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file in the Object Store.
+ example: "nginx.conf"
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy. "raw"
+ writes bytes as-is; "template" renders with Go
+ text/template and agent facts.
+ default: raw
+ enum:
+ - raw
+ - template
+ file:
+ type: string
+ format: binary
+ description: The file content.
+ required:
+ - name
+ - file
+ responses:
+ '201':
+ description: File uploaded successfully.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileUploadResponse'
+ '400':
+ description: Invalid request payload.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+          description: Unauthorized - Bearer token required
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '409':
+ description: >
+ File already exists with different content. Use
+ ?force=true to overwrite.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Error uploading file.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ get:
+ summary: List stored files
+ description: List all files stored in the Object Store.
+ tags:
+ - file_operations
+ operationId: GetFiles
+ security:
+ - BearerAuth:
+ - file:read
+ responses:
+ '200':
+ description: List of stored files.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileListResponse'
+ '401':
+          description: Unauthorized - Bearer token required
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Error listing files.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ /file/{name}:
+ get:
+ summary: Get file metadata
+ description: Get metadata for a specific file in the Object Store.
+ tags:
+ - file_operations
+ operationId: GetFileByName
+ security:
+ - BearerAuth:
+ - file:read
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File metadata.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileInfoResponse'
+ '400':
+ description: Invalid file name.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+          description: Unauthorized - Bearer token required
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Error retrieving file metadata.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ delete:
+ summary: Delete a file
+ description: Delete a file from the Object Store.
+ tags:
+ - file_operations
+ operationId: DeleteFileByName
+ security:
+ - BearerAuth:
+ - file:write
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File deleted successfully.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeleteResponse'
+ '400':
+ description: Invalid file name.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+          description: Unauthorized - Bearer token required
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Error deleting file.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+# -- Reusable components -----------------------------------------------------
+
+components:
+ parameters:
+ FileName:
+ name: name
+ in: path
+ required: true
+ description: >
+ The name of the file in the Object Store.
+ # NOTE: x-oapi-codegen-extra-tags on path params do not generate
+ # validate tags in strict-server mode. Validation is handled
+ # manually in the handler.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ schema:
+ type: string
+ minLength: 1
+ maxLength: 255
+
+ securitySchemes:
+ BearerAuth:
+ type: http
+ scheme: bearer
+ bearerFormat: JWT
+
+ schemas:
+ ErrorResponse:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ # -- Response schemas -----------------------------------------------------
+
+ FileInfo:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file.
+ example: "nginx.conf"
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy (raw or
+ template).
+ example: "raw"
+ required:
+ - name
+ - sha256
+ - size
+ - content_type
+
+ FileUploadResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the uploaded file.
+ example: "nginx.conf"
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ changed:
+ type: boolean
+ description: >
+ Whether the file content changed. False when the Object
+ Store already held an object with the same SHA-256 digest.
+ example: true
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy (raw or
+ template).
+ example: "raw"
+ required:
+ - name
+ - sha256
+ - size
+ - changed
+ - content_type
+
+ FileListResponse:
+ type: object
+ properties:
+ files:
+ type: array
+ items:
+ $ref: '#/components/schemas/FileInfo'
+ description: List of stored files.
+ total:
+ type: integer
+ description: Total number of files.
+ example: 5
+ required:
+ - files
+ - total
+
+ FileInfoResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file.
+ example: "nginx.conf"
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy (raw or
+ template).
+ example: "raw"
+ required:
+ - name
+ - sha256
+ - size
+ - content_type
+
+ FileDeleteResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the deleted file.
+ example: "nginx.conf"
+ deleted:
+ type: boolean
+ description: Whether the file was deleted.
+ example: true
+ required:
+ - name
+ - deleted
diff --git a/internal/api/file/gen/cfg.yaml b/internal/api/file/gen/cfg.yaml
new file mode 100644
index 00000000..31b924dd
--- /dev/null
+++ b/internal/api/file/gen/cfg.yaml
@@ -0,0 +1,32 @@
+# Copyright (c) 2026 John Dewey
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+
+---
+package: gen
+output: file.gen.go
+generate:
+ models: true
+ echo-server: true
+ strict-server: true
+import-mapping:
+ ../../common/gen/api.yaml: github.com/retr0h/osapi/internal/api/common/gen
+output-options:
+  # Disable pruning so all referenced types are generated, even if unused here.
+ skip-prune: true
diff --git a/internal/api/file/gen/file.gen.go b/internal/api/file/gen/file.gen.go
new file mode 100644
index 00000000..0377146e
--- /dev/null
+++ b/internal/api/file/gen/file.gen.go
@@ -0,0 +1,609 @@
+// Package gen provides primitives to interact with the openapi HTTP API.
+//
+// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.5.1 DO NOT EDIT.
+package gen
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "mime/multipart"
+ "net/http"
+
+ "github.com/labstack/echo/v4"
+ "github.com/oapi-codegen/runtime"
+ strictecho "github.com/oapi-codegen/runtime/strictmiddleware/echo"
+ openapi_types "github.com/oapi-codegen/runtime/types"
+ externalRef0 "github.com/retr0h/osapi/internal/api/common/gen"
+)
+
+const (
+ BearerAuthScopes = "BearerAuth.Scopes"
+)
+
+// Defines values for PostFileMultipartBodyContentType.
+const (
+ Raw PostFileMultipartBodyContentType = "raw"
+ Template PostFileMultipartBodyContentType = "template"
+)
+
+// ErrorResponse defines model for ErrorResponse.
+type ErrorResponse = externalRef0.ErrorResponse
+
+// FileDeleteResponse defines model for FileDeleteResponse.
+type FileDeleteResponse struct {
+ // Deleted Whether the file was deleted.
+ Deleted bool `json:"deleted"`
+
+ // Name The name of the deleted file.
+ Name string `json:"name"`
+}
+
+// FileInfo defines model for FileInfo.
+type FileInfo struct {
+ // ContentType How the file should be treated during deploy (raw or template).
+ ContentType string `json:"content_type"`
+
+ // Name The name of the file.
+ Name string `json:"name"`
+
+ // Sha256 SHA-256 hash of the file content.
+ Sha256 string `json:"sha256"`
+
+ // Size File size in bytes.
+ Size int `json:"size"`
+}
+
+// FileInfoResponse defines model for FileInfoResponse.
+type FileInfoResponse struct {
+ // ContentType How the file should be treated during deploy (raw or template).
+ ContentType string `json:"content_type"`
+
+ // Name The name of the file.
+ Name string `json:"name"`
+
+ // Sha256 SHA-256 hash of the file content.
+ Sha256 string `json:"sha256"`
+
+ // Size File size in bytes.
+ Size int `json:"size"`
+}
+
+// FileListResponse defines model for FileListResponse.
+type FileListResponse struct {
+ // Files List of stored files.
+ Files []FileInfo `json:"files"`
+
+ // Total Total number of files.
+ Total int `json:"total"`
+}
+
+// FileUploadResponse defines model for FileUploadResponse.
+type FileUploadResponse struct {
+ // Changed Whether the file content changed. False when the Object Store already held an object with the same SHA-256 digest.
+ Changed bool `json:"changed"`
+
+ // ContentType How the file should be treated during deploy (raw or template).
+ ContentType string `json:"content_type"`
+
+ // Name The name of the uploaded file.
+ Name string `json:"name"`
+
+ // Sha256 SHA-256 hash of the file content.
+ Sha256 string `json:"sha256"`
+
+ // Size File size in bytes.
+ Size int `json:"size"`
+}
+
+// FileName defines model for FileName.
+type FileName = string
+
+// PostFileMultipartBody defines parameters for PostFile.
+type PostFileMultipartBody struct {
+ // ContentType How the file should be treated during deploy. "raw" writes bytes as-is; "template" renders with Go text/template and agent facts.
+ ContentType *PostFileMultipartBodyContentType `json:"content_type,omitempty"`
+
+ // File The file content.
+ File openapi_types.File `json:"file"`
+
+ // Name The name of the file in the Object Store.
+ Name string `json:"name"`
+}
+
+// PostFileParams defines parameters for PostFile.
+type PostFileParams struct {
+ // Force When true, bypass the digest check and always write the file. Returns changed=true regardless of whether the content differs from the existing object.
+ Force *bool `form:"force,omitempty" json:"force,omitempty"`
+}
+
+// PostFileMultipartBodyContentType defines parameters for PostFile.
+type PostFileMultipartBodyContentType string
+
+// PostFileMultipartRequestBody defines body for PostFile for multipart/form-data ContentType.
+type PostFileMultipartRequestBody PostFileMultipartBody
+
+// ServerInterface represents all server handlers.
+type ServerInterface interface {
+ // List stored files
+ // (GET /file)
+ GetFiles(ctx echo.Context) error
+ // Upload a file
+ // (POST /file)
+ PostFile(ctx echo.Context, params PostFileParams) error
+ // Delete a file
+ // (DELETE /file/{name})
+ DeleteFileByName(ctx echo.Context, name FileName) error
+ // Get file metadata
+ // (GET /file/{name})
+ GetFileByName(ctx echo.Context, name FileName) error
+}
+
+// ServerInterfaceWrapper converts echo contexts to parameters.
+type ServerInterfaceWrapper struct {
+ Handler ServerInterface
+}
+
+// GetFiles converts echo context to params.
+func (w *ServerInterfaceWrapper) GetFiles(ctx echo.Context) error {
+ var err error
+
+ ctx.Set(BearerAuthScopes, []string{"file:read"})
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.GetFiles(ctx)
+ return err
+}
+
+// PostFile converts echo context to params.
+func (w *ServerInterfaceWrapper) PostFile(ctx echo.Context) error {
+ var err error
+
+ ctx.Set(BearerAuthScopes, []string{"file:write"})
+
+ // Parameter object where we will unmarshal all parameters from the context
+ var params PostFileParams
+ // ------------- Optional query parameter "force" -------------
+
+ err = runtime.BindQueryParameter("form", true, false, "force", ctx.QueryParams(), ¶ms.Force)
+ if err != nil {
+ return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter force: %s", err))
+ }
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.PostFile(ctx, params)
+ return err
+}
+
+// DeleteFileByName converts echo context to params.
+func (w *ServerInterfaceWrapper) DeleteFileByName(ctx echo.Context) error {
+ var err error
+ // ------------- Path parameter "name" -------------
+ var name FileName
+
+ err = runtime.BindStyledParameterWithOptions("simple", "name", ctx.Param("name"), &name, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
+ if err != nil {
+ return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter name: %s", err))
+ }
+
+ ctx.Set(BearerAuthScopes, []string{"file:write"})
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.DeleteFileByName(ctx, name)
+ return err
+}
+
+// GetFileByName converts echo context to params.
+func (w *ServerInterfaceWrapper) GetFileByName(ctx echo.Context) error {
+ var err error
+ // ------------- Path parameter "name" -------------
+ var name FileName
+
+ err = runtime.BindStyledParameterWithOptions("simple", "name", ctx.Param("name"), &name, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
+ if err != nil {
+ return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter name: %s", err))
+ }
+
+ ctx.Set(BearerAuthScopes, []string{"file:read"})
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.GetFileByName(ctx, name)
+ return err
+}
+
+// This is a simple interface which specifies echo.Route addition functions which
+// are present on both echo.Echo and echo.Group, since we want to allow using
+// either of them for path registration
+type EchoRouter interface {
+ CONNECT(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ DELETE(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ GET(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ HEAD(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ OPTIONS(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ PATCH(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ POST(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ PUT(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+ TRACE(path string, h echo.HandlerFunc, m ...echo.MiddlewareFunc) *echo.Route
+}
+
+// RegisterHandlers adds each server route to the EchoRouter.
+func RegisterHandlers(router EchoRouter, si ServerInterface) {
+ RegisterHandlersWithBaseURL(router, si, "")
+}
+
+// Registers handlers, and prepends BaseURL to the paths, so that the paths
+// can be served under a prefix.
+func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {
+
+ wrapper := ServerInterfaceWrapper{
+ Handler: si,
+ }
+
+ router.GET(baseURL+"/file", wrapper.GetFiles)
+ router.POST(baseURL+"/file", wrapper.PostFile)
+ router.DELETE(baseURL+"/file/:name", wrapper.DeleteFileByName)
+ router.GET(baseURL+"/file/:name", wrapper.GetFileByName)
+
+}
+
+type GetFilesRequestObject struct {
+}
+
+type GetFilesResponseObject interface {
+ VisitGetFilesResponse(w http.ResponseWriter) error
+}
+
+type GetFiles200JSONResponse FileListResponse
+
+func (response GetFiles200JSONResponse) VisitGetFilesResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(200)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFiles401JSONResponse externalRef0.ErrorResponse
+
+func (response GetFiles401JSONResponse) VisitGetFilesResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFiles403JSONResponse externalRef0.ErrorResponse
+
+func (response GetFiles403JSONResponse) VisitGetFilesResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFiles500JSONResponse externalRef0.ErrorResponse
+
+func (response GetFiles500JSONResponse) VisitGetFilesResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFileRequestObject struct {
+ Params PostFileParams
+ Body *multipart.Reader
+}
+
+type PostFileResponseObject interface {
+ VisitPostFileResponse(w http.ResponseWriter) error
+}
+
+type PostFile201JSONResponse FileUploadResponse
+
+func (response PostFile201JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(201)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFile400JSONResponse externalRef0.ErrorResponse
+
+func (response PostFile400JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(400)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFile401JSONResponse externalRef0.ErrorResponse
+
+func (response PostFile401JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFile403JSONResponse externalRef0.ErrorResponse
+
+func (response PostFile403JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFile409JSONResponse externalRef0.ErrorResponse
+
+func (response PostFile409JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(409)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostFile500JSONResponse externalRef0.ErrorResponse
+
+func (response PostFile500JSONResponse) VisitPostFileResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByNameRequestObject struct {
+ Name FileName `json:"name"`
+}
+
+type DeleteFileByNameResponseObject interface {
+ VisitDeleteFileByNameResponse(w http.ResponseWriter) error
+}
+
+type DeleteFileByName200JSONResponse FileDeleteResponse
+
+func (response DeleteFileByName200JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(200)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByName400JSONResponse externalRef0.ErrorResponse
+
+func (response DeleteFileByName400JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(400)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByName401JSONResponse externalRef0.ErrorResponse
+
+func (response DeleteFileByName401JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByName403JSONResponse externalRef0.ErrorResponse
+
+func (response DeleteFileByName403JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByName404JSONResponse externalRef0.ErrorResponse
+
+func (response DeleteFileByName404JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(404)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type DeleteFileByName500JSONResponse externalRef0.ErrorResponse
+
+func (response DeleteFileByName500JSONResponse) VisitDeleteFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByNameRequestObject struct {
+ Name FileName `json:"name"`
+}
+
+type GetFileByNameResponseObject interface {
+ VisitGetFileByNameResponse(w http.ResponseWriter) error
+}
+
+type GetFileByName200JSONResponse FileInfoResponse
+
+func (response GetFileByName200JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(200)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByName400JSONResponse externalRef0.ErrorResponse
+
+func (response GetFileByName400JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(400)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByName401JSONResponse externalRef0.ErrorResponse
+
+func (response GetFileByName401JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByName403JSONResponse externalRef0.ErrorResponse
+
+func (response GetFileByName403JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByName404JSONResponse externalRef0.ErrorResponse
+
+func (response GetFileByName404JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(404)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type GetFileByName500JSONResponse externalRef0.ErrorResponse
+
+func (response GetFileByName500JSONResponse) VisitGetFileByNameResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+// StrictServerInterface represents all server handlers.
+type StrictServerInterface interface {
+ // List stored files
+ // (GET /file)
+ GetFiles(ctx context.Context, request GetFilesRequestObject) (GetFilesResponseObject, error)
+ // Upload a file
+ // (POST /file)
+ PostFile(ctx context.Context, request PostFileRequestObject) (PostFileResponseObject, error)
+ // Delete a file
+ // (DELETE /file/{name})
+ DeleteFileByName(ctx context.Context, request DeleteFileByNameRequestObject) (DeleteFileByNameResponseObject, error)
+ // Get file metadata
+ // (GET /file/{name})
+ GetFileByName(ctx context.Context, request GetFileByNameRequestObject) (GetFileByNameResponseObject, error)
+}
+
+type StrictHandlerFunc = strictecho.StrictEchoHandlerFunc
+type StrictMiddlewareFunc = strictecho.StrictEchoMiddlewareFunc
+
+func NewStrictHandler(ssi StrictServerInterface, middlewares []StrictMiddlewareFunc) ServerInterface {
+ return &strictHandler{ssi: ssi, middlewares: middlewares}
+}
+
+type strictHandler struct {
+ ssi StrictServerInterface
+ middlewares []StrictMiddlewareFunc
+}
+
+// GetFiles operation middleware
+func (sh *strictHandler) GetFiles(ctx echo.Context) error {
+ var request GetFilesRequestObject
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.GetFiles(ctx.Request().Context(), request.(GetFilesRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "GetFiles")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(GetFilesResponseObject); ok {
+ return validResponse.VisitGetFilesResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
+
+// PostFile operation middleware
+func (sh *strictHandler) PostFile(ctx echo.Context, params PostFileParams) error {
+ var request PostFileRequestObject
+
+ request.Params = params
+
+ if reader, err := ctx.Request().MultipartReader(); err != nil {
+ return err
+ } else {
+ request.Body = reader
+ }
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.PostFile(ctx.Request().Context(), request.(PostFileRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "PostFile")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(PostFileResponseObject); ok {
+ return validResponse.VisitPostFileResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
+
+// DeleteFileByName operation middleware
+func (sh *strictHandler) DeleteFileByName(ctx echo.Context, name FileName) error {
+ var request DeleteFileByNameRequestObject
+
+ request.Name = name
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.DeleteFileByName(ctx.Request().Context(), request.(DeleteFileByNameRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "DeleteFileByName")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(DeleteFileByNameResponseObject); ok {
+ return validResponse.VisitDeleteFileByNameResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
+
+// GetFileByName operation middleware
+func (sh *strictHandler) GetFileByName(ctx echo.Context, name FileName) error {
+ var request GetFileByNameRequestObject
+
+ request.Name = name
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.GetFileByName(ctx.Request().Context(), request.(GetFileByNameRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "GetFileByName")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(GetFileByNameResponseObject); ok {
+ return validResponse.VisitGetFileByNameResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
diff --git a/internal/api/file/gen/generate.go b/internal/api/file/gen/generate.go
new file mode 100644
index 00000000..979a6cfe
--- /dev/null
+++ b/internal/api/file/gen/generate.go
@@ -0,0 +1,24 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package gen contains generated code for the file API.
+package gen
+
+//go:generate go tool github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen -config cfg.yaml api.yaml
diff --git a/internal/api/file/mocks/generate.go b/internal/api/file/mocks/generate.go
new file mode 100644
index 00000000..fb0a0384
--- /dev/null
+++ b/internal/api/file/mocks/generate.go
@@ -0,0 +1,24 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package mocks provides mock implementations for testing.
+package mocks
+
+//go:generate go tool github.com/golang/mock/mockgen -source=../types.go -destination=types.gen.go -package=mocks
diff --git a/internal/api/file/mocks/mocks.go b/internal/api/file/mocks/mocks.go
new file mode 100644
index 00000000..f715ff1a
--- /dev/null
+++ b/internal/api/file/mocks/mocks.go
@@ -0,0 +1,32 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package mocks
+
+import (
+ "github.com/golang/mock/gomock"
+)
+
+// NewPlainMockObjectStoreManager creates a MockObjectStoreManager with no default expectations set.
+func NewPlainMockObjectStoreManager(
+ ctrl *gomock.Controller,
+) *MockObjectStoreManager {
+ return NewMockObjectStoreManager(ctrl)
+}
diff --git a/internal/api/file/mocks/types.gen.go b/internal/api/file/mocks/types.gen.go
new file mode 100644
index 00000000..c69393bb
--- /dev/null
+++ b/internal/api/file/mocks/types.gen.go
@@ -0,0 +1,141 @@
+// Code generated by MockGen. DO NOT EDIT.
+// Source: ../types.go
+
+// Package mocks is a generated GoMock package.
+package mocks
+
+import (
+ context "context"
+ io "io"
+ reflect "reflect"
+
+ gomock "github.com/golang/mock/gomock"
+ jetstream "github.com/nats-io/nats.go/jetstream"
+)
+
+// MockObjectStoreManager is a mock of ObjectStoreManager interface.
+type MockObjectStoreManager struct {
+ ctrl *gomock.Controller
+ recorder *MockObjectStoreManagerMockRecorder
+}
+
+// MockObjectStoreManagerMockRecorder is the mock recorder for MockObjectStoreManager.
+type MockObjectStoreManagerMockRecorder struct {
+ mock *MockObjectStoreManager
+}
+
+// NewMockObjectStoreManager creates a new mock instance.
+func NewMockObjectStoreManager(ctrl *gomock.Controller) *MockObjectStoreManager {
+ mock := &MockObjectStoreManager{ctrl: ctrl}
+ mock.recorder = &MockObjectStoreManagerMockRecorder{mock}
+ return mock
+}
+
+// EXPECT returns an object that allows the caller to indicate expected use.
+func (m *MockObjectStoreManager) EXPECT() *MockObjectStoreManagerMockRecorder {
+ return m.recorder
+}
+
+// Delete mocks base method.
+func (m *MockObjectStoreManager) Delete(ctx context.Context, name string) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Delete", ctx, name)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// Delete indicates an expected call of Delete.
+func (mr *MockObjectStoreManagerMockRecorder) Delete(ctx, name interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockObjectStoreManager)(nil).Delete), ctx, name)
+}
+
+// GetBytes mocks base method.
+func (m *MockObjectStoreManager) GetBytes(ctx context.Context, name string, opts ...jetstream.GetObjectOpt) ([]byte, error) {
+ m.ctrl.T.Helper()
+ varargs := []interface{}{ctx, name}
+ for _, a := range opts {
+ varargs = append(varargs, a)
+ }
+ ret := m.ctrl.Call(m, "GetBytes", varargs...)
+ ret0, _ := ret[0].([]byte)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetBytes indicates an expected call of GetBytes.
+func (mr *MockObjectStoreManagerMockRecorder) GetBytes(ctx, name interface{}, opts ...interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ varargs := append([]interface{}{ctx, name}, opts...)
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBytes", reflect.TypeOf((*MockObjectStoreManager)(nil).GetBytes), varargs...)
+}
+
+// GetInfo mocks base method.
+func (m *MockObjectStoreManager) GetInfo(ctx context.Context, name string, opts ...jetstream.GetObjectInfoOpt) (*jetstream.ObjectInfo, error) {
+ m.ctrl.T.Helper()
+ varargs := []interface{}{ctx, name}
+ for _, a := range opts {
+ varargs = append(varargs, a)
+ }
+ ret := m.ctrl.Call(m, "GetInfo", varargs...)
+ ret0, _ := ret[0].(*jetstream.ObjectInfo)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetInfo indicates an expected call of GetInfo.
+func (mr *MockObjectStoreManagerMockRecorder) GetInfo(ctx, name interface{}, opts ...interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ varargs := append([]interface{}{ctx, name}, opts...)
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetInfo", reflect.TypeOf((*MockObjectStoreManager)(nil).GetInfo), varargs...)
+}
+
+// List mocks base method.
+func (m *MockObjectStoreManager) List(ctx context.Context, opts ...jetstream.ListObjectsOpt) ([]*jetstream.ObjectInfo, error) {
+ m.ctrl.T.Helper()
+ varargs := []interface{}{ctx}
+ for _, a := range opts {
+ varargs = append(varargs, a)
+ }
+ ret := m.ctrl.Call(m, "List", varargs...)
+ ret0, _ := ret[0].([]*jetstream.ObjectInfo)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// List indicates an expected call of List.
+func (mr *MockObjectStoreManagerMockRecorder) List(ctx interface{}, opts ...interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ varargs := append([]interface{}{ctx}, opts...)
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockObjectStoreManager)(nil).List), varargs...)
+}
+
+// Put mocks base method.
+func (m *MockObjectStoreManager) Put(ctx context.Context, meta jetstream.ObjectMeta, reader io.Reader) (*jetstream.ObjectInfo, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Put", ctx, meta, reader)
+ ret0, _ := ret[0].(*jetstream.ObjectInfo)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// Put indicates an expected call of Put.
+func (mr *MockObjectStoreManagerMockRecorder) Put(ctx, meta, reader interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Put", reflect.TypeOf((*MockObjectStoreManager)(nil).Put), ctx, meta, reader)
+}
+
+// PutBytes mocks base method.
+func (m *MockObjectStoreManager) PutBytes(ctx context.Context, name string, data []byte) (*jetstream.ObjectInfo, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "PutBytes", ctx, name, data)
+ ret0, _ := ret[0].(*jetstream.ObjectInfo)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// PutBytes indicates an expected call of PutBytes.
+func (mr *MockObjectStoreManagerMockRecorder) PutBytes(ctx, name, data interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PutBytes", reflect.TypeOf((*MockObjectStoreManager)(nil).PutBytes), ctx, name, data)
+}
diff --git a/internal/api/file/types.go b/internal/api/file/types.go
new file mode 100644
index 00000000..69bbb3fd
--- /dev/null
+++ b/internal/api/file/types.go
@@ -0,0 +1,66 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "io"
+ "log/slog"
+
+ "github.com/nats-io/nats.go/jetstream"
+)
+
+// ObjectStoreManager wraps the subset of jetstream.ObjectStore methods
+// needed by the file API handlers. This minimal interface enables
+// straightforward mocking in tests.
+type ObjectStoreManager interface {
+ // Put stores data from a reader under the given metadata.
+ Put(
+ ctx context.Context,
+ meta jetstream.ObjectMeta,
+ reader io.Reader,
+ ) (*jetstream.ObjectInfo, error)
+
+ // PutBytes stores data under the given name.
+ PutBytes(ctx context.Context, name string, data []byte) (*jetstream.ObjectInfo, error)
+
+ // GetBytes retrieves the content stored under the given name.
+ GetBytes(ctx context.Context, name string, opts ...jetstream.GetObjectOpt) ([]byte, error)
+
+ // GetInfo retrieves metadata for the named object.
+ GetInfo(
+ ctx context.Context,
+ name string,
+ opts ...jetstream.GetObjectInfoOpt,
+ ) (*jetstream.ObjectInfo, error)
+
+ // Delete removes the named object.
+ Delete(ctx context.Context, name string) error
+
+ // List returns metadata for all objects in the store.
+ List(ctx context.Context, opts ...jetstream.ListObjectsOpt) ([]*jetstream.ObjectInfo, error)
+}
+
+// File implementation of the File APIs operations.
+type File struct {
+ objStore ObjectStoreManager
+ logger *slog.Logger
+}
diff --git a/internal/api/file/validate.go b/internal/api/file/validate.go
new file mode 100644
index 00000000..c21eb2ba
--- /dev/null
+++ b/internal/api/file/validate.go
@@ -0,0 +1,34 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import "github.com/retr0h/osapi/internal/validation"
+
+// validateFileName validates a file name path parameter using the shared
+// validator. Returns the error message and false if invalid.
+//
+// This exists because oapi-codegen does not generate validate tags on
+// path parameters in strict-server mode (upstream limitation).
+func validateFileName(
+ name string,
+) (string, bool) {
+ return validation.Var(name, "required,min=1,max=255")
+}
diff --git a/internal/api/file/validate_test.go b/internal/api/file/validate_test.go
new file mode 100644
index 00000000..321a4f6e
--- /dev/null
+++ b/internal/api/file/validate_test.go
@@ -0,0 +1,86 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/suite"
+)
+
+type ValidateTestSuite struct {
+ suite.Suite
+}
+
+func (suite *ValidateTestSuite) TestValidateFileName() {
+ tests := []struct {
+ name string
+ input string
+ valid bool
+ }{
+ {
+ name: "when valid name",
+ input: "nginx.conf",
+ valid: true,
+ },
+ {
+			name:  "when valid name with multiple dots",
+ input: "app.conf.tmpl",
+ valid: true,
+ },
+ {
+ name: "when empty name",
+ input: "",
+ valid: false,
+ },
+ {
+ name: "when name exceeds 255 chars",
+ input: strings.Repeat("a", 256),
+ valid: false,
+ },
+ {
+ name: "when name at max 255 chars",
+ input: strings.Repeat("a", 255),
+ valid: true,
+ },
+ }
+
+ for _, tc := range tests {
+ suite.Run(tc.name, func() {
+ errMsg, ok := validateFileName(tc.input)
+
+ if tc.valid {
+ suite.True(ok)
+ suite.Empty(errMsg)
+ } else {
+ suite.False(ok)
+ suite.NotEmpty(errMsg)
+ }
+ })
+ }
+}
+
+// In order for `go test` to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run.
+func TestValidateTestSuite(t *testing.T) {
+ suite.Run(t, new(ValidateTestSuite))
+}
diff --git a/internal/api/gen/api.yaml b/internal/api/gen/api.yaml
index 8c0b59db..c51c0678 100644
--- a/internal/api/gen/api.yaml
+++ b/internal/api/gen/api.yaml
@@ -12,6 +12,9 @@ tags:
- name: OSAPI_-_A_CRUD_API_for_managing_Linux_systems_info
x-displayName: Info
description: Operations related to the info endpoint.
+ - name: File_Management_API_file_operations
+ x-displayName: File
+ description: Object Store file management operations.
- name: Health_Check_API_health
x-displayName: Health
description: Health check endpoints for liveness, readiness, and detailed status.
@@ -394,6 +397,228 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/ErrorResponse'
+ /file:
+ servers: []
+ post:
+ summary: Upload a file
+ description: Upload a file to the Object Store.
+ tags:
+ - File_Management_API_file_operations
+ operationId: PostFile
+ security:
+ - BearerAuth:
+ - file:write
+ parameters:
+ - name: force
+ in: query
+ required: false
+ description: >
+ When true, bypass the digest check and always write the file.
+ Returns changed=true regardless of whether the content differs from
+ the existing object.
+ schema:
+ type: boolean
+ default: false
+ requestBody:
+ description: The file to upload.
+ required: true
+ content:
+ multipart/form-data:
+ schema:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file in the Object Store.
+ example: nginx.conf
+ content_type:
+ type: string
+ description: >
+ How the file should be treated during deploy. "raw" writes
+ bytes as-is; "template" renders with Go text/template and
+ agent facts.
+ default: raw
+ enum:
+ - raw
+ - template
+ file:
+ type: string
+ format: binary
+ description: The file content.
+ required:
+ - name
+ - file
+ responses:
+ '201':
+ description: File uploaded successfully.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileUploadResponse'
+ '400':
+ description: Invalid request payload.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized - API key required
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '409':
+ description: >
+ File already exists with different content. Use ?force=true to
+ overwrite.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Error uploading file.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ get:
+ summary: List stored files
+ description: List all files stored in the Object Store.
+ tags:
+ - File_Management_API_file_operations
+ operationId: GetFiles
+ security:
+ - BearerAuth:
+ - file:read
+ responses:
+ '200':
+ description: List of stored files.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileListResponse'
+ '401':
+ description: Unauthorized - API key required
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Error listing files.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ /file/{name}:
+ servers: []
+ get:
+ summary: Get file metadata
+ description: Get metadata for a specific file in the Object Store.
+ tags:
+ - File_Management_API_file_operations
+ operationId: GetFileByName
+ security:
+ - BearerAuth:
+ - file:read
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File metadata.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileInfoResponse'
+ '400':
+ description: Invalid file name.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized - API key required
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Error retrieving file metadata.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ delete:
+ summary: Delete a file
+ description: Delete a file from the Object Store.
+ tags:
+ - File_Management_API_file_operations
+ operationId: DeleteFileByName
+ security:
+ - BearerAuth:
+ - file:write
+ parameters:
+ - $ref: '#/components/parameters/FileName'
+ responses:
+ '200':
+ description: File deleted successfully.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeleteResponse'
+ '400':
+ description: Invalid file name.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized - API key required
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden - Insufficient permissions
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '404':
+ description: File not found.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Error deleting file.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
/health:
servers: []
get:
@@ -1284,6 +1509,104 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/ErrorResponse'
+ /node/{hostname}/file/deploy:
+ servers: []
+ post:
+ operationId: PostNodeFileDeploy
+ summary: Deploy a file from Object Store to the host
+ tags:
+ - Node_Management_API_node_operations
+ security:
+ - BearerAuth:
+ - file:write
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployRequest'
+ responses:
+ '202':
+ description: File deploy job accepted.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployResponse'
+ '400':
+ description: Invalid input.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal error.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ /node/{hostname}/file/status:
+ servers: []
+ post:
+ operationId: PostNodeFileStatus
+ summary: Check deployment status of a file on the host
+ tags:
+ - Node_Management_API_node_operations
+ security:
+ - BearerAuth:
+ - file:read
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusRequest'
+ responses:
+ '200':
+ description: File status.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusResponse'
+ '400':
+ description: Invalid input.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal error.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorResponse'
/node/{hostname}/command/exec:
servers: []
post:
@@ -1750,6 +2073,117 @@ components:
required:
- total_items
- items
+ FileInfo:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file.
+ example: nginx.conf
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ content_type:
+ type: string
+ description: |
+ How the file should be treated during deploy (raw or template).
+ example: raw
+ required:
+ - name
+ - sha256
+ - size
+ - content_type
+ FileUploadResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the uploaded file.
+ example: nginx.conf
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ changed:
+ type: boolean
+ description: >
+ Whether the file content changed. False when the Object Store
+ already held an object with the same SHA-256 digest.
+ example: true
+ content_type:
+ type: string
+ description: |
+ How the file should be treated during deploy (raw or template).
+ example: raw
+ required:
+ - name
+ - sha256
+ - size
+ - changed
+ - content_type
+ FileListResponse:
+ type: object
+ properties:
+ files:
+ type: array
+ items:
+ $ref: '#/components/schemas/FileInfo'
+ description: List of stored files.
+ total:
+ type: integer
+ description: Total number of files.
+ example: 5
+ required:
+ - files
+ - total
+ FileInfoResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the file.
+ example: nginx.conf
+ sha256:
+ type: string
+ description: SHA-256 hash of the file content.
+ example: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+ size:
+ type: integer
+ description: File size in bytes.
+ example: 1024
+ content_type:
+ type: string
+ description: |
+ How the file should be treated during deploy (raw or template).
+ example: raw
+ required:
+ - name
+ - sha256
+ - size
+ - content_type
+ FileDeleteResponse:
+ type: object
+ properties:
+ name:
+ type: string
+ description: The name of the deleted file.
+ example: nginx.conf
+ deleted:
+ type: boolean
+ description: Whether the file was deleted.
+ example: true
+ required:
+ - name
+ - deleted
HealthResponse:
type: object
properties:
@@ -2539,6 +2973,93 @@ components:
Accepts alphanumeric names or @fact. references.
required:
- interface_name
+ FileDeployRequest:
+ type: object
+ properties:
+ object_name:
+ type: string
+ description: Name of the file in the Object Store.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ path:
+ type: string
+ description: Destination path on the target filesystem.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ mode:
+ type: string
+ description: File permission mode (e.g., "0644").
+ owner:
+ type: string
+ description: File owner user.
+ group:
+ type: string
+ description: File owner group.
+ content_type:
+ type: string
+ description: Content type — "raw" or "template".
+ enum:
+ - raw
+ - template
+ x-oapi-codegen-extra-tags:
+ validate: required,oneof=raw template
+ vars:
+ type: object
+ description: Template variables when content_type is "template".
+ additionalProperties: true
+ required:
+ - object_name
+ - path
+ - content_type
+ FileDeployResponse:
+ type: object
+ properties:
+ job_id:
+ type: string
+ description: The ID of the created job.
+ hostname:
+ type: string
+ description: The agent that processed the job.
+ changed:
+ type: boolean
+ description: Whether the file was actually written.
+ required:
+ - job_id
+ - hostname
+ - changed
+ FileStatusRequest:
+ type: object
+ properties:
+ path:
+ type: string
+ description: Filesystem path to check.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ required:
+ - path
+ FileStatusResponse:
+ type: object
+ properties:
+ job_id:
+ type: string
+ description: The ID of the created job.
+ hostname:
+ type: string
+ description: The agent that processed the job.
+ path:
+ type: string
+ description: The filesystem path.
+ status:
+ type: string
+ description: File state — "in-sync", "drifted", or "missing".
+ sha256:
+ type: string
+ description: Current SHA-256 of the file on disk.
+ required:
+ - job_id
+ - hostname
+ - path
+ - status
CommandExecRequest:
type: object
properties:
@@ -2635,6 +3156,18 @@ components:
required:
- results
parameters:
+ FileName:
+ name: name
+ in: path
+ required: true
+ description: |
+ The name of the file in the Object Store.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ schema:
+ type: string
+ minLength: 1
+ maxLength: 255
Hostname:
name: hostname
in: path
@@ -2657,6 +3190,9 @@ x-tagGroups:
- name: OSAPI - A CRUD API for managing Linux systems
tags:
- OSAPI_-_A_CRUD_API_for_managing_Linux_systems_info
+ - name: File Management API
+ tags:
+ - File_Management_API_file_operations
- name: Health Check API
tags:
- Health_Check_API_health
diff --git a/internal/api/handler_file.go b/internal/api/handler_file.go
new file mode 100644
index 00000000..8030d462
--- /dev/null
+++ b/internal/api/handler_file.go
@@ -0,0 +1,60 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package api
+
+import (
+ "github.com/labstack/echo/v4"
+ strictecho "github.com/oapi-codegen/runtime/strictmiddleware/echo"
+
+ "github.com/retr0h/osapi/internal/api/file"
+ fileGen "github.com/retr0h/osapi/internal/api/file/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+)
+
+// GetFileHandler returns file handler for registration.
+func (s *Server) GetFileHandler(
+ objStore file.ObjectStoreManager,
+) []func(e *echo.Echo) {
+ var tokenManager TokenValidator = authtoken.New(s.logger)
+
+ fileHandler := file.New(s.logger, objStore)
+
+ strictHandler := fileGen.NewStrictHandler(
+ fileHandler,
+ []fileGen.StrictMiddlewareFunc{
+ func(handler strictecho.StrictEchoHandlerFunc, _ string) strictecho.StrictEchoHandlerFunc {
+ return scopeMiddleware(
+ handler,
+ tokenManager,
+ s.appConfig.API.Server.Security.SigningKey,
+ fileGen.BearerAuthScopes,
+ s.customRoles,
+ )
+ },
+ },
+ )
+
+ return []func(e *echo.Echo){
+ func(e *echo.Echo) {
+ fileGen.RegisterHandlers(e, strictHandler)
+ },
+ }
+}
diff --git a/internal/api/handler_public_test.go b/internal/api/handler_public_test.go
index 742e0ca7..4ce13960 100644
--- a/internal/api/handler_public_test.go
+++ b/internal/api/handler_public_test.go
@@ -33,6 +33,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/retr0h/osapi/internal/api"
+ fileMocks "github.com/retr0h/osapi/internal/api/file/mocks"
"github.com/retr0h/osapi/internal/api/health"
auditstore "github.com/retr0h/osapi/internal/audit"
"github.com/retr0h/osapi/internal/config"
@@ -338,6 +339,46 @@ func (s *HandlerPublicTestSuite) TestGetAuditHandler() {
}
}
+func (s *HandlerPublicTestSuite) TestGetFileHandler() {
+	tests := []struct {
+		name     string
+		validate func([]func(e *echo.Echo))
+	}{
+		{
+			name: "returns handler functions",
+			validate: func(handlers []func(e *echo.Echo)) {
+				s.NotEmpty(handlers)
+			},
+		},
+		{
+			name: "closure registers routes and middleware executes",
+			validate: func(handlers []func(e *echo.Echo)) {
+				e := echo.New()
+				for _, h := range handlers {
+					h(e)
+				}
+				s.NotEmpty(e.Routes()) // closure must have registered the generated file routes
+
+				req := httptest.NewRequest(http.MethodGet, "/file", nil)
+				rec := httptest.NewRecorder()
+				e.ServeHTTP(rec, req) // drives the auth middleware path; response status not asserted here
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			ctrl := gomock.NewController(s.T())
+			defer ctrl.Finish()
+			mockObjStore := fileMocks.NewMockObjectStoreManager(ctrl)
+
+			handlers := s.server.GetFileHandler(mockObjStore)
+
+			tt.validate(handlers)
+		})
+	}
+}
+
func (s *HandlerPublicTestSuite) TestRegisterHandlers() {
tests := []struct {
name string
diff --git a/internal/api/job/job_create_public_test.go b/internal/api/job/job_create_public_test.go
index 1b2a6256..f3142263 100644
--- a/internal/api/job/job_create_public_test.go
+++ b/internal/api/job/job_create_public_test.go
@@ -189,7 +189,7 @@ func (s *JobCreatePublicTestSuite) TestPostJob() {
}
}
-func (s *JobCreatePublicTestSuite) TestPostJobHTTP() {
+func (s *JobCreatePublicTestSuite) TestPostJobValidationHTTP() {
tests := []struct {
name string
body string
diff --git a/internal/api/job/job_list_public_test.go b/internal/api/job/job_list_public_test.go
index be0ed87f..873dff7e 100644
--- a/internal/api/job/job_list_public_test.go
+++ b/internal/api/job/job_list_public_test.go
@@ -272,7 +272,7 @@ func (s *JobListPublicTestSuite) TestGetJob() {
}
}
-func (s *JobListPublicTestSuite) TestListJobsHTTP() {
+func (s *JobListPublicTestSuite) TestListJobsValidationHTTP() {
tests := []struct {
name string
query string
diff --git a/internal/api/job/job_retry_public_test.go b/internal/api/job/job_retry_public_test.go
index 5df5d4be..6f735a98 100644
--- a/internal/api/job/job_retry_public_test.go
+++ b/internal/api/job/job_retry_public_test.go
@@ -208,7 +208,7 @@ func (s *JobRetryPublicTestSuite) TestRetryJobByID() {
}
}
-func (s *JobRetryPublicTestSuite) TestRetryJobByIDHTTP() {
+func (s *JobRetryPublicTestSuite) TestRetryJobByIDValidationHTTP() {
tests := []struct {
name string
jobID string
diff --git a/internal/api/node/command_exec_post_public_test.go b/internal/api/node/command_exec_post_public_test.go
index 10fc0901..76f8c0d4 100644
--- a/internal/api/node/command_exec_post_public_test.go
+++ b/internal/api/node/command_exec_post_public_test.go
@@ -368,7 +368,7 @@ func (s *CommandExecPostPublicTestSuite) TestPostNodeCommandExec() {
}
}
-func (s *CommandExecPostPublicTestSuite) TestPostCommandExecHTTP() {
+func (s *CommandExecPostPublicTestSuite) TestPostCommandExecValidationHTTP() {
tests := []struct {
name string
path string
diff --git a/internal/api/node/command_shell_post_public_test.go b/internal/api/node/command_shell_post_public_test.go
index 6b4eb818..c04e930d 100644
--- a/internal/api/node/command_shell_post_public_test.go
+++ b/internal/api/node/command_shell_post_public_test.go
@@ -344,7 +344,7 @@ func (s *CommandShellPostPublicTestSuite) TestPostNodeCommandShell() {
}
}
-func (s *CommandShellPostPublicTestSuite) TestPostCommandShellHTTP() {
+func (s *CommandShellPostPublicTestSuite) TestPostCommandShellValidationHTTP() {
tests := []struct {
name string
path string
diff --git a/internal/api/node/file_deploy_post.go b/internal/api/node/file_deploy_post.go
new file mode 100644
index 00000000..79f24971
--- /dev/null
+++ b/internal/api/node/file_deploy_post.go
@@ -0,0 +1,88 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package node
+
+import (
+	"context"
+	"log/slog"
+
+	"github.com/retr0h/osapi/internal/api/node/gen"
+	"github.com/retr0h/osapi/internal/validation"
+)
+
+// PostNodeFileDeploy handles the node file deploy API endpoint.
+func (s *Node) PostNodeFileDeploy(
+	ctx context.Context,
+	request gen.PostNodeFileDeployRequestObject,
+) (gen.PostNodeFileDeployResponseObject, error) {
+	if errMsg, ok := validateHostname(request.Hostname); !ok {
+		return gen.PostNodeFileDeploy400JSONResponse{Error: &errMsg}, nil
+	}
+
+	if errMsg, ok := validation.Struct(request.Body); !ok {
+		return gen.PostNodeFileDeploy400JSONResponse{Error: &errMsg}, nil
+	}
+
+	// Optional attributes fall back to their zero values when omitted.
+	deref := func(p *string) string {
+		if p != nil {
+			return *p
+		}
+		return ""
+	}
+
+	body := request.Body
+	hostname := request.Hostname
+
+	var vars map[string]any
+	if body.Vars != nil {
+		vars = *body.Vars
+	}
+
+	s.logger.Debug("file deploy",
+		slog.String("object_name", body.ObjectName),
+		slog.String("path", body.Path),
+		slog.String("content_type", string(body.ContentType)),
+		slog.String("target", hostname),
+	)
+
+	jobID, agentHostname, changed, err := s.JobClient.ModifyFileDeploy(
+		ctx,
+		hostname,
+		body.ObjectName,
+		body.Path,
+		string(body.ContentType),
+		deref(body.Mode),
+		deref(body.Owner),
+		deref(body.Group),
+		vars,
+	)
+	if err != nil {
+		errMsg := err.Error()
+		return gen.PostNodeFileDeploy500JSONResponse{Error: &errMsg}, nil
+	}
+
+	return gen.PostNodeFileDeploy202JSONResponse{
+		JobId:    jobID,
+		Hostname: agentHostname,
+		Changed:  changed,
+	}, nil
+}
diff --git a/internal/api/node/file_deploy_post_public_test.go b/internal/api/node/file_deploy_post_public_test.go
new file mode 100644
index 00000000..d3c41dfc
--- /dev/null
+++ b/internal/api/node/file_deploy_post_public_test.go
@@ -0,0 +1,490 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package node_test
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apinode "github.com/retr0h/osapi/internal/api/node"
+ "github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+ "github.com/retr0h/osapi/internal/validation"
+)
+
+type FileDeployPostPublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl      *gomock.Controller
+	mockJobClient *jobmocks.MockJobClient
+	handler       *apinode.Node
+	ctx           context.Context
+	appConfig     config.Config
+	logger        *slog.Logger
+}
+
+func (s *FileDeployPostPublicTestSuite) SetupSuite() {
+	validation.RegisterTargetValidator(func(_ context.Context) ([]validation.AgentTarget, error) { // stub agent inventory consulted by hostname validation
+		return []validation.AgentTarget{
+			{Hostname: "server1", Labels: map[string]string{"group": "web"}},
+			{Hostname: "server2"},
+		}, nil
+	})
+}
+
+func (s *FileDeployPostPublicTestSuite) SetupTest() {
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
+	s.handler = apinode.New(slog.Default(), s.mockJobClient)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+}
+
+func (s *FileDeployPostPublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish() // asserts every mock expectation was satisfied
+}
+
+func (s *FileDeployPostPublicTestSuite) TestPostNodeFileDeploy() {
+	tests := []struct {
+		name         string
+		request      gen.PostNodeFileDeployRequestObject
+		setupMock    func()
+		validateFunc func(resp gen.PostNodeFileDeployResponseObject)
+	}{
+		{
+			name: "when success",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "nginx.conf",
+					Path:        "/etc/nginx/nginx.conf",
+					ContentType: gen.Raw,
+					Mode:        strPtr("0644"),
+					Owner:       strPtr("root"),
+					Group:       strPtr("root"),
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					ModifyFileDeploy(
+						gomock.Any(),
+						"_any",
+						"nginx.conf",
+						"/etc/nginx/nginx.conf",
+						"raw",
+						"0644",
+						"root",
+						"root",
+						map[string]any(nil), // raw deploys carry no template vars
+					).
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						"agent1",
+						true,
+						nil,
+					)
+			},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy202JSONResponse)
+				s.True(ok)
+				s.Equal("550e8400-e29b-41d4-a716-446655440000", r.JobId)
+				s.Equal("agent1", r.Hostname)
+				s.True(r.Changed)
+			},
+		},
+		{
+			name: "when success with template vars",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "app.conf.tmpl",
+					Path:        "/etc/app/app.conf",
+					ContentType: gen.Template,
+					Vars: &map[string]interface{}{
+						"port": float64(8080), // JSON numbers decode as float64
+					},
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					ModifyFileDeploy(
+						gomock.Any(),
+						"_any",
+						"app.conf.tmpl",
+						"/etc/app/app.conf",
+						"template",
+						"",
+						"",
+						"",
+						map[string]any{"port": float64(8080)},
+					).
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						"agent1",
+						true,
+						nil,
+					)
+			},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy202JSONResponse)
+				s.True(ok)
+				s.Equal("agent1", r.Hostname)
+				s.True(r.Changed)
+			},
+		},
+		{
+			name: "when validation error empty hostname",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "nginx.conf",
+					Path:        "/etc/nginx/nginx.conf",
+					ContentType: gen.Raw,
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "required")
+			},
+		},
+		{
+			name: "when validation error missing object_name",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "",
+					Path:        "/etc/nginx/nginx.conf",
+					ContentType: gen.Raw,
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "ObjectName")
+			},
+		},
+		{
+			name: "when validation error missing path",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "nginx.conf",
+					Path:        "",
+					ContentType: gen.Raw,
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "Path")
+			},
+		},
+		{
+			name: "when validation error invalid content_type",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "nginx.conf",
+					Path:        "/etc/nginx/nginx.conf",
+					ContentType: gen.FileDeployRequestContentType("invalid"),
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				r, ok := resp.(gen.PostNodeFileDeploy400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "ContentType")
+			},
+		},
+		{
+			name: "when job client error",
+			request: gen.PostNodeFileDeployRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileDeployJSONRequestBody{
+					ObjectName:  "nginx.conf",
+					Path:        "/etc/nginx/nginx.conf",
+					ContentType: gen.Raw,
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					ModifyFileDeploy(
+						gomock.Any(),
+						"_any",
+						"nginx.conf",
+						"/etc/nginx/nginx.conf",
+						"raw",
+						"",
+						"",
+						"",
+						map[string]any(nil),
+					).
+					Return("", "", false, assert.AnError)
+			},
+			validateFunc: func(resp gen.PostNodeFileDeployResponseObject) {
+				_, ok := resp.(gen.PostNodeFileDeploy500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.PostNodeFileDeploy(s.ctx, tt.request)
+			s.NoError(err) // handler maps failures into typed responses, never returns an error
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+func (s *FileDeployPostPublicTestSuite) TestPostNodeFileDeployValidationHTTP() {
+	tests := []struct {
+		name         string
+		path         string
+		body         string
+		setupJobMock func() *jobmocks.MockJobClient
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when valid request",
+			path: "/node/server1/file/deploy",
+			body: `{"object_name":"nginx.conf","path":"/etc/nginx/nginx.conf","content_type":"raw"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				mock := jobmocks.NewMockJobClient(s.mockCtrl)
+				mock.EXPECT().
+					ModifyFileDeploy(gomock.Any(), "server1", gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
+					Return("550e8400-e29b-41d4-a716-446655440000", "agent1", true, nil)
+				return mock
+			},
+			wantCode:     http.StatusAccepted,
+			wantContains: []string{`"job_id"`, `"agent1"`, `"changed":true`},
+		},
+		{
+			name: "when missing object_name",
+			path: "/node/server1/file/deploy",
+			body: `{"path":"/etc/nginx/nginx.conf","content_type":"raw"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`, "ObjectName", "required"},
+		},
+		{
+			name: "when invalid content_type",
+			path: "/node/server1/file/deploy",
+			body: `{"object_name":"nginx.conf","path":"/etc/nginx/nginx.conf","content_type":"invalid"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`, "ContentType"},
+		},
+		{
+			name: "when server error",
+			path: "/node/server1/file/deploy",
+			body: `{"object_name":"nginx.conf","path":"/etc/nginx/nginx.conf","content_type":"raw"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				mock := jobmocks.NewMockJobClient(s.mockCtrl)
+				mock.EXPECT().
+					ModifyFileDeploy(gomock.Any(), "server1", gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
+					Return("", "", false, assert.AnError)
+				return mock
+			},
+			wantCode:     http.StatusInternalServerError,
+			wantContains: []string{`"error"`},
+		},
+		{
+			name: "when target agent not found",
+			path: "/node/nonexistent/file/deploy",
+			body: `{"object_name":"nginx.conf","path":"/etc/nginx/nginx.conf","content_type":"raw"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`, "valid_target", "not found"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			jobMock := tc.setupJobMock()
+
+			nodeHandler := apinode.New(s.logger, jobMock)
+			strictHandler := gen.NewStrictHandler(nodeHandler, nil) // no auth middleware: exercises request validation only
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			req := httptest.NewRequest(
+				http.MethodPost,
+				tc.path,
+				strings.NewReader(tc.body),
+			)
+			req.Header.Set("Content-Type", "application/json")
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+const rbacFileDeployTestSigningKey = "test-signing-key-for-file-deploy-rbac"
+
+func (s *FileDeployPostPublicTestSuite) TestPostNodeFileDeployRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupJobMock func() *jobmocks.MockJobClient
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacFileDeployTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"node:read"}, // lacks the file:write scope this route requires
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:write returns 202",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacFileDeployTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:write"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				mock := jobmocks.NewMockJobClient(s.mockCtrl)
+				mock.EXPECT().
+					ModifyFileDeploy(gomock.Any(), "server1", gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						"agent1",
+						true,
+						nil,
+					)
+				return mock
+			},
+			wantCode:     http.StatusAccepted,
+			wantContains: []string{`"job_id"`, `"changed":true`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			jobMock := tc.setupJobMock()
+
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacFileDeployTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetNodeHandler(jobMock)
+			server.RegisterHandlers(handlers) // full registration path, auth middleware included
+
+			req := httptest.NewRequest(
+				http.MethodPost,
+				"/node/server1/file/deploy",
+				strings.NewReader(
+					`{"object_name":"nginx.conf","path":"/etc/nginx/nginx.conf","content_type":"raw"}`,
+				),
+			)
+			req.Header.Set("Content-Type", "application/json")
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+func TestFileDeployPostPublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileDeployPostPublicTestSuite)) // entry point registering the suite with go test
+}
diff --git a/internal/api/node/file_status_post.go b/internal/api/node/file_status_post.go
new file mode 100644
index 00000000..22c28b67
--- /dev/null
+++ b/internal/api/node/file_status_post.go
@@ -0,0 +1,70 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package node
+
+import (
+	"context"
+	"log/slog"
+
+	"github.com/retr0h/osapi/internal/api/node/gen"
+	"github.com/retr0h/osapi/internal/validation"
+)
+
+// PostNodeFileStatus handles the node file status API endpoint.
+func (s *Node) PostNodeFileStatus(
+	ctx context.Context,
+	request gen.PostNodeFileStatusRequestObject,
+) (gen.PostNodeFileStatusResponseObject, error) {
+	if errMsg, ok := validateHostname(request.Hostname); !ok {
+		return gen.PostNodeFileStatus400JSONResponse{Error: &errMsg}, nil
+	}
+
+	if errMsg, ok := validation.Struct(request.Body); !ok {
+		return gen.PostNodeFileStatus400JSONResponse{Error: &errMsg}, nil
+	}
+
+	hostname := request.Hostname
+	path := request.Body.Path
+
+	s.logger.Debug("file status",
+		slog.String("path", path),
+		slog.String("target", hostname),
+	)
+
+	jobID, result, agentHostname, err := s.JobClient.QueryFileStatus(ctx, hostname, path)
+	if err != nil {
+		errMsg := err.Error()
+		return gen.PostNodeFileStatus500JSONResponse{Error: &errMsg}, nil
+	}
+
+	resp := gen.PostNodeFileStatus200JSONResponse{
+		JobId:    jobID,
+		Hostname: agentHostname,
+		Path:     result.Path,
+		Status:   result.Status,
+	}
+	// Sha256 stays nil when the agent reported no checksum
+	// (e.g. the file is missing on the host).
+	if sha := result.SHA256; sha != "" {
+		resp.Sha256 = &sha
+	}
+
+	return resp, nil
+}
diff --git a/internal/api/node/file_status_post_public_test.go b/internal/api/node/file_status_post_public_test.go
new file mode 100644
index 00000000..ac33f069
--- /dev/null
+++ b/internal/api/node/file_status_post_public_test.go
@@ -0,0 +1,413 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package node_test
+
+import (
+ "context"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/api"
+ apinode "github.com/retr0h/osapi/internal/api/node"
+ "github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+ "github.com/retr0h/osapi/internal/provider/file"
+ "github.com/retr0h/osapi/internal/validation"
+)
+
+type FileStatusPostPublicTestSuite struct {
+	suite.Suite
+
+	mockCtrl      *gomock.Controller
+	mockJobClient *jobmocks.MockJobClient
+	handler       *apinode.Node
+	ctx           context.Context
+	appConfig     config.Config
+	logger        *slog.Logger
+}
+
+func (s *FileStatusPostPublicTestSuite) SetupSuite() {
+	validation.RegisterTargetValidator(func(_ context.Context) ([]validation.AgentTarget, error) { // stub agent inventory consulted by hostname validation
+		return []validation.AgentTarget{
+			{Hostname: "server1", Labels: map[string]string{"group": "web"}},
+			{Hostname: "server2"},
+		}, nil
+	})
+}
+
+func (s *FileStatusPostPublicTestSuite) SetupTest() {
+	s.mockCtrl = gomock.NewController(s.T())
+	s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
+	s.handler = apinode.New(slog.Default(), s.mockJobClient)
+	s.ctx = context.Background()
+	s.appConfig = config.Config{}
+	s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+}
+
+func (s *FileStatusPostPublicTestSuite) TearDownTest() {
+	s.mockCtrl.Finish() // asserts every mock expectation was satisfied
+}
+
+func (s *FileStatusPostPublicTestSuite) TestPostNodeFileStatus() {
+	tests := []struct {
+		name         string
+		request      gen.PostNodeFileStatusRequestObject
+		setupMock    func()
+		validateFunc func(resp gen.PostNodeFileStatusResponseObject)
+	}{
+		{
+			name: "when success with sha256",
+			request: gen.PostNodeFileStatusRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileStatusJSONRequestBody{
+					Path: "/etc/nginx/nginx.conf",
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					QueryFileStatus(
+						gomock.Any(),
+						"_any",
+						"/etc/nginx/nginx.conf",
+					).
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						&file.StatusResult{
+							Path:   "/etc/nginx/nginx.conf",
+							Status: "in-sync",
+							SHA256: "abc123def456",
+						},
+						"agent1",
+						nil,
+					)
+			},
+			validateFunc: func(resp gen.PostNodeFileStatusResponseObject) {
+				r, ok := resp.(gen.PostNodeFileStatus200JSONResponse)
+				s.True(ok)
+				s.Equal("550e8400-e29b-41d4-a716-446655440000", r.JobId)
+				s.Equal("agent1", r.Hostname)
+				s.Equal("/etc/nginx/nginx.conf", r.Path)
+				s.Equal("in-sync", r.Status)
+				s.Require().NotNil(r.Sha256)
+				s.Equal("abc123def456", *r.Sha256)
+			},
+		},
+		{
+			name: "when success missing file no sha256",
+			request: gen.PostNodeFileStatusRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileStatusJSONRequestBody{
+					Path: "/etc/missing.conf",
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					QueryFileStatus(
+						gomock.Any(),
+						"_any",
+						"/etc/missing.conf",
+					).
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						&file.StatusResult{
+							Path:   "/etc/missing.conf",
+							Status: "missing",
+						},
+						"agent1",
+						nil,
+					)
+			},
+			validateFunc: func(resp gen.PostNodeFileStatusResponseObject) {
+				r, ok := resp.(gen.PostNodeFileStatus200JSONResponse)
+				s.True(ok)
+				s.Equal("missing", r.Status)
+				s.Nil(r.Sha256) // sha256 omitted when the file does not exist on the host
+			},
+		},
+		{
+			name: "when validation error empty hostname",
+			request: gen.PostNodeFileStatusRequestObject{
+				Hostname: "",
+				Body: &gen.PostNodeFileStatusJSONRequestBody{
+					Path: "/etc/nginx/nginx.conf",
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileStatusResponseObject) {
+				r, ok := resp.(gen.PostNodeFileStatus400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "required")
+			},
+		},
+		{
+			name: "when validation error missing path",
+			request: gen.PostNodeFileStatusRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileStatusJSONRequestBody{
+					Path: "",
+				},
+			},
+			setupMock: func() {},
+			validateFunc: func(resp gen.PostNodeFileStatusResponseObject) {
+				r, ok := resp.(gen.PostNodeFileStatus400JSONResponse)
+				s.True(ok)
+				s.Require().NotNil(r.Error)
+				s.Contains(*r.Error, "Path")
+			},
+		},
+		{
+			name: "when job client error",
+			request: gen.PostNodeFileStatusRequestObject{
+				Hostname: "_any",
+				Body: &gen.PostNodeFileStatusJSONRequestBody{
+					Path: "/etc/nginx/nginx.conf",
+				},
+			},
+			setupMock: func() {
+				s.mockJobClient.EXPECT().
+					QueryFileStatus(
+						gomock.Any(),
+						"_any",
+						"/etc/nginx/nginx.conf",
+					).
+					Return("", nil, "", assert.AnError)
+			},
+			validateFunc: func(resp gen.PostNodeFileStatusResponseObject) {
+				_, ok := resp.(gen.PostNodeFileStatus500JSONResponse)
+				s.True(ok)
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		s.Run(tt.name, func() {
+			tt.setupMock()
+
+			resp, err := s.handler.PostNodeFileStatus(s.ctx, tt.request)
+			s.NoError(err) // handler maps failures into typed responses, never returns an error
+			tt.validateFunc(resp)
+		})
+	}
+}
+
+func (s *FileStatusPostPublicTestSuite) TestPostNodeFileStatusValidationHTTP() {
+	tests := []struct {
+		name         string
+		path         string
+		body         string
+		setupJobMock func() *jobmocks.MockJobClient
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when valid request",
+			path: "/node/server1/file/status",
+			body: `{"path":"/etc/nginx/nginx.conf"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				mock := jobmocks.NewMockJobClient(s.mockCtrl)
+				mock.EXPECT().
+					QueryFileStatus(gomock.Any(), "server1", "/etc/nginx/nginx.conf").
+					Return("550e8400-e29b-41d4-a716-446655440000", &file.StatusResult{
+						Path:   "/etc/nginx/nginx.conf",
+						Status: "in-sync",
+						SHA256: "abc123",
+					}, "agent1", nil)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"job_id"`, `"agent1"`, `"in-sync"`, `"sha256"`},
+		},
+		{
+			name: "when missing path",
+			path: "/node/server1/file/status",
+			body: `{}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`, "Path", "required"},
+		},
+		{
+			name: "when target agent not found",
+			path: "/node/nonexistent/file/status",
+			body: `{"path":"/etc/nginx/nginx.conf"}`,
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusBadRequest,
+			wantContains: []string{`"error"`, "valid_target", "not found"},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			jobMock := tc.setupJobMock()
+
+			nodeHandler := apinode.New(s.logger, jobMock)
+			strictHandler := gen.NewStrictHandler(nodeHandler, nil) // no auth middleware: exercises request validation only
+
+			a := api.New(s.appConfig, s.logger)
+			gen.RegisterHandlers(a.Echo, strictHandler)
+
+			req := httptest.NewRequest(
+				http.MethodPost,
+				tc.path,
+				strings.NewReader(tc.body),
+			)
+			req.Header.Set("Content-Type", "application/json")
+			rec := httptest.NewRecorder()
+
+			a.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+const rbacFileStatusTestSigningKey = "test-signing-key-for-file-status-rbac"
+
+func (s *FileStatusPostPublicTestSuite) TestPostNodeFileStatusRBACHTTP() {
+	tokenManager := authtoken.New(s.logger)
+
+	tests := []struct {
+		name         string
+		setupAuth    func(req *http.Request)
+		setupJobMock func() *jobmocks.MockJobClient
+		wantCode     int
+		wantContains []string
+	}{
+		{
+			name: "when no token returns 401",
+			setupAuth: func(_ *http.Request) {
+				// No auth header set
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusUnauthorized,
+			wantContains: []string{"Bearer token required"},
+		},
+		{
+			name: "when insufficient permissions returns 403",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacFileStatusTestSigningKey,
+					[]string{"read"},
+					"test-user",
+					[]string{"node:read"}, // lacks the file:read scope this route requires
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				return jobmocks.NewMockJobClient(s.mockCtrl)
+			},
+			wantCode:     http.StatusForbidden,
+			wantContains: []string{"Insufficient permissions"},
+		},
+		{
+			name: "when valid token with file:read returns 200",
+			setupAuth: func(req *http.Request) {
+				token, err := tokenManager.Generate(
+					rbacFileStatusTestSigningKey,
+					[]string{"admin"},
+					"test-user",
+					[]string{"file:read"},
+				)
+				s.Require().NoError(err)
+				req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+			},
+			setupJobMock: func() *jobmocks.MockJobClient {
+				mock := jobmocks.NewMockJobClient(s.mockCtrl)
+				mock.EXPECT().
+					QueryFileStatus(gomock.Any(), "server1", "/etc/nginx/nginx.conf").
+					Return(
+						"550e8400-e29b-41d4-a716-446655440000",
+						&file.StatusResult{
+							Path:   "/etc/nginx/nginx.conf",
+							Status: "in-sync",
+							SHA256: "abc123",
+						},
+						"agent1",
+						nil,
+					)
+				return mock
+			},
+			wantCode:     http.StatusOK,
+			wantContains: []string{`"job_id"`, `"in-sync"`},
+		},
+	}
+
+	for _, tc := range tests {
+		s.Run(tc.name, func() {
+			jobMock := tc.setupJobMock()
+
+			appConfig := config.Config{
+				API: config.API{
+					Server: config.Server{
+						Security: config.ServerSecurity{
+							SigningKey: rbacFileStatusTestSigningKey,
+						},
+					},
+				},
+			}
+
+			server := api.New(appConfig, s.logger)
+			handlers := server.GetNodeHandler(jobMock)
+			server.RegisterHandlers(handlers) // full registration path, auth middleware included
+
+			req := httptest.NewRequest(
+				http.MethodPost,
+				"/node/server1/file/status",
+				strings.NewReader(`{"path":"/etc/nginx/nginx.conf"}`),
+			)
+			req.Header.Set("Content-Type", "application/json")
+			tc.setupAuth(req)
+			rec := httptest.NewRecorder()
+
+			server.Echo.ServeHTTP(rec, req)
+
+			s.Equal(tc.wantCode, rec.Code)
+			for _, str := range tc.wantContains {
+				s.Contains(rec.Body.String(), str)
+			}
+		})
+	}
+}
+
+func TestFileStatusPostPublicTestSuite(t *testing.T) {
+	suite.Run(t, new(FileStatusPostPublicTestSuite)) // entry point registering the suite with go test
+}
diff --git a/internal/api/node/gen/api.yaml b/internal/api/node/gen/api.yaml
index 3383f8a3..bc70882c 100644
--- a/internal/api/node/gen/api.yaml
+++ b/internal/api/node/gen/api.yaml
@@ -529,6 +529,106 @@ paths:
schema:
$ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ # ── File operations ──────────────────────────────────────
+
+ /node/{hostname}/file/deploy:
+ post:
+ operationId: PostNodeFileDeploy
+ summary: Deploy a file from Object Store to the host
+ tags:
+ - node_operations
+ security:
+ - BearerAuth:
+ - "file:write"
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployRequest'
+ responses:
+ '202':
+ description: File deploy job accepted.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileDeployResponse'
+ '400':
+ description: Invalid input.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
+ /node/{hostname}/file/status:
+ post:
+ operationId: PostNodeFileStatus
+ summary: Check deployment status of a file on the host
+ tags:
+ - node_operations
+ security:
+ - BearerAuth:
+ - "file:read"
+ parameters:
+ - $ref: '#/components/parameters/Hostname'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusRequest'
+ responses:
+ '200':
+ description: File status.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/FileStatusResponse'
+ '400':
+ description: Invalid input.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '401':
+ description: Unauthorized.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '403':
+ description: Forbidden.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+ '500':
+ description: Internal error.
+ content:
+ application/json:
+ schema:
+ $ref: '../../common/gen/api.yaml#/components/schemas/ErrorResponse'
+
# ── Command operations ───────────────────────────────────
/node/{hostname}/command/exec:
@@ -1138,6 +1238,86 @@ components:
required:
- interface_name
+ # ── File schemas ──────────────────────────────────────
+
+ FileDeployRequest:
+ type: object
+ properties:
+ object_name:
+ type: string
+ description: Name of the file in the Object Store.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1,max=255
+ path:
+ type: string
+ description: Destination path on the target filesystem.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ mode:
+ type: string
+ description: File permission mode (e.g., "0644").
+ owner:
+ type: string
+ description: File owner user.
+ group:
+ type: string
+ description: File owner group.
+ content_type:
+ type: string
+ description: Content type — "raw" or "template".
+ enum: [raw, template]
+ x-oapi-codegen-extra-tags:
+ validate: required,oneof=raw template
+ vars:
+ type: object
+ description: Template variables when content_type is "template".
+ additionalProperties: true
+ required: [object_name, path, content_type]
+
+ FileDeployResponse:
+ type: object
+ properties:
+ job_id:
+ type: string
+ description: The ID of the created job.
+ hostname:
+ type: string
+ description: The agent that processed the job.
+ changed:
+ type: boolean
+ description: Whether the file was actually written.
+ required: [job_id, hostname, changed]
+
+ FileStatusRequest:
+ type: object
+ properties:
+ path:
+ type: string
+ description: Filesystem path to check.
+ x-oapi-codegen-extra-tags:
+ validate: required,min=1
+ required: [path]
+
+ FileStatusResponse:
+ type: object
+ properties:
+ job_id:
+ type: string
+ description: The ID of the created job.
+ hostname:
+ type: string
+ description: The agent that processed the job.
+ path:
+ type: string
+ description: The filesystem path.
+ status:
+ type: string
+ description: File state — "in-sync", "drifted", or "missing".
+ sha256:
+ type: string
+ description: Current SHA-256 of the file on disk.
+ required: [job_id, hostname, path, status]
+
# ── Command schemas ────────────────────────────────────
CommandExecRequest:
diff --git a/internal/api/node/gen/node.gen.go b/internal/api/node/gen/node.gen.go
index 90eae3e6..8a4e7a44 100644
--- a/internal/api/node/gen/node.gen.go
+++ b/internal/api/node/gen/node.gen.go
@@ -26,6 +26,12 @@ const (
Ok DNSUpdateResultItemStatus = "ok"
)
+// Defines values for FileDeployRequestContentType.
+const (
+ Raw FileDeployRequestContentType = "raw"
+ Template FileDeployRequestContentType = "template"
+)
+
// CommandExecRequest defines model for CommandExecRequest.
type CommandExecRequest struct {
// Args Command arguments.
@@ -177,6 +183,69 @@ type DisksResponse = []DiskResponse
// ErrorResponse defines model for ErrorResponse.
type ErrorResponse = externalRef0.ErrorResponse
+// FileDeployRequest defines model for FileDeployRequest.
+type FileDeployRequest struct {
+ // ContentType Content type — "raw" or "template".
+ ContentType FileDeployRequestContentType `json:"content_type" validate:"required,oneof=raw template"`
+
+ // Group File owner group.
+ Group *string `json:"group,omitempty"`
+
+ // Mode File permission mode (e.g., "0644").
+ Mode *string `json:"mode,omitempty"`
+
+ // ObjectName Name of the file in the Object Store.
+ ObjectName string `json:"object_name" validate:"required,min=1,max=255"`
+
+ // Owner File owner user.
+ Owner *string `json:"owner,omitempty"`
+
+ // Path Destination path on the target filesystem.
+ Path string `json:"path" validate:"required,min=1"`
+
+ // Vars Template variables when content_type is "template".
+ Vars *map[string]interface{} `json:"vars,omitempty"`
+}
+
+// FileDeployRequestContentType Content type — "raw" or "template".
+type FileDeployRequestContentType string
+
+// FileDeployResponse defines model for FileDeployResponse.
+type FileDeployResponse struct {
+ // Changed Whether the file was actually written.
+ Changed bool `json:"changed"`
+
+ // Hostname The agent that processed the job.
+ Hostname string `json:"hostname"`
+
+ // JobId The ID of the created job.
+ JobId string `json:"job_id"`
+}
+
+// FileStatusRequest defines model for FileStatusRequest.
+type FileStatusRequest struct {
+ // Path Filesystem path to check.
+ Path string `json:"path" validate:"required,min=1"`
+}
+
+// FileStatusResponse defines model for FileStatusResponse.
+type FileStatusResponse struct {
+ // Hostname The agent that processed the job.
+ Hostname string `json:"hostname"`
+
+ // JobId The ID of the created job.
+ JobId string `json:"job_id"`
+
+ // Path The filesystem path.
+ Path string `json:"path"`
+
+ // Sha256 Current SHA-256 of the file on disk.
+ Sha256 *string `json:"sha256,omitempty"`
+
+ // Status File state — "in-sync", "drifted", or "missing".
+ Status string `json:"status"`
+}
+
// HostnameCollectionResponse defines model for HostnameCollectionResponse.
type HostnameCollectionResponse struct {
// JobId The job ID used to process this request.
@@ -385,6 +454,12 @@ type PostNodeCommandExecJSONRequestBody = CommandExecRequest
// PostNodeCommandShellJSONRequestBody defines body for PostNodeCommandShell for application/json ContentType.
type PostNodeCommandShellJSONRequestBody = CommandShellRequest
+// PostNodeFileDeployJSONRequestBody defines body for PostNodeFileDeploy for application/json ContentType.
+type PostNodeFileDeployJSONRequestBody = FileDeployRequest
+
+// PostNodeFileStatusJSONRequestBody defines body for PostNodeFileStatus for application/json ContentType.
+type PostNodeFileStatusJSONRequestBody = FileStatusRequest
+
// PutNodeNetworkDNSJSONRequestBody defines body for PutNodeNetworkDNS for application/json ContentType.
type PutNodeNetworkDNSJSONRequestBody = DNSConfigUpdateRequest
@@ -405,6 +480,12 @@ type ServerInterface interface {
// Retrieve disk usage
// (GET /node/{hostname}/disk)
GetNodeDisk(ctx echo.Context, hostname Hostname) error
+ // Deploy a file from Object Store to the host
+ // (POST /node/{hostname}/file/deploy)
+ PostNodeFileDeploy(ctx echo.Context, hostname Hostname) error
+ // Check deployment status of a file on the host
+ // (POST /node/{hostname}/file/status)
+ PostNodeFileStatus(ctx echo.Context, hostname Hostname) error
// Retrieve node hostname
// (GET /node/{hostname}/hostname)
GetNodeHostname(ctx echo.Context, hostname Hostname) error
@@ -508,6 +589,42 @@ func (w *ServerInterfaceWrapper) GetNodeDisk(ctx echo.Context) error {
return err
}
+// PostNodeFileDeploy converts echo context to params.
+func (w *ServerInterfaceWrapper) PostNodeFileDeploy(ctx echo.Context) error {
+ var err error
+ // ------------- Path parameter "hostname" -------------
+ var hostname Hostname
+
+ err = runtime.BindStyledParameterWithOptions("simple", "hostname", ctx.Param("hostname"), &hostname, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
+ if err != nil {
+ return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter hostname: %s", err))
+ }
+
+ ctx.Set(BearerAuthScopes, []string{"file:write"})
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.PostNodeFileDeploy(ctx, hostname)
+ return err
+}
+
+// PostNodeFileStatus converts echo context to params.
+func (w *ServerInterfaceWrapper) PostNodeFileStatus(ctx echo.Context) error {
+ var err error
+ // ------------- Path parameter "hostname" -------------
+ var hostname Hostname
+
+ err = runtime.BindStyledParameterWithOptions("simple", "hostname", ctx.Param("hostname"), &hostname, runtime.BindStyledParameterOptions{ParamLocation: runtime.ParamLocationPath, Explode: false, Required: true})
+ if err != nil {
+ return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter hostname: %s", err))
+ }
+
+ ctx.Set(BearerAuthScopes, []string{"file:read"})
+
+ // Invoke the callback with all the unmarshaled arguments
+ err = w.Handler.PostNodeFileStatus(ctx, hostname)
+ return err
+}
+
// GetNodeHostname converts echo context to params.
func (w *ServerInterfaceWrapper) GetNodeHostname(ctx echo.Context) error {
var err error
@@ -692,6 +809,8 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL
router.POST(baseURL+"/node/:hostname/command/exec", wrapper.PostNodeCommandExec)
router.POST(baseURL+"/node/:hostname/command/shell", wrapper.PostNodeCommandShell)
router.GET(baseURL+"/node/:hostname/disk", wrapper.GetNodeDisk)
+ router.POST(baseURL+"/node/:hostname/file/deploy", wrapper.PostNodeFileDeploy)
+ router.POST(baseURL+"/node/:hostname/file/status", wrapper.PostNodeFileStatus)
router.GET(baseURL+"/node/:hostname/hostname", wrapper.GetNodeHostname)
router.GET(baseURL+"/node/:hostname/load", wrapper.GetNodeLoad)
router.GET(baseURL+"/node/:hostname/memory", wrapper.GetNodeMemory)
@@ -917,6 +1036,114 @@ func (response GetNodeDisk500JSONResponse) VisitGetNodeDiskResponse(w http.Respo
return json.NewEncoder(w).Encode(response)
}
+type PostNodeFileDeployRequestObject struct {
+ Hostname Hostname `json:"hostname"`
+ Body *PostNodeFileDeployJSONRequestBody
+}
+
+type PostNodeFileDeployResponseObject interface {
+ VisitPostNodeFileDeployResponse(w http.ResponseWriter) error
+}
+
+type PostNodeFileDeploy202JSONResponse FileDeployResponse
+
+func (response PostNodeFileDeploy202JSONResponse) VisitPostNodeFileDeployResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(202)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileDeploy400JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileDeploy400JSONResponse) VisitPostNodeFileDeployResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(400)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileDeploy401JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileDeploy401JSONResponse) VisitPostNodeFileDeployResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileDeploy403JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileDeploy403JSONResponse) VisitPostNodeFileDeployResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileDeploy500JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileDeploy500JSONResponse) VisitPostNodeFileDeployResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileStatusRequestObject struct {
+ Hostname Hostname `json:"hostname"`
+ Body *PostNodeFileStatusJSONRequestBody
+}
+
+type PostNodeFileStatusResponseObject interface {
+ VisitPostNodeFileStatusResponse(w http.ResponseWriter) error
+}
+
+type PostNodeFileStatus200JSONResponse FileStatusResponse
+
+func (response PostNodeFileStatus200JSONResponse) VisitPostNodeFileStatusResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(200)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileStatus400JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileStatus400JSONResponse) VisitPostNodeFileStatusResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(400)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileStatus401JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileStatus401JSONResponse) VisitPostNodeFileStatusResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(401)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileStatus403JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileStatus403JSONResponse) VisitPostNodeFileStatusResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(403)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
+type PostNodeFileStatus500JSONResponse externalRef0.ErrorResponse
+
+func (response PostNodeFileStatus500JSONResponse) VisitPostNodeFileStatusResponse(w http.ResponseWriter) error {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(500)
+
+ return json.NewEncoder(w).Encode(response)
+}
+
type GetNodeHostnameRequestObject struct {
Hostname Hostname `json:"hostname"`
}
@@ -1358,6 +1585,12 @@ type StrictServerInterface interface {
// Retrieve disk usage
// (GET /node/{hostname}/disk)
GetNodeDisk(ctx context.Context, request GetNodeDiskRequestObject) (GetNodeDiskResponseObject, error)
+ // Deploy a file from Object Store to the host
+ // (POST /node/{hostname}/file/deploy)
+ PostNodeFileDeploy(ctx context.Context, request PostNodeFileDeployRequestObject) (PostNodeFileDeployResponseObject, error)
+ // Check deployment status of a file on the host
+ // (POST /node/{hostname}/file/status)
+ PostNodeFileStatus(ctx context.Context, request PostNodeFileStatusRequestObject) (PostNodeFileStatusResponseObject, error)
// Retrieve node hostname
// (GET /node/{hostname}/hostname)
GetNodeHostname(ctx context.Context, request GetNodeHostnameRequestObject) (GetNodeHostnameResponseObject, error)
@@ -1508,6 +1741,68 @@ func (sh *strictHandler) GetNodeDisk(ctx echo.Context, hostname Hostname) error
return nil
}
+// PostNodeFileDeploy operation middleware
+func (sh *strictHandler) PostNodeFileDeploy(ctx echo.Context, hostname Hostname) error {
+ var request PostNodeFileDeployRequestObject
+
+ request.Hostname = hostname
+
+ var body PostNodeFileDeployJSONRequestBody
+ if err := ctx.Bind(&body); err != nil {
+ return err
+ }
+ request.Body = &body
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.PostNodeFileDeploy(ctx.Request().Context(), request.(PostNodeFileDeployRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "PostNodeFileDeploy")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(PostNodeFileDeployResponseObject); ok {
+ return validResponse.VisitPostNodeFileDeployResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
+
+// PostNodeFileStatus operation middleware
+func (sh *strictHandler) PostNodeFileStatus(ctx echo.Context, hostname Hostname) error {
+ var request PostNodeFileStatusRequestObject
+
+ request.Hostname = hostname
+
+ var body PostNodeFileStatusJSONRequestBody
+ if err := ctx.Bind(&body); err != nil {
+ return err
+ }
+ request.Body = &body
+
+ handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
+ return sh.ssi.PostNodeFileStatus(ctx.Request().Context(), request.(PostNodeFileStatusRequestObject))
+ }
+ for _, middleware := range sh.middlewares {
+ handler = middleware(handler, "PostNodeFileStatus")
+ }
+
+ response, err := handler(ctx, request)
+
+ if err != nil {
+ return err
+ } else if validResponse, ok := response.(PostNodeFileStatusResponseObject); ok {
+ return validResponse.VisitPostNodeFileStatusResponse(ctx.Response())
+ } else if response != nil {
+ return fmt.Errorf("unexpected response type: %T", response)
+ }
+ return nil
+}
+
// GetNodeHostname operation middleware
func (sh *strictHandler) GetNodeHostname(ctx echo.Context, hostname Hostname) error {
var request GetNodeHostnameRequestObject
diff --git a/internal/api/node/network_dns_get_by_interface_public_test.go b/internal/api/node/network_dns_get_by_interface_public_test.go
index ddb77421..b8555771 100644
--- a/internal/api/node/network_dns_get_by_interface_public_test.go
+++ b/internal/api/node/network_dns_get_by_interface_public_test.go
@@ -253,7 +253,7 @@ func (s *NetworkDNSGetByInterfacePublicTestSuite) TestGetNodeNetworkDNSByInterfa
}
}
-func (s *NetworkDNSGetByInterfacePublicTestSuite) TestGetNetworkDNSByInterfaceHTTP() {
+func (s *NetworkDNSGetByInterfacePublicTestSuite) TestGetNetworkDNSByInterfaceValidationHTTP() {
tests := []struct {
name string
path string
diff --git a/internal/api/node/network_dns_put_by_interface_public_test.go b/internal/api/node/network_dns_put_by_interface_public_test.go
index d01afeac..1bb8b0de 100644
--- a/internal/api/node/network_dns_put_by_interface_public_test.go
+++ b/internal/api/node/network_dns_put_by_interface_public_test.go
@@ -299,7 +299,7 @@ func (s *NetworkDNSPutByInterfacePublicTestSuite) TestPutNodeNetworkDNS() {
}
}
-func (s *NetworkDNSPutByInterfacePublicTestSuite) TestPutNetworkDNSHTTP() {
+func (s *NetworkDNSPutByInterfacePublicTestSuite) TestPutNetworkDNSValidationHTTP() {
tests := []struct {
name string
path string
diff --git a/internal/api/node/network_ping_post_public_test.go b/internal/api/node/network_ping_post_public_test.go
index 3075c9ad..6d4645a7 100644
--- a/internal/api/node/network_ping_post_public_test.go
+++ b/internal/api/node/network_ping_post_public_test.go
@@ -275,7 +275,7 @@ func (s *NetworkPingPostPublicTestSuite) TestPostNodeNetworkPing() {
}
}
-func (s *NetworkPingPostPublicTestSuite) TestPostNetworkPingHTTP() {
+func (s *NetworkPingPostPublicTestSuite) TestPostNetworkPingValidationHTTP() {
tests := []struct {
name string
path string
diff --git a/internal/api/node/node_disk_get_public_test.go b/internal/api/node/node_disk_get_public_test.go
index e5623c57..e755e7bf 100644
--- a/internal/api/node/node_disk_get_public_test.go
+++ b/internal/api/node/node_disk_get_public_test.go
@@ -22,15 +22,22 @@ package node_test
import (
"context"
+ "fmt"
"log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
+ "github.com/retr0h/osapi/internal/api"
apinode "github.com/retr0h/osapi/internal/api/node"
"github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
"github.com/retr0h/osapi/internal/job"
jobmocks "github.com/retr0h/osapi/internal/job/mocks"
"github.com/retr0h/osapi/internal/provider/node/disk"
@@ -44,6 +51,8 @@ type NodeDiskGetPublicTestSuite struct {
mockJobClient *jobmocks.MockJobClient
handler *apinode.Node
ctx context.Context
+ appConfig config.Config
+ logger *slog.Logger
}
func (s *NodeDiskGetPublicTestSuite) SetupSuite() {
@@ -60,6 +69,8 @@ func (s *NodeDiskGetPublicTestSuite) SetupTest() {
s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
s.handler = apinode.New(slog.Default(), s.mockJobClient)
s.ctx = context.Background()
+ s.appConfig = config.Config{}
+ s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
}
func (s *NodeDiskGetPublicTestSuite) TearDownTest() {
@@ -194,6 +205,187 @@ func (s *NodeDiskGetPublicTestSuite) TestGetNodeDisk() {
}
}
+func (s *NodeDiskGetPublicTestSuite) TestGetNodeDiskValidationHTTP() {
+ tests := []struct {
+ name string
+ path string
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when get Ok",
+ path: "/node/server1/disk",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeDisk(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &job.NodeDiskResponse{
+ Disks: []disk.Result{
+ {Name: "/dev/sda1", Total: 1000, Used: 500, Free: 500},
+ },
+ },
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ },
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/disk",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{"error"},
+ },
+ {
+ name: "when job client errors",
+ path: "/node/server1/disk",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeDisk(gomock.Any(), "server1").
+ Return("", nil, "", assert.AnError)
+ return mock
+ },
+ wantCode: http.StatusInternalServerError,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ nodeHandler := apinode.New(s.logger, jobMock)
+ strictHandler := gen.NewStrictHandler(nodeHandler, nil)
+
+ a := api.New(s.appConfig, s.logger)
+ gen.RegisterHandlers(a.Echo, strictHandler)
+
+ req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+ rec := httptest.NewRecorder()
+
+ a.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
+const rbacDiskTestSigningKey = "test-signing-key-for-disk-rbac"
+
+func (s *NodeDiskGetPublicTestSuite) TestGetNodeDiskRBACHTTP() {
+ tokenManager := authtoken.New(s.logger)
+
+ tests := []struct {
+ name string
+ setupAuth func(req *http.Request)
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when no token returns 401",
+ setupAuth: func(_ *http.Request) {
+ // No auth header set
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusUnauthorized,
+ wantContains: []string{"Bearer token required"},
+ },
+ {
+ name: "when insufficient permissions returns 403",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacDiskTestSigningKey,
+ []string{"read"},
+ "test-user",
+ []string{"job:read"},
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusForbidden,
+ wantContains: []string{"Insufficient permissions"},
+ },
+ {
+ name: "when valid token with node:read returns 200",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacDiskTestSigningKey,
+ []string{"admin"},
+ "test-user",
+ nil,
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeDisk(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &job.NodeDiskResponse{
+ Disks: []disk.Result{
+ {Name: "/dev/sda1", Total: 1000, Used: 500, Free: 500},
+ },
+ },
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ wantContains: []string{`"job_id"`},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ appConfig := config.Config{
+ API: config.API{
+ Server: config.Server{
+ Security: config.ServerSecurity{
+ SigningKey: rbacDiskTestSigningKey,
+ },
+ },
+ },
+ }
+
+ server := api.New(appConfig, s.logger)
+ handlers := server.GetNodeHandler(jobMock)
+ server.RegisterHandlers(handlers)
+
+ req := httptest.NewRequest(http.MethodGet, "/node/server1/disk", nil)
+ tc.setupAuth(req)
+ rec := httptest.NewRecorder()
+
+ server.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
func TestNodeDiskGetPublicTestSuite(t *testing.T) {
suite.Run(t, new(NodeDiskGetPublicTestSuite))
}
diff --git a/internal/api/node/node_hostname_get_public_test.go b/internal/api/node/node_hostname_get_public_test.go
index 47d3f3a0..ef4db77d 100644
--- a/internal/api/node/node_hostname_get_public_test.go
+++ b/internal/api/node/node_hostname_get_public_test.go
@@ -212,7 +212,7 @@ func (s *NodeHostnameGetPublicTestSuite) TestGetNodeHostname() {
}
}
-func (s *NodeHostnameGetPublicTestSuite) TestGetNodeHostnameHTTP() {
+func (s *NodeHostnameGetPublicTestSuite) TestGetNodeHostnameValidationHTTP() {
tests := []struct {
name string
path string
@@ -221,6 +221,15 @@ func (s *NodeHostnameGetPublicTestSuite) TestGetNodeHostnameHTTP() {
wantBody string
wantContains []string
}{
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/hostname",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{`"error"`},
+ },
{
name: "when get Ok",
path: "/node/server1/hostname",
diff --git a/internal/api/node/node_load_get_public_test.go b/internal/api/node/node_load_get_public_test.go
index 02bf72ff..6e527fe2 100644
--- a/internal/api/node/node_load_get_public_test.go
+++ b/internal/api/node/node_load_get_public_test.go
@@ -22,15 +22,22 @@ package node_test
import (
"context"
+ "fmt"
"log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
+ "github.com/retr0h/osapi/internal/api"
apinode "github.com/retr0h/osapi/internal/api/node"
"github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
jobmocks "github.com/retr0h/osapi/internal/job/mocks"
"github.com/retr0h/osapi/internal/provider/node/load"
"github.com/retr0h/osapi/internal/validation"
@@ -43,6 +50,8 @@ type NodeLoadGetPublicTestSuite struct {
mockJobClient *jobmocks.MockJobClient
handler *apinode.Node
ctx context.Context
+ appConfig config.Config
+ logger *slog.Logger
}
func (s *NodeLoadGetPublicTestSuite) SetupSuite() {
@@ -59,6 +68,8 @@ func (s *NodeLoadGetPublicTestSuite) SetupTest() {
s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
s.handler = apinode.New(slog.Default(), s.mockJobClient)
s.ctx = context.Background()
+ s.appConfig = config.Config{}
+ s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
}
func (s *NodeLoadGetPublicTestSuite) TearDownTest() {
@@ -181,6 +192,179 @@ func (s *NodeLoadGetPublicTestSuite) TestGetNodeLoad() {
}
}
+func (s *NodeLoadGetPublicTestSuite) TestGetNodeLoadValidationHTTP() {
+ tests := []struct {
+ name string
+ path string
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when get Ok",
+ path: "/node/server1/load",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeLoad(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &load.Result{Load1: 1.5, Load5: 2.0, Load15: 1.8},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ },
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/load",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{"error"},
+ },
+ {
+ name: "when job client errors",
+ path: "/node/server1/load",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeLoad(gomock.Any(), "server1").
+ Return("", nil, "", assert.AnError)
+ return mock
+ },
+ wantCode: http.StatusInternalServerError,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ nodeHandler := apinode.New(s.logger, jobMock)
+ strictHandler := gen.NewStrictHandler(nodeHandler, nil)
+
+ a := api.New(s.appConfig, s.logger)
+ gen.RegisterHandlers(a.Echo, strictHandler)
+
+ req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+ rec := httptest.NewRecorder()
+
+ a.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
+const rbacLoadTestSigningKey = "test-signing-key-for-load-rbac"
+
+func (s *NodeLoadGetPublicTestSuite) TestGetNodeLoadRBACHTTP() {
+ tokenManager := authtoken.New(s.logger)
+
+ tests := []struct {
+ name string
+ setupAuth func(req *http.Request)
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when no token returns 401",
+ setupAuth: func(_ *http.Request) {
+ // No auth header set
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusUnauthorized,
+ wantContains: []string{"Bearer token required"},
+ },
+ {
+ name: "when insufficient permissions returns 403",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacLoadTestSigningKey,
+ []string{"read"},
+ "test-user",
+ []string{"job:read"},
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusForbidden,
+ wantContains: []string{"Insufficient permissions"},
+ },
+ {
+ name: "when valid token with node:read returns 200",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacLoadTestSigningKey,
+ []string{"admin"},
+ "test-user",
+ nil,
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeLoad(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &load.Result{Load1: 1.5, Load5: 2.0, Load15: 1.8},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ wantContains: []string{`"job_id"`},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ appConfig := config.Config{
+ API: config.API{
+ Server: config.Server{
+ Security: config.ServerSecurity{
+ SigningKey: rbacLoadTestSigningKey,
+ },
+ },
+ },
+ }
+
+ server := api.New(appConfig, s.logger)
+ handlers := server.GetNodeHandler(jobMock)
+ server.RegisterHandlers(handlers)
+
+ req := httptest.NewRequest(http.MethodGet, "/node/server1/load", nil)
+ tc.setupAuth(req)
+ rec := httptest.NewRecorder()
+
+ server.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
func TestNodeLoadGetPublicTestSuite(t *testing.T) {
suite.Run(t, new(NodeLoadGetPublicTestSuite))
}
diff --git a/internal/api/node/node_memory_get_public_test.go b/internal/api/node/node_memory_get_public_test.go
index 90ae0321..df2cedd2 100644
--- a/internal/api/node/node_memory_get_public_test.go
+++ b/internal/api/node/node_memory_get_public_test.go
@@ -22,15 +22,22 @@ package node_test
import (
"context"
+ "fmt"
"log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
+ "github.com/retr0h/osapi/internal/api"
apinode "github.com/retr0h/osapi/internal/api/node"
"github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
jobmocks "github.com/retr0h/osapi/internal/job/mocks"
"github.com/retr0h/osapi/internal/provider/node/mem"
"github.com/retr0h/osapi/internal/validation"
@@ -43,6 +50,8 @@ type NodeMemoryGetPublicTestSuite struct {
mockJobClient *jobmocks.MockJobClient
handler *apinode.Node
ctx context.Context
+ appConfig config.Config
+ logger *slog.Logger
}
func (s *NodeMemoryGetPublicTestSuite) SetupSuite() {
@@ -59,6 +68,8 @@ func (s *NodeMemoryGetPublicTestSuite) SetupTest() {
s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
s.handler = apinode.New(slog.Default(), s.mockJobClient)
s.ctx = context.Background()
+ s.appConfig = config.Config{}
+ s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
}
func (s *NodeMemoryGetPublicTestSuite) TearDownTest() {
@@ -181,6 +192,179 @@ func (s *NodeMemoryGetPublicTestSuite) TestGetNodeMemory() {
}
}
+func (s *NodeMemoryGetPublicTestSuite) TestGetNodeMemoryValidationHTTP() {
+ tests := []struct {
+ name string
+ path string
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when get Ok",
+ path: "/node/server1/memory",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeMemory(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &mem.Result{Total: 8192, Free: 4096, Cached: 2048},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ },
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/memory",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{"error"},
+ },
+ {
+ name: "when job client errors",
+ path: "/node/server1/memory",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeMemory(gomock.Any(), "server1").
+ Return("", nil, "", assert.AnError)
+ return mock
+ },
+ wantCode: http.StatusInternalServerError,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ nodeHandler := apinode.New(s.logger, jobMock)
+ strictHandler := gen.NewStrictHandler(nodeHandler, nil)
+
+ a := api.New(s.appConfig, s.logger)
+ gen.RegisterHandlers(a.Echo, strictHandler)
+
+ req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+ rec := httptest.NewRecorder()
+
+ a.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
+const rbacMemoryTestSigningKey = "test-signing-key-for-memory-rbac"
+
+func (s *NodeMemoryGetPublicTestSuite) TestGetNodeMemoryRBACHTTP() {
+ tokenManager := authtoken.New(s.logger)
+
+ tests := []struct {
+ name string
+ setupAuth func(req *http.Request)
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when no token returns 401",
+ setupAuth: func(_ *http.Request) {
+ // No auth header set
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusUnauthorized,
+ wantContains: []string{"Bearer token required"},
+ },
+ {
+ name: "when insufficient permissions returns 403",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacMemoryTestSigningKey,
+ []string{"read"},
+ "test-user",
+ []string{"job:read"},
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusForbidden,
+ wantContains: []string{"Insufficient permissions"},
+ },
+ {
+ name: "when valid token with admin role (grants node:read) returns 200",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacMemoryTestSigningKey,
+ []string{"admin"},
+ "test-user",
+ nil,
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeMemory(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &mem.Result{Total: 8192, Free: 4096, Cached: 2048},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ wantContains: []string{`"job_id"`},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ appConfig := config.Config{
+ API: config.API{
+ Server: config.Server{
+ Security: config.ServerSecurity{
+ SigningKey: rbacMemoryTestSigningKey,
+ },
+ },
+ },
+ }
+
+ server := api.New(appConfig, s.logger)
+ handlers := server.GetNodeHandler(jobMock)
+ server.RegisterHandlers(handlers)
+
+ req := httptest.NewRequest(http.MethodGet, "/node/server1/memory", nil)
+ tc.setupAuth(req)
+ rec := httptest.NewRecorder()
+
+ server.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
func TestNodeMemoryGetPublicTestSuite(t *testing.T) {
suite.Run(t, new(NodeMemoryGetPublicTestSuite))
}
diff --git a/internal/api/node/node_os_get_public_test.go b/internal/api/node/node_os_get_public_test.go
index ce2929fc..1a0f6d55 100644
--- a/internal/api/node/node_os_get_public_test.go
+++ b/internal/api/node/node_os_get_public_test.go
@@ -22,15 +22,22 @@ package node_test
import (
"context"
+ "fmt"
"log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
+ "github.com/retr0h/osapi/internal/api"
apinode "github.com/retr0h/osapi/internal/api/node"
"github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
jobmocks "github.com/retr0h/osapi/internal/job/mocks"
"github.com/retr0h/osapi/internal/provider/node/host"
"github.com/retr0h/osapi/internal/validation"
@@ -43,6 +50,8 @@ type NodeOSGetPublicTestSuite struct {
mockJobClient *jobmocks.MockJobClient
handler *apinode.Node
ctx context.Context
+ appConfig config.Config
+ logger *slog.Logger
}
func (s *NodeOSGetPublicTestSuite) SetupSuite() {
@@ -59,6 +68,8 @@ func (s *NodeOSGetPublicTestSuite) SetupTest() {
s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
s.handler = apinode.New(slog.Default(), s.mockJobClient)
s.ctx = context.Background()
+ s.appConfig = config.Config{}
+ s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
}
func (s *NodeOSGetPublicTestSuite) TearDownTest() {
@@ -180,6 +191,179 @@ func (s *NodeOSGetPublicTestSuite) TestGetNodeOS() {
}
}
+func (s *NodeOSGetPublicTestSuite) TestGetNodeOSValidationHTTP() {
+ tests := []struct {
+ name string
+ path string
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when get Ok",
+ path: "/node/server1/os",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeOS(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &host.Result{Distribution: "Ubuntu", Version: "22.04"},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ },
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/os",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{"error"},
+ },
+ {
+ name: "when job client errors",
+ path: "/node/server1/os",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeOS(gomock.Any(), "server1").
+ Return("", nil, "", assert.AnError)
+ return mock
+ },
+ wantCode: http.StatusInternalServerError,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ nodeHandler := apinode.New(s.logger, jobMock)
+ strictHandler := gen.NewStrictHandler(nodeHandler, nil)
+
+ a := api.New(s.appConfig, s.logger)
+ gen.RegisterHandlers(a.Echo, strictHandler)
+
+ req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+ rec := httptest.NewRecorder()
+
+ a.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
+const rbacOSTestSigningKey = "test-signing-key-for-os-rbac"
+
+func (s *NodeOSGetPublicTestSuite) TestGetNodeOSRBACHTTP() {
+ tokenManager := authtoken.New(s.logger)
+
+ tests := []struct {
+ name string
+ setupAuth func(req *http.Request)
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when no token returns 401",
+ setupAuth: func(_ *http.Request) {
+ // No auth header set
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusUnauthorized,
+ wantContains: []string{"Bearer token required"},
+ },
+ {
+ name: "when insufficient permissions returns 403",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacOSTestSigningKey,
+ []string{"read"},
+ "test-user",
+ []string{"job:read"},
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusForbidden,
+ wantContains: []string{"Insufficient permissions"},
+ },
+ {
+ name: "when valid token with admin role (grants node:read) returns 200",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacOSTestSigningKey,
+ []string{"admin"},
+ "test-user",
+ nil,
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeOS(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &host.Result{Distribution: "Ubuntu", Version: "22.04"},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ wantContains: []string{`"job_id"`},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ appConfig := config.Config{
+ API: config.API{
+ Server: config.Server{
+ Security: config.ServerSecurity{
+ SigningKey: rbacOSTestSigningKey,
+ },
+ },
+ },
+ }
+
+ server := api.New(appConfig, s.logger)
+ handlers := server.GetNodeHandler(jobMock)
+ server.RegisterHandlers(handlers)
+
+ req := httptest.NewRequest(http.MethodGet, "/node/server1/os", nil)
+ tc.setupAuth(req)
+ rec := httptest.NewRecorder()
+
+ server.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
func TestNodeOSGetPublicTestSuite(t *testing.T) {
suite.Run(t, new(NodeOSGetPublicTestSuite))
}
diff --git a/internal/api/node/node_status_get_public_test.go b/internal/api/node/node_status_get_public_test.go
index 409df420..8e6cf6fa 100644
--- a/internal/api/node/node_status_get_public_test.go
+++ b/internal/api/node/node_status_get_public_test.go
@@ -196,7 +196,7 @@ func (s *NodeStatusGetPublicTestSuite) TestGetNodeStatus() {
}
}
-func (s *NodeStatusGetPublicTestSuite) TestGetNodeStatusHTTP() {
+func (s *NodeStatusGetPublicTestSuite) TestGetNodeStatusValidationHTTP() {
tests := []struct {
name string
path string
@@ -205,6 +205,15 @@ func (s *NodeStatusGetPublicTestSuite) TestGetNodeStatusHTTP() {
wantBody string
wantContains []string
}{
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{`"error"`},
+ },
{
name: "when get Ok",
path: "/node/server1",
diff --git a/internal/api/node/node_uptime_get_public_test.go b/internal/api/node/node_uptime_get_public_test.go
index 8d521bc7..496124bd 100644
--- a/internal/api/node/node_uptime_get_public_test.go
+++ b/internal/api/node/node_uptime_get_public_test.go
@@ -22,15 +22,22 @@ package node_test
import (
"context"
+ "fmt"
"log/slog"
+ "net/http"
+ "net/http/httptest"
+ "os"
"testing"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
+ "github.com/retr0h/osapi/internal/api"
apinode "github.com/retr0h/osapi/internal/api/node"
"github.com/retr0h/osapi/internal/api/node/gen"
+ "github.com/retr0h/osapi/internal/authtoken"
+ "github.com/retr0h/osapi/internal/config"
"github.com/retr0h/osapi/internal/job"
jobmocks "github.com/retr0h/osapi/internal/job/mocks"
"github.com/retr0h/osapi/internal/validation"
@@ -43,6 +50,8 @@ type NodeUptimeGetPublicTestSuite struct {
mockJobClient *jobmocks.MockJobClient
handler *apinode.Node
ctx context.Context
+ appConfig config.Config
+ logger *slog.Logger
}
func (s *NodeUptimeGetPublicTestSuite) SetupSuite() {
@@ -59,6 +68,8 @@ func (s *NodeUptimeGetPublicTestSuite) SetupTest() {
s.mockJobClient = jobmocks.NewMockJobClient(s.mockCtrl)
s.handler = apinode.New(slog.Default(), s.mockJobClient)
s.ctx = context.Background()
+ s.appConfig = config.Config{}
+ s.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
}
func (s *NodeUptimeGetPublicTestSuite) TearDownTest() {
@@ -179,6 +190,179 @@ func (s *NodeUptimeGetPublicTestSuite) TestGetNodeUptime() {
}
}
+func (s *NodeUptimeGetPublicTestSuite) TestGetNodeUptimeValidationHTTP() {
+ tests := []struct {
+ name string
+ path string
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when get Ok",
+ path: "/node/server1/uptime",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeUptime(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &job.NodeUptimeResponse{Uptime: "3 days, 2 hours, 15 minutes"},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ },
+ {
+ name: "when empty hostname returns 400",
+ path: "/node/%20/uptime",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusBadRequest,
+ wantContains: []string{"error"},
+ },
+ {
+ name: "when job client errors",
+ path: "/node/server1/uptime",
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeUptime(gomock.Any(), "server1").
+ Return("", nil, "", assert.AnError)
+ return mock
+ },
+ wantCode: http.StatusInternalServerError,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ nodeHandler := apinode.New(s.logger, jobMock)
+ strictHandler := gen.NewStrictHandler(nodeHandler, nil)
+
+ a := api.New(s.appConfig, s.logger)
+ gen.RegisterHandlers(a.Echo, strictHandler)
+
+ req := httptest.NewRequest(http.MethodGet, tc.path, nil)
+ rec := httptest.NewRecorder()
+
+ a.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
+const rbacUptimeTestSigningKey = "test-signing-key-for-uptime-rbac"
+
+func (s *NodeUptimeGetPublicTestSuite) TestGetNodeUptimeRBACHTTP() {
+ tokenManager := authtoken.New(s.logger)
+
+ tests := []struct {
+ name string
+ setupAuth func(req *http.Request)
+ setupJobMock func() *jobmocks.MockJobClient
+ wantCode int
+ wantContains []string
+ }{
+ {
+ name: "when no token returns 401",
+ setupAuth: func(_ *http.Request) {
+ // No auth header set
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusUnauthorized,
+ wantContains: []string{"Bearer token required"},
+ },
+ {
+ name: "when insufficient permissions returns 403",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacUptimeTestSigningKey,
+ []string{"read"},
+ "test-user",
+ []string{"job:read"},
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ return jobmocks.NewMockJobClient(s.mockCtrl)
+ },
+ wantCode: http.StatusForbidden,
+ wantContains: []string{"Insufficient permissions"},
+ },
+ {
+ name: "when valid token with admin role (grants node:read) returns 200",
+ setupAuth: func(req *http.Request) {
+ token, err := tokenManager.Generate(
+ rbacUptimeTestSigningKey,
+ []string{"admin"},
+ "test-user",
+ nil,
+ )
+ s.Require().NoError(err)
+ req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
+ },
+ setupJobMock: func() *jobmocks.MockJobClient {
+ mock := jobmocks.NewMockJobClient(s.mockCtrl)
+ mock.EXPECT().
+ QueryNodeUptime(gomock.Any(), "server1").
+ Return(
+ "550e8400-e29b-41d4-a716-446655440000",
+ &job.NodeUptimeResponse{Uptime: "3 days, 2 hours, 15 minutes"},
+ "agent1",
+ nil,
+ )
+ return mock
+ },
+ wantCode: http.StatusOK,
+ wantContains: []string{`"job_id"`},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ jobMock := tc.setupJobMock()
+
+ appConfig := config.Config{
+ API: config.API{
+ Server: config.Server{
+ Security: config.ServerSecurity{
+ SigningKey: rbacUptimeTestSigningKey,
+ },
+ },
+ },
+ }
+
+ server := api.New(appConfig, s.logger)
+ handlers := server.GetNodeHandler(jobMock)
+ server.RegisterHandlers(handlers)
+
+ req := httptest.NewRequest(http.MethodGet, "/node/server1/uptime", nil)
+ tc.setupAuth(req)
+ rec := httptest.NewRecorder()
+
+ server.Echo.ServeHTTP(rec, req)
+
+ s.Equal(tc.wantCode, rec.Code)
+ for _, str := range tc.wantContains {
+ s.Contains(rec.Body.String(), str)
+ }
+ })
+ }
+}
+
func TestNodeUptimeGetPublicTestSuite(t *testing.T) {
suite.Run(t, new(NodeUptimeGetPublicTestSuite))
}
diff --git a/internal/authtoken/permissions.go b/internal/authtoken/permissions.go
index df3f2611..73baaf6e 100644
--- a/internal/authtoken/permissions.go
+++ b/internal/authtoken/permissions.go
@@ -35,6 +35,8 @@ const (
PermHealthRead Permission = "health:read"
PermAuditRead Permission = "audit:read"
PermCommandExecute Permission = "command:execute"
+ PermFileRead Permission = "file:read"
+ PermFileWrite Permission = "file:write"
)
// AllPermissions is the full set of known permissions.
@@ -49,6 +51,8 @@ var AllPermissions = []Permission{
PermHealthRead,
PermAuditRead,
PermCommandExecute,
+ PermFileRead,
+ PermFileWrite,
}
// DefaultRolePermissions maps built-in role names to their granted permissions.
@@ -64,6 +68,8 @@ var DefaultRolePermissions = map[string][]Permission{
PermHealthRead,
PermAuditRead,
PermCommandExecute,
+ PermFileRead,
+ PermFileWrite,
},
"write": {
PermAgentRead,
@@ -73,6 +79,8 @@ var DefaultRolePermissions = map[string][]Permission{
PermJobRead,
PermJobWrite,
PermHealthRead,
+ PermFileRead,
+ PermFileWrite,
},
"read": {
PermAgentRead,
@@ -80,6 +88,7 @@ var DefaultRolePermissions = map[string][]Permission{
PermNetworkRead,
PermJobRead,
PermHealthRead,
+ PermFileRead,
},
}
diff --git a/internal/authtoken/permissions_public_test.go b/internal/authtoken/permissions_public_test.go
index 32416d00..05068624 100644
--- a/internal/authtoken/permissions_public_test.go
+++ b/internal/authtoken/permissions_public_test.go
@@ -57,6 +57,8 @@ func (s *PermissionsPublicTestSuite) TestResolvePermissions() {
authtoken.PermJobRead,
authtoken.PermJobWrite,
authtoken.PermHealthRead,
+ authtoken.PermFileRead,
+ authtoken.PermFileWrite,
},
expectMissing: []string{
authtoken.PermAuditRead,
@@ -70,11 +72,13 @@ func (s *PermissionsPublicTestSuite) TestResolvePermissions() {
authtoken.PermNetworkRead,
authtoken.PermJobRead,
authtoken.PermHealthRead,
+ authtoken.PermFileRead,
},
expectMissing: []string{
authtoken.PermNetworkWrite,
authtoken.PermJobWrite,
authtoken.PermAuditRead,
+ authtoken.PermFileWrite,
},
},
{
diff --git a/internal/cli/nats.go b/internal/cli/nats.go
index b3d7e1c7..4514de26 100644
--- a/internal/cli/nats.go
+++ b/internal/cli/nats.go
@@ -138,3 +138,33 @@ func BuildAuditKVConfig(
Replicas: auditCfg.Replicas,
}
}
+
+// BuildObjectStoreConfig builds a jetstream.ObjectStoreConfig from objects config values.
+func BuildObjectStoreConfig(
+ namespace string,
+ objectsCfg config.NATSObjects,
+) jetstream.ObjectStoreConfig {
+ objectsBucket := job.ApplyNamespaceToInfraName(namespace, objectsCfg.Bucket)
+
+ return jetstream.ObjectStoreConfig{
+ Bucket: objectsBucket,
+ MaxBytes: objectsCfg.MaxBytes,
+ Storage: ParseJetstreamStorageType(objectsCfg.Storage),
+ Replicas: objectsCfg.Replicas,
+ }
+}
+
+// BuildFileStateKVConfig builds a jetstream.KeyValueConfig from file-state config values.
+// The file-state bucket has no TTL so deployment SHA tracking persists indefinitely.
+func BuildFileStateKVConfig(
+ namespace string,
+ fileStateCfg config.NATSFileState,
+) jetstream.KeyValueConfig {
+ fileStateBucket := job.ApplyNamespaceToInfraName(namespace, fileStateCfg.Bucket)
+
+ return jetstream.KeyValueConfig{
+ Bucket: fileStateBucket,
+ Storage: ParseJetstreamStorageType(fileStateCfg.Storage),
+ Replicas: fileStateCfg.Replicas,
+ }
+}
diff --git a/internal/cli/nats_public_test.go b/internal/cli/nats_public_test.go
index 89180b59..e1dd849d 100644
--- a/internal/cli/nats_public_test.go
+++ b/internal/cli/nats_public_test.go
@@ -399,3 +399,114 @@ func (suite *NATSPublicTestSuite) TestBuildAuditKVConfig() {
})
}
}
+
+func (suite *NATSPublicTestSuite) TestBuildObjectStoreConfig() {
+ tests := []struct {
+ name string
+ namespace string
+ objectsCfg config.NATSObjects
+ validateFn func(jetstream.ObjectStoreConfig)
+ }{
+ {
+ name: "when namespace is set",
+ namespace: "osapi",
+ objectsCfg: config.NATSObjects{
+ Bucket: "file-objects",
+ MaxBytes: 104857600,
+ Storage: "file",
+ Replicas: 1,
+ },
+ validateFn: func(cfg jetstream.ObjectStoreConfig) {
+ assert.Equal(suite.T(), "osapi-file-objects", cfg.Bucket)
+ assert.Equal(suite.T(), int64(104857600), cfg.MaxBytes)
+ assert.Equal(suite.T(), jetstream.FileStorage, cfg.Storage)
+ assert.Equal(suite.T(), 1, cfg.Replicas)
+ },
+ },
+ {
+ name: "when namespace is empty",
+ namespace: "",
+ objectsCfg: config.NATSObjects{
+ Bucket: "file-objects",
+ MaxBytes: 52428800,
+ Storage: "memory",
+ Replicas: 3,
+ },
+ validateFn: func(cfg jetstream.ObjectStoreConfig) {
+ assert.Equal(suite.T(), "file-objects", cfg.Bucket)
+ assert.Equal(suite.T(), int64(52428800), cfg.MaxBytes)
+ assert.Equal(suite.T(), jetstream.MemoryStorage, cfg.Storage)
+ assert.Equal(suite.T(), 3, cfg.Replicas)
+ },
+ },
+ {
+ name: "when max_bytes is zero",
+ namespace: "osapi",
+ objectsCfg: config.NATSObjects{
+ Bucket: "file-objects",
+ MaxBytes: 0,
+ Storage: "file",
+ Replicas: 1,
+ },
+ validateFn: func(cfg jetstream.ObjectStoreConfig) {
+ assert.Equal(suite.T(), "osapi-file-objects", cfg.Bucket)
+ assert.Equal(suite.T(), int64(0), cfg.MaxBytes)
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ suite.Run(tc.name, func() {
+ got := cli.BuildObjectStoreConfig(tc.namespace, tc.objectsCfg)
+
+ tc.validateFn(got)
+ })
+ }
+}
+
+func (suite *NATSPublicTestSuite) TestBuildFileStateKVConfig() {
+ tests := []struct {
+ name string
+ namespace string
+ fileStateCfg config.NATSFileState
+ validateFn func(jetstream.KeyValueConfig)
+ }{
+ {
+ name: "when namespace is set",
+ namespace: "osapi",
+ fileStateCfg: config.NATSFileState{
+ Bucket: "file-state",
+ Storage: "file",
+ Replicas: 1,
+ },
+ validateFn: func(cfg jetstream.KeyValueConfig) {
+ assert.Equal(suite.T(), "osapi-file-state", cfg.Bucket)
+ assert.Equal(suite.T(), time.Duration(0), cfg.TTL)
+ assert.Equal(suite.T(), jetstream.FileStorage, cfg.Storage)
+ assert.Equal(suite.T(), 1, cfg.Replicas)
+ },
+ },
+ {
+ name: "when namespace is empty",
+ namespace: "",
+ fileStateCfg: config.NATSFileState{
+ Bucket: "file-state",
+ Storage: "memory",
+ Replicas: 3,
+ },
+ validateFn: func(cfg jetstream.KeyValueConfig) {
+ assert.Equal(suite.T(), "file-state", cfg.Bucket)
+ assert.Equal(suite.T(), jetstream.MemoryStorage, cfg.Storage)
+ assert.Equal(suite.T(), 3, cfg.Replicas)
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ suite.Run(tc.name, func() {
+ got := cli.BuildFileStateKVConfig(tc.namespace, tc.fileStateCfg)
+
+ tc.validateFn(got)
+ })
+ }
+}
diff --git a/internal/config/types.go b/internal/config/types.go
index afbcbfb6..a4e25b08 100644
--- a/internal/config/types.go
+++ b/internal/config/types.go
@@ -86,14 +86,16 @@ type NATSServerUser struct {
// NATS configuration settings.
type NATS struct {
- Server NATSServer `mapstructure:"server,omitempty"`
- Stream NATSStream `mapstructure:"stream,omitempty"`
- KV NATSKV `mapstructure:"kv,omitempty"`
- DLQ NATSDLQ `mapstructure:"dlq,omitempty"`
- Audit NATSAudit `mapstructure:"audit,omitempty"`
- Registry NATSRegistry `mapstructure:"registry,omitempty"`
- Facts NATSFacts `mapstructure:"facts,omitempty"`
- State NATSState `mapstructure:"state,omitempty"`
+ Server NATSServer `mapstructure:"server,omitempty"`
+ Stream NATSStream `mapstructure:"stream,omitempty"`
+ KV NATSKV `mapstructure:"kv,omitempty"`
+ DLQ NATSDLQ `mapstructure:"dlq,omitempty"`
+ Audit NATSAudit `mapstructure:"audit,omitempty"`
+ Registry NATSRegistry `mapstructure:"registry,omitempty"`
+ Facts NATSFacts `mapstructure:"facts,omitempty"`
+ State NATSState `mapstructure:"state,omitempty"`
+ Objects NATSObjects `mapstructure:"objects,omitempty"`
+ FileState NATSFileState `mapstructure:"file_state,omitempty"`
}
// NATSAudit configuration for the audit log KV bucket.
@@ -132,6 +134,24 @@ type NATSState struct {
Replicas int `mapstructure:"replicas"`
}
+// NATSObjects configuration for the NATS Object Store bucket.
+type NATSObjects struct {
+ // Bucket is the Object Store bucket name for file content.
+ Bucket string `mapstructure:"bucket"`
+ MaxBytes int64 `mapstructure:"max_bytes"`
+ Storage string `mapstructure:"storage"` // "file" or "memory"
+ Replicas int `mapstructure:"replicas"`
+}
+
+// NATSFileState configuration for the file deployment state KV bucket.
+// No TTL — deployed file state persists until explicitly removed.
+type NATSFileState struct {
+ // Bucket is the KV bucket name for file deployment SHA tracking.
+ Bucket string `mapstructure:"bucket"`
+ Storage string `mapstructure:"storage"` // "file" or "memory"
+ Replicas int `mapstructure:"replicas"`
+}
+
// NATSServer configuration settings for the embedded NATS server.
type NATSServer struct {
// Host the server will bind to.
diff --git a/internal/job/client/client.go b/internal/job/client/client.go
index 8b1dfeef..276327dd 100644
--- a/internal/job/client/client.go
+++ b/internal/job/client/client.go
@@ -108,10 +108,8 @@ func (c *Client) publishAndWait(
jobID := req.JobID
createdTime := req.Timestamp.Format(time.RFC3339)
- // Build operation type from category and operation
- operationType := req.Category + "." + req.Operation
operationData := map[string]interface{}{
- "type": operationType,
+ "type": req.Operation,
"data": req.Data,
}
@@ -205,10 +203,8 @@ func (c *Client) publishAndCollect(
jobID := req.JobID
createdTime := req.Timestamp.Format(time.RFC3339)
- // Build operation type from category and operation
- operationType := req.Category + "." + req.Operation
operationData := map[string]interface{}{
- "type": operationType,
+ "type": req.Operation,
"data": req.Data,
}
diff --git a/internal/job/client/file.go b/internal/job/client/file.go
new file mode 100644
index 00000000..4a646935
--- /dev/null
+++ b/internal/job/client/file.go
@@ -0,0 +1,108 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package client
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+
+ "github.com/retr0h/osapi/internal/job"
+ "github.com/retr0h/osapi/internal/provider/file"
+)
+
+// ModifyFileDeploy deploys a file to a specific hostname.
+func (c *Client) ModifyFileDeploy(
+ ctx context.Context,
+ hostname string,
+ objectName string,
+ path string,
+ contentType string,
+ mode string,
+ owner string,
+ group string,
+ vars map[string]any,
+) (string, string, bool, error) {
+ data, _ := json.Marshal(file.DeployRequest{
+ ObjectName: objectName,
+ Path: path,
+ Mode: mode,
+ Owner: owner,
+ Group: group,
+ ContentType: contentType,
+ Vars: vars,
+ })
+
+ req := &job.Request{
+ Type: job.TypeModify,
+ Category: "file",
+ Operation: job.OperationFileDeployExecute,
+ Data: json.RawMessage(data),
+ }
+
+ subject := job.BuildSubjectFromTarget(job.JobsModifyPrefix, hostname)
+ jobID, resp, err := c.publishAndWait(ctx, subject, req)
+ if err != nil {
+ return "", "", false, fmt.Errorf("failed to publish and wait: %w", err)
+ }
+
+ if resp.Status == "failed" {
+ return "", "", false, fmt.Errorf("job failed: %s", resp.Error)
+ }
+
+ changed := resp.Changed != nil && *resp.Changed
+ return jobID, resp.Hostname, changed, nil
+}
+
+// QueryFileStatus queries the status of a deployed file on a specific hostname.
+func (c *Client) QueryFileStatus(
+ ctx context.Context,
+ hostname string,
+ path string,
+) (string, *file.StatusResult, string, error) {
+ data, _ := json.Marshal(file.StatusRequest{
+ Path: path,
+ })
+
+ req := &job.Request{
+ Type: job.TypeQuery,
+ Category: "file",
+ Operation: job.OperationFileStatusGet,
+ Data: json.RawMessage(data),
+ }
+
+ subject := job.BuildSubjectFromTarget(job.JobsQueryPrefix, hostname)
+ jobID, resp, err := c.publishAndWait(ctx, subject, req)
+ if err != nil {
+ return "", nil, "", fmt.Errorf("failed to publish and wait: %w", err)
+ }
+
+ if resp.Status == "failed" {
+ return "", nil, "", fmt.Errorf("job failed: %s", resp.Error)
+ }
+
+ var result file.StatusResult
+ if err := json.Unmarshal(resp.Data, &result); err != nil {
+ return "", nil, "", fmt.Errorf("failed to unmarshal file status response: %w", err)
+ }
+
+ return jobID, &result, resp.Hostname, nil
+}
diff --git a/internal/job/client/file_public_test.go b/internal/job/client/file_public_test.go
new file mode 100644
index 00000000..83506fb3
--- /dev/null
+++ b/internal/job/client/file_public_test.go
@@ -0,0 +1,239 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package client_test
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+ "testing"
+ "time"
+
+ "github.com/golang/mock/gomock"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/job/client"
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+)
+
+type FilePublicTestSuite struct {
+ suite.Suite
+
+ mockCtrl *gomock.Controller
+ mockNATSClient *jobmocks.MockNATSClient
+ mockKV *jobmocks.MockKeyValue
+ jobsClient *client.Client
+ ctx context.Context
+}
+
+func (s *FilePublicTestSuite) SetupTest() {
+ s.mockCtrl = gomock.NewController(s.T())
+ s.mockNATSClient = jobmocks.NewMockNATSClient(s.mockCtrl)
+ s.mockKV = jobmocks.NewMockKeyValue(s.mockCtrl)
+ s.ctx = context.Background()
+
+ opts := &client.Options{
+ Timeout: 30 * time.Second,
+ KVBucket: s.mockKV,
+ }
+ var err error
+ s.jobsClient, err = client.New(slog.Default(), s.mockNATSClient, opts)
+ s.Require().NoError(err)
+}
+
+func (s *FilePublicTestSuite) TearDownTest() {
+ s.mockCtrl.Finish()
+}
+
+func (s *FilePublicTestSuite) TestModifyFileDeploy() {
+ tests := []struct {
+ name string
+ hostname string
+ responseData string
+ mockError error
+ expectError bool
+ errorContains string
+ expectChanged bool
+ }{
+ {
+ name: "when deploy succeeds",
+ hostname: "server1",
+ responseData: `{
+ "status": "completed",
+ "hostname": "server1",
+ "changed": true,
+ "data": {"changed":true,"sha256":"abc123","path":"/etc/app.conf"}
+ }`,
+ expectChanged: true,
+ },
+ {
+ name: "when deploy succeeds unchanged",
+ hostname: "server1",
+ responseData: `{
+ "status": "completed",
+ "hostname": "server1",
+ "changed": false,
+ "data": {"changed":false,"sha256":"abc123","path":"/etc/app.conf"}
+ }`,
+ expectChanged: false,
+ },
+ {
+ name: "when job failed",
+ hostname: "server1",
+ responseData: `{
+ "status": "failed",
+ "error": "failed to get object: not found",
+ "data": {}
+ }`,
+ expectError: true,
+ errorContains: "job failed",
+ },
+ {
+ name: "when publish fails",
+ hostname: "server1",
+ mockError: errors.New("connection failed"),
+ expectError: true,
+ errorContains: "failed to publish and wait",
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ setupPublishAndWaitMocks(
+ s.mockCtrl,
+ s.mockKV,
+ s.mockNATSClient,
+ "jobs.modify.host.server1",
+ tt.responseData,
+ tt.mockError,
+ )
+
+ jobID, hostname, changed, err := s.jobsClient.ModifyFileDeploy(
+ s.ctx,
+ tt.hostname,
+ "app.conf",
+ "/etc/app.conf",
+ "raw",
+ "0644",
+ "root",
+ "root",
+ nil,
+ )
+
+ if tt.expectError {
+ s.Error(err)
+ if tt.errorContains != "" {
+ s.Contains(err.Error(), tt.errorContains)
+ }
+ } else {
+ s.NoError(err)
+ s.NotEmpty(jobID)
+ s.Equal("server1", hostname)
+ s.Equal(tt.expectChanged, changed)
+ }
+ })
+ }
+}
+
+func (s *FilePublicTestSuite) TestQueryFileStatus() {
+ tests := []struct {
+ name string
+ hostname string
+ responseData string
+ mockError error
+ expectError bool
+ errorContains string
+ }{
+ {
+ name: "when status succeeds",
+ hostname: "server1",
+ responseData: `{
+ "status": "completed",
+ "hostname": "server1",
+ "data": {"path":"/etc/app.conf","status":"in-sync","sha256":"abc123"}
+ }`,
+ },
+ {
+ name: "when publish fails",
+ hostname: "server1",
+ mockError: errors.New("connection failed"),
+ expectError: true,
+ errorContains: "failed to publish and wait",
+ },
+ {
+ name: "when job failed",
+ hostname: "server1",
+ responseData: `{
+ "status": "failed",
+ "error": "file not found",
+ "data": {}
+ }`,
+ expectError: true,
+ errorContains: "job failed: file not found",
+ },
+ {
+ name: "when unmarshal fails",
+ hostname: "server1",
+ responseData: `{
+ "status": "completed",
+ "data": "not valid json object"
+ }`,
+ expectError: true,
+ errorContains: "failed to unmarshal file status response",
+ },
+ }
+
+ for _, tt := range tests {
+ s.Run(tt.name, func() {
+ setupPublishAndWaitMocks(
+ s.mockCtrl,
+ s.mockKV,
+ s.mockNATSClient,
+ "jobs.query.host.server1",
+ tt.responseData,
+ tt.mockError,
+ )
+
+ jobID, result, hostname, err := s.jobsClient.QueryFileStatus(
+ s.ctx,
+ tt.hostname,
+ "/etc/app.conf",
+ )
+
+ if tt.expectError {
+ s.Error(err)
+ s.Nil(result)
+ if tt.errorContains != "" {
+ s.Contains(err.Error(), tt.errorContains)
+ }
+ } else {
+ s.NoError(err)
+ s.NotEmpty(jobID)
+ s.NotNil(result)
+ s.Equal("server1", hostname)
+ }
+ })
+ }
+}
+
+func TestFilePublicTestSuite(t *testing.T) {
+ suite.Run(t, new(FilePublicTestSuite))
+}
diff --git a/internal/job/client/modify.go b/internal/job/client/modify.go
index 82d05d16..6751ea26 100644
--- a/internal/job/client/modify.go
+++ b/internal/job/client/modify.go
@@ -44,7 +44,7 @@ func (c *Client) ModifyNetworkDNS(
req := &job.Request{
Type: job.TypeModify,
Category: "network",
- Operation: "dns.update",
+ Operation: job.OperationNetworkDNSUpdate,
Data: json.RawMessage(data),
}
@@ -89,7 +89,7 @@ func (c *Client) ModifyNetworkDNSBroadcast(
req := &job.Request{
Type: job.TypeModify,
Category: "network",
- Operation: "dns.update",
+ Operation: job.OperationNetworkDNSUpdate,
Data: json.RawMessage(data),
}
diff --git a/internal/job/client/modify_command.go b/internal/job/client/modify_command.go
index b02cc1d9..2258aa8b 100644
--- a/internal/job/client/modify_command.go
+++ b/internal/job/client/modify_command.go
@@ -47,7 +47,7 @@ func (c *Client) ModifyCommandExec(
req := &job.Request{
Type: job.TypeModify,
Category: "command",
- Operation: "exec.execute",
+ Operation: job.OperationCommandExecExecute,
Data: json.RawMessage(data),
}
@@ -87,7 +87,7 @@ func (c *Client) ModifyCommandExecBroadcast(
req := &job.Request{
Type: job.TypeModify,
Category: "command",
- Operation: "exec.execute",
+ Operation: job.OperationCommandExecExecute,
Data: json.RawMessage(data),
}
@@ -131,7 +131,7 @@ func (c *Client) ModifyCommandShell(
req := &job.Request{
Type: job.TypeModify,
Category: "command",
- Operation: "shell.execute",
+ Operation: job.OperationCommandShellExecute,
Data: json.RawMessage(data),
}
@@ -169,7 +169,7 @@ func (c *Client) ModifyCommandShellBroadcast(
req := &job.Request{
Type: job.TypeModify,
Category: "command",
- Operation: "shell.execute",
+ Operation: job.OperationCommandShellExecute,
Data: json.RawMessage(data),
}
diff --git a/internal/job/client/query.go b/internal/job/client/query.go
index 117bcf40..c8cd9f97 100644
--- a/internal/job/client/query.go
+++ b/internal/job/client/query.go
@@ -39,7 +39,7 @@ func (c *Client) QueryNodeStatus(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "status.get",
+ Operation: job.OperationNodeStatusGet,
Data: json.RawMessage(`{}`),
}
@@ -69,7 +69,7 @@ func (c *Client) QueryNodeHostname(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "hostname.get",
+ Operation: job.OperationNodeHostnameGet,
Data: json.RawMessage(`{}`),
}
@@ -111,7 +111,7 @@ func (c *Client) QueryNetworkDNS(
req := &job.Request{
Type: job.TypeQuery,
Category: "network",
- Operation: "dns.get",
+ Operation: job.OperationNetworkDNSGet,
Data: json.RawMessage(data),
}
@@ -149,7 +149,7 @@ func (c *Client) QueryNodeStatusBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "status.get",
+ Operation: job.OperationNodeStatusGet,
Data: json.RawMessage(`{}`),
}
@@ -201,7 +201,7 @@ func (c *Client) QueryNetworkPing(
req := &job.Request{
Type: job.TypeQuery,
Category: "network",
- Operation: "ping.do",
+ Operation: job.OperationNetworkPingDo,
Data: json.RawMessage(data),
}
@@ -240,7 +240,7 @@ func (c *Client) QueryNodeHostnameBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "hostname.get",
+ Operation: job.OperationNodeHostnameGet,
Data: json.RawMessage(`{}`),
}
@@ -295,7 +295,7 @@ func (c *Client) QueryNetworkDNSBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "network",
- Operation: "dns.get",
+ Operation: job.OperationNetworkDNSGet,
Data: json.RawMessage(data),
}
@@ -345,7 +345,7 @@ func (c *Client) QueryNetworkPingBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "network",
- Operation: "ping.do",
+ Operation: job.OperationNetworkPingDo,
Data: json.RawMessage(data),
}
diff --git a/internal/job/client/query_node.go b/internal/job/client/query_node.go
index 1f7d474f..71a40687 100644
--- a/internal/job/client/query_node.go
+++ b/internal/job/client/query_node.go
@@ -39,7 +39,7 @@ func (c *Client) QueryNodeDisk(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "disk.get",
+ Operation: job.OperationNodeDiskGet,
Data: json.RawMessage(`{}`),
}
@@ -69,7 +69,7 @@ func (c *Client) QueryNodeDiskBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "disk.get",
+ Operation: job.OperationNodeDiskGet,
Data: json.RawMessage(`{}`),
}
@@ -106,7 +106,7 @@ func (c *Client) QueryNodeMemory(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "memory.get",
+ Operation: job.OperationNodeMemoryGet,
Data: json.RawMessage(`{}`),
}
@@ -136,7 +136,7 @@ func (c *Client) QueryNodeMemoryBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "memory.get",
+ Operation: job.OperationNodeMemoryGet,
Data: json.RawMessage(`{}`),
}
@@ -173,7 +173,7 @@ func (c *Client) QueryNodeLoad(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "load.get",
+ Operation: job.OperationNodeLoadGet,
Data: json.RawMessage(`{}`),
}
@@ -203,7 +203,7 @@ func (c *Client) QueryNodeLoadBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "load.get",
+ Operation: job.OperationNodeLoadGet,
Data: json.RawMessage(`{}`),
}
@@ -240,7 +240,7 @@ func (c *Client) QueryNodeOS(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "os.get",
+ Operation: job.OperationNodeOSGet,
Data: json.RawMessage(`{}`),
}
@@ -270,7 +270,7 @@ func (c *Client) QueryNodeOSBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "os.get",
+ Operation: job.OperationNodeOSGet,
Data: json.RawMessage(`{}`),
}
@@ -307,7 +307,7 @@ func (c *Client) QueryNodeUptime(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "uptime.get",
+ Operation: job.OperationNodeUptimeGet,
Data: json.RawMessage(`{}`),
}
@@ -337,7 +337,7 @@ func (c *Client) QueryNodeUptimeBroadcast(
req := &job.Request{
Type: job.TypeQuery,
Category: "node",
- Operation: "uptime.get",
+ Operation: job.OperationNodeUptimeGet,
Data: json.RawMessage(`{}`),
}
diff --git a/internal/job/client/types.go b/internal/job/client/types.go
index 6b82b1dc..b0d20aa4 100644
--- a/internal/job/client/types.go
+++ b/internal/job/client/types.go
@@ -28,6 +28,7 @@ import (
"github.com/retr0h/osapi/internal/job"
"github.com/retr0h/osapi/internal/provider/command"
+ "github.com/retr0h/osapi/internal/provider/file"
"github.com/retr0h/osapi/internal/provider/network/dns"
"github.com/retr0h/osapi/internal/provider/network/ping"
"github.com/retr0h/osapi/internal/provider/node/host"
@@ -219,6 +220,24 @@ type JobClient interface {
timeout int,
) (string, map[string]*command.Result, map[string]string, error)
+ // File operations
+ ModifyFileDeploy(
+ ctx context.Context,
+ hostname string,
+ objectName string,
+ path string,
+ contentType string,
+ mode string,
+ owner string,
+ group string,
+ vars map[string]any,
+ ) (string, string, bool, error)
+ QueryFileStatus(
+ ctx context.Context,
+ hostname string,
+ path string,
+ ) (string, *file.StatusResult, string, error)
+
// Agent discovery
ListAgents(
ctx context.Context,
diff --git a/internal/job/mocks/job_client.gen.go b/internal/job/mocks/job_client.gen.go
index 434b67ea..ea8ad1d2 100644
--- a/internal/job/mocks/job_client.gen.go
+++ b/internal/job/mocks/job_client.gen.go
@@ -14,6 +14,7 @@ import (
job "github.com/retr0h/osapi/internal/job"
client0 "github.com/retr0h/osapi/internal/job/client"
command "github.com/retr0h/osapi/internal/provider/command"
+ file "github.com/retr0h/osapi/internal/provider/file"
dns "github.com/retr0h/osapi/internal/provider/network/dns"
ping "github.com/retr0h/osapi/internal/provider/network/ping"
host "github.com/retr0h/osapi/internal/provider/node/host"
@@ -317,6 +318,23 @@ func (mr *MockJobClientMockRecorder) ModifyCommandShellBroadcast(arg0, arg1, arg
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModifyCommandShellBroadcast", reflect.TypeOf((*MockJobClient)(nil).ModifyCommandShellBroadcast), arg0, arg1, arg2, arg3, arg4)
}
+// ModifyFileDeploy mocks base method.
+func (m *MockJobClient) ModifyFileDeploy(arg0 context.Context, arg1, arg2, arg3, arg4, arg5, arg6, arg7 string, arg8 map[string]interface{}) (string, string, bool, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "ModifyFileDeploy", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8)
+ ret0, _ := ret[0].(string)
+ ret1, _ := ret[1].(string)
+ ret2, _ := ret[2].(bool)
+ ret3, _ := ret[3].(error)
+ return ret0, ret1, ret2, ret3
+}
+
+// ModifyFileDeploy indicates an expected call of ModifyFileDeploy.
+func (mr *MockJobClientMockRecorder) ModifyFileDeploy(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModifyFileDeploy", reflect.TypeOf((*MockJobClient)(nil).ModifyFileDeploy), arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8)
+}
+
// ModifyNetworkDNS mocks base method.
func (m *MockJobClient) ModifyNetworkDNS(arg0 context.Context, arg1 string, arg2, arg3 []string, arg4 string) (string, string, bool, error) {
m.ctrl.T.Helper()
@@ -385,6 +403,23 @@ func (mr *MockJobClientMockRecorder) ModifyNetworkDNSBroadcast(arg0, arg1, arg2,
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ModifyNetworkDNSBroadcast", reflect.TypeOf((*MockJobClient)(nil).ModifyNetworkDNSBroadcast), arg0, arg1, arg2, arg3, arg4)
}
+// QueryFileStatus mocks base method.
+func (m *MockJobClient) QueryFileStatus(arg0 context.Context, arg1, arg2 string) (string, *file.StatusResult, string, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "QueryFileStatus", arg0, arg1, arg2)
+ ret0, _ := ret[0].(string)
+ ret1, _ := ret[1].(*file.StatusResult)
+ ret2, _ := ret[2].(string)
+ ret3, _ := ret[3].(error)
+ return ret0, ret1, ret2, ret3
+}
+
+// QueryFileStatus indicates an expected call of QueryFileStatus.
+func (mr *MockJobClientMockRecorder) QueryFileStatus(arg0, arg1, arg2 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "QueryFileStatus", reflect.TypeOf((*MockJobClient)(nil).QueryFileStatus), arg0, arg1, arg2)
+}
+
// QueryNetworkDNS mocks base method.
func (m *MockJobClient) QueryNetworkDNS(arg0 context.Context, arg1, arg2 string) (string, *dns.GetResult, string, error) {
m.ctrl.T.Helper()
diff --git a/internal/job/mocks/messaging.gen.go b/internal/job/mocks/messaging.gen.go
index acc5a682..f2f3f1ae 100644
--- a/internal/job/mocks/messaging.gen.go
+++ b/internal/job/mocks/messaging.gen.go
@@ -127,6 +127,21 @@ func (mr *MockNATSClientMockRecorder) CreateOrUpdateKVBucketWithConfig(arg0, arg
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateOrUpdateKVBucketWithConfig", reflect.TypeOf((*MockNATSClient)(nil).CreateOrUpdateKVBucketWithConfig), arg0, arg1)
}
+// CreateOrUpdateObjectStore mocks base method.
+func (m *MockNATSClient) CreateOrUpdateObjectStore(arg0 context.Context, arg1 jetstream.ObjectStoreConfig) (jetstream.ObjectStore, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "CreateOrUpdateObjectStore", arg0, arg1)
+ ret0, _ := ret[0].(jetstream.ObjectStore)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// CreateOrUpdateObjectStore indicates an expected call of CreateOrUpdateObjectStore.
+func (mr *MockNATSClientMockRecorder) CreateOrUpdateObjectStore(arg0, arg1 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateOrUpdateObjectStore", reflect.TypeOf((*MockNATSClient)(nil).CreateOrUpdateObjectStore), arg0, arg1)
+}
+
// CreateOrUpdateStreamWithConfig mocks base method.
func (m *MockNATSClient) CreateOrUpdateStreamWithConfig(arg0 context.Context, arg1 jetstream.StreamConfig) error {
m.ctrl.T.Helper()
@@ -214,6 +229,21 @@ func (mr *MockNATSClientMockRecorder) KVPut(arg0, arg1, arg2 interface{}) *gomoc
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "KVPut", reflect.TypeOf((*MockNATSClient)(nil).KVPut), arg0, arg1, arg2)
}
+// ObjectStore mocks base method.
+func (m *MockNATSClient) ObjectStore(arg0 context.Context, arg1 string) (jetstream.ObjectStore, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "ObjectStore", arg0, arg1)
+ ret0, _ := ret[0].(jetstream.ObjectStore)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// ObjectStore indicates an expected call of ObjectStore.
+func (mr *MockNATSClientMockRecorder) ObjectStore(arg0, arg1 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ObjectStore", reflect.TypeOf((*MockNATSClient)(nil).ObjectStore), arg0, arg1)
+}
+
// Publish mocks base method.
func (m *MockNATSClient) Publish(arg0 context.Context, arg1 string, arg2 []byte) error {
m.ctrl.T.Helper()
diff --git a/internal/job/types.go b/internal/job/types.go
index 7da99d2c..196bd095 100644
--- a/internal/job/types.go
+++ b/internal/job/types.go
@@ -104,6 +104,7 @@ const (
OperationNodeLoadGet = "node.load.get"
OperationNodeMemoryGet = "node.memory.get"
OperationNodeDiskGet = "node.disk.get"
+ OperationNodeOSGet = "node.os.get"
)
// Network operations - operations that can modify network configuration
@@ -125,6 +126,12 @@ const (
OperationCommandShellExecute = "command.shell.execute"
)
+// File operations — manage file deployments and status.
+const (
+ OperationFileDeployExecute = "file.deploy.execute"
+ OperationFileStatusGet = "file.status.get"
+)
+
// Operation represents an operation in the new hierarchical format
type Operation struct {
// Type specifies the type of operation using hierarchical format
@@ -249,6 +256,19 @@ type NodeShutdownData struct {
Message string `json:"message,omitempty"`
}
+// FileState represents a deployed file's state in the file-state KV.
+// Keyed by <hostname>.<sha256(path)> — see buildStateKey in the file provider.
+type FileState struct {
+ ObjectName string `json:"object_name"`
+ Path string `json:"path"`
+ SHA256 string `json:"sha256"`
+ Mode string `json:"mode,omitempty"`
+ Owner string `json:"owner,omitempty"`
+ Group string `json:"group,omitempty"`
+ DeployedAt string `json:"deployed_at"`
+ ContentType string `json:"content_type"`
+}
+
// NetworkInterface represents a network interface with its address.
type NetworkInterface struct {
Name string `json:"name"`
diff --git a/internal/messaging/types.go b/internal/messaging/types.go
index 3d427bde..d0dae8a7 100644
--- a/internal/messaging/types.go
+++ b/internal/messaging/types.go
@@ -75,6 +75,16 @@ type NATSClient interface {
bucket string,
) ([]string, error)
+ // Object Store operations
+ CreateOrUpdateObjectStore(
+ ctx context.Context,
+ cfg jetstream.ObjectStoreConfig,
+ ) (jetstream.ObjectStore, error)
+ ObjectStore(
+ ctx context.Context,
+ name string,
+ ) (jetstream.ObjectStore, error)
+
// Message publishing
Publish(
ctx context.Context,
diff --git a/internal/provider/command/command.go b/internal/provider/command/command.go
index 697e0b0c..15f60d44 100644
--- a/internal/provider/command/command.go
+++ b/internal/provider/command/command.go
@@ -25,10 +25,13 @@ import (
"log/slog"
"github.com/retr0h/osapi/internal/exec"
+ "github.com/retr0h/osapi/internal/provider"
)
// Executor implements the Provider interface for command execution.
type Executor struct {
+ provider.FactsAware
+
logger *slog.Logger
execManager exec.Manager
}
diff --git a/internal/provider/facts.go b/internal/provider/facts.go
new file mode 100644
index 00000000..40d76620
--- /dev/null
+++ b/internal/provider/facts.go
@@ -0,0 +1,73 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package provider defines shared types for all provider implementations.
+package provider
+
+// FactsFunc returns the current agent facts for use by providers.
+// Called at execution time so providers always get the latest facts.
+type FactsFunc func() map[string]any
+
+// FactsSetter is satisfied by any provider that embeds FactsAware.
+// Used at wiring time to inject the facts function into all providers.
+type FactsSetter interface {
+ SetFactsFunc(fn FactsFunc)
+}
+
+// FactsAware provides facts access to providers via embedding.
+// Embed this in any provider struct to gain access to agent facts:
+//
+// type MyProvider struct {
+// provider.FactsAware
+// // ... other fields
+// }
+type FactsAware struct {
+ factsFn FactsFunc
+}
+
+// SetFactsFunc sets the facts getter. Called after agent initialization
+// to wire the provider to the agent's live facts.
+func (f *FactsAware) SetFactsFunc(
+ fn FactsFunc,
+) {
+ f.factsFn = fn
+}
+
+// Facts returns the current agent facts, or nil if not available.
+func (f *FactsAware) Facts() map[string]any {
+ if f.factsFn == nil {
+ return nil
+ }
+
+ return f.factsFn()
+}
+
+// WireProviderFacts sets the facts function on all providers that support it.
+// Providers that embed FactsAware automatically satisfy FactsSetter.
+// Values that do not implement FactsSetter are silently skipped, so passing
+// a non-provider here is harmless but is also not reported to the caller.
+func WireProviderFacts(
+ factsFn FactsFunc,
+ providers ...any,
+) {
+ for _, p := range providers {
+ if fs, ok := p.(FactsSetter); ok {
+ fs.SetFactsFunc(factsFn)
+ }
+ }
+}
diff --git a/internal/provider/facts_public_test.go b/internal/provider/facts_public_test.go
new file mode 100644
index 00000000..9850d9f4
--- /dev/null
+++ b/internal/provider/facts_public_test.go
@@ -0,0 +1,154 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package provider_test
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/provider"
+)
+
+type FactsPublicTestSuite struct {
+ suite.Suite
+}
+
+// SetupTest is a no-op; each test constructs its own fixtures.
+func (s *FactsPublicTestSuite) SetupTest() {}
+
+// TearDownTest is a no-op; no state is shared between tests.
+func (s *FactsPublicTestSuite) TearDownTest() {}
+
+// TestSetFactsFunc verifies SetFactsFunc installs the getter later served by
+// Facts. Receiver renamed to s: naming it "suite" shadows the testify suite
+// package and is inconsistent with FilePublicTestSuite.
+func (s *FactsPublicTestSuite) TestSetFactsFunc() {
+ tests := []struct {
+ name string
+ factsFn provider.FactsFunc
+ wantFacts map[string]any
+ }{
+ {
+ name: "when sets the facts function",
+ factsFn: func() map[string]any {
+ return map[string]any{"cpu_count": 4}
+ },
+ wantFacts: map[string]any{"cpu_count": 4},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ fa := &provider.FactsAware{}
+ fa.SetFactsFunc(tc.factsFn)
+
+ got := fa.Facts()
+ s.Equal(tc.wantFacts, got)
+ })
+ }
+}
+
+// TestFacts verifies Facts returns nil when no getter is set and the getter's
+// result once one is wired. Receiver renamed to s to avoid shadowing the
+// testify suite package, matching the sibling FilePublicTestSuite.
+func (s *FactsPublicTestSuite) TestFacts() {
+ tests := []struct {
+ name string
+ factsFn provider.FactsFunc
+ setFacts bool
+ wantFacts map[string]any
+ }{
+ {
+ name: "when factsFn is nil returns nil",
+ setFacts: false,
+ wantFacts: nil,
+ },
+ {
+ name: "when factsFn is set returns facts",
+ setFacts: true,
+ factsFn: func() map[string]any {
+ return map[string]any{"cpu_count": 4}
+ },
+ wantFacts: map[string]any{"cpu_count": 4},
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ fa := &provider.FactsAware{}
+ if tc.setFacts {
+ fa.SetFactsFunc(tc.factsFn)
+ }
+
+ got := fa.Facts()
+ s.Equal(tc.wantFacts, got)
+ })
+ }
+}
+
+// testFactsProvider is a helper type that embeds FactsAware to test WireProviderFacts.
+type testFactsProvider struct {
+ provider.FactsAware
+}
+
+// TestWireProviderFacts verifies wiring reaches providers that embed
+// FactsAware and silently skips values that do not implement FactsSetter.
+// Receiver renamed to s to avoid shadowing the testify suite package.
+func (s *FactsPublicTestSuite) TestWireProviderFacts() {
+ tests := []struct {
+ name string
+ providers []any
+ wantFacts map[string]any
+ checkIdx int
+ }{
+ {
+ name: "when wires facts to implementing providers",
+ providers: []any{
+ &testFactsProvider{},
+ },
+ wantFacts: map[string]any{"os": "linux"},
+ checkIdx: 0,
+ },
+ {
+ name: "when skips non-implementing providers",
+ providers: []any{
+ "not-a-provider",
+ &testFactsProvider{},
+ },
+ wantFacts: map[string]any{"os": "linux"},
+ checkIdx: 1,
+ },
+ }
+
+ for _, tc := range tests {
+ s.Run(tc.name, func() {
+ factsFn := func() map[string]any {
+ return map[string]any{"os": "linux"}
+ }
+
+ s.NotPanics(func() {
+ provider.WireProviderFacts(factsFn, tc.providers...)
+ })
+
+ p, ok := tc.providers[tc.checkIdx].(*testFactsProvider)
+ s.Require().True(ok)
+
+ got := p.Facts()
+ s.Equal(tc.wantFacts, got)
+ })
+ }
+}
+
+// In order for `go test` to run this suite, we need to create
+// a normal test function and pass our suite to suite.Run.
+func TestFactsPublicTestSuite(t *testing.T) {
+ suite.Run(t, new(FactsPublicTestSuite))
+}
diff --git a/internal/provider/file/deploy.go b/internal/provider/file/deploy.go
new file mode 100644
index 00000000..4b89f64a
--- /dev/null
+++ b/internal/provider/file/deploy.go
@@ -0,0 +1,160 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "os"
+ "strconv"
+ "time"
+
+ "github.com/spf13/afero"
+
+ "github.com/retr0h/osapi/internal/job"
+)
+
+// marshalJSON is a package-level variable for testing the marshal error path.
+var marshalJSON = json.Marshal
+
+// Deploy writes file content to the target path with the specified
+// permissions. It uses SHA-256 checksums for idempotency: if the
+// content hasn't changed since the last deploy, the file is not
+// rewritten and changed is false.
+func (p *Service) Deploy(
+ ctx context.Context,
+ req DeployRequest,
+) (*DeployResult, error) {
+ content, err := p.objStore.GetBytes(ctx, req.ObjectName)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get object %q: %w", req.ObjectName, err)
+ }
+
+ // Template content is rendered with the request vars before hashing, so
+ // idempotency is computed over the final rendered bytes.
+ if req.ContentType == "template" {
+ content, err = p.renderTemplate(content, req.Vars)
+ if err != nil {
+ return nil, fmt.Errorf("failed to render template: %w", err)
+ }
+ }
+
+ sha := computeSHA256(content)
+ stateKey := buildStateKey(p.hostname, req.Path)
+
+ // Any Get/unmarshal failure is treated the same as "no prior state" and
+ // falls through to a full deploy. NOTE(review): a transient KV error thus
+ // triggers a redeploy rather than surfacing — confirm this is intended.
+ entry, err := p.stateKV.Get(ctx, stateKey)
+ if err == nil {
+ var state job.FileState
+ if unmarshalErr := json.Unmarshal(entry.Value(), &state); unmarshalErr == nil {
+ if state.SHA256 == sha {
+ p.logger.Debug(
+ "file unchanged, skipping deploy",
+ slog.String("path", req.Path),
+ slog.String("sha256", sha),
+ )
+
+ return &DeployResult{
+ Changed: false,
+ SHA256: sha,
+ Path: req.Path,
+ }, nil
+ }
+ }
+ }
+
+ mode := parseFileMode(req.Mode)
+
+ if err := afero.WriteFile(p.fs, req.Path, content, mode); err != nil {
+ return nil, fmt.Errorf("failed to write file %q: %w", req.Path, err)
+ }
+
+ // NOTE(review): Owner and Group are recorded in state below, but no chown
+ // is performed here — confirm ownership is applied elsewhere.
+ state := job.FileState{
+ ObjectName: req.ObjectName,
+ Path: req.Path,
+ SHA256: sha,
+ Mode: req.Mode,
+ Owner: req.Owner,
+ Group: req.Group,
+ DeployedAt: time.Now().UTC().Format(time.RFC3339),
+ ContentType: req.ContentType,
+ }
+
+ stateBytes, err := marshalJSON(state)
+ if err != nil {
+ return nil, fmt.Errorf("failed to marshal file state: %w", err)
+ }
+
+ if _, err := p.stateKV.Put(ctx, stateKey, stateBytes); err != nil {
+ return nil, fmt.Errorf("failed to update file state: %w", err)
+ }
+
+ p.logger.Info(
+ "file deployed",
+ slog.String("path", req.Path),
+ slog.String("sha256", sha),
+ slog.Bool("changed", true),
+ )
+
+ return &DeployResult{
+ Changed: true,
+ SHA256: sha,
+ Path: req.Path,
+ }, nil
+}
+
+// computeSHA256 returns the hex-encoded SHA-256 hash of the given data.
+// The digest serves both as the idempotency token stored in FileState and
+// as the hashed-path component of the KV state key (see buildStateKey).
+func computeSHA256(
+ data []byte,
+) string {
+ h := sha256.Sum256(data)
+
+ return hex.EncodeToString(h[:])
+}
+
+// buildStateKey returns the KV key for a file's deploy state.
+// Format: <hostname>.<sha256(path)>. The path is hashed rather than embedded
+// directly — presumably to avoid characters invalid in KV keys; confirm
+// against the KV backend's key rules.
+func buildStateKey(
+ hostname string,
+ path string,
+) string {
+ pathHash := computeSHA256([]byte(path))
+
+ return hostname + "." + pathHash
+}
+
+// parseFileMode parses a string file mode (e.g., "0644") into an os.FileMode.
+// Returns 0644 as the default if the string is empty or invalid.
+// NOTE(review): an invalid mode string is silently coerced to 0644 rather
+// than surfaced as an error — confirm callers accept this fallback.
+func parseFileMode(
+ mode string,
+) os.FileMode {
+ // Empty mode means "use the default".
+ if mode == "" {
+ return 0o644
+ }
+
+ // Modes are octal strings; base 8 rejects digits 8 and 9.
+ parsed, err := strconv.ParseUint(mode, 8, 32)
+ if err != nil {
+ return 0o644
+ }
+
+ return os.FileMode(parsed)
+}
diff --git a/internal/provider/file/deploy_public_test.go b/internal/provider/file/deploy_public_test.go
new file mode 100644
index 00000000..4e3e40be
--- /dev/null
+++ b/internal/provider/file/deploy_public_test.go
@@ -0,0 +1,434 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "encoding/json"
+ "log/slog"
+ "os"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/job"
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+ "github.com/retr0h/osapi/internal/provider/file"
+)
+
// DeployPublicTestSuite exercises the exported Deploy API from outside the
// package (package file_test), using only the public surface.
type DeployPublicTestSuite struct {
	suite.Suite

	logger *slog.Logger // discardable text logger for the provider under test
	ctx    context.Context
}

// SetupTest runs before each test, providing a fresh logger and context.
func (suite *DeployPublicTestSuite) SetupTest() {
	suite.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
	suite.ctx = context.Background()
}

// TearDownTest is a no-op; per-case mocks are created and finished inside
// each subtest rather than at the suite level.
func (suite *DeployPublicTestSuite) TearDownTest() {}
+
// TestDeploy exercises Service.Deploy end-to-end against an in-memory
// filesystem, a stubbed Object Store, and a mocked state KV. Cases cover
// new deploys, changed content, idempotent skips, template rendering,
// corrupt state recovery, mode handling, and each error path.
func (suite *DeployPublicTestSuite) TestDeploy() {
	fileContent := []byte("server { listen 80; }")
	existingSHA := computeTestSHA256(fileContent)
	differentContent := []byte("server { listen 443; }")
	differentSHA := computeTestSHA256(differentContent)

	// setupMock receives a *pointer* to the afero.Fs so a case can replace
	// the filesystem (e.g. with a read-only one) before Deploy runs.
	// validateFunc optionally asserts on the filesystem afterwards.
	tests := []struct {
		name         string
		setupMock    func(*gomock.Controller, *stubObjectStore, *jobmocks.MockKeyValue, *afero.Fs)
		req          file.DeployRequest
		want         *file.DeployResult
		wantErr      bool
		wantErrMsg   string
		validateFunc func(afero.Fs)
	}{
		{
			name: "when deploy succeeds (new file)",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				// A KV Get error is treated as "no prior state".
				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				Mode:        "0644",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  existingSHA,
				Path:    "/etc/nginx/nginx.conf",
			},
			validateFunc: func(appFs afero.Fs) {
				data, err := afero.ReadFile(appFs, "/etc/nginx/nginx.conf")
				suite.Require().NoError(err)
				suite.Equal(fileContent, data)
			},
		},
		{
			name: "when deploy succeeds (changed content)",
			setupMock: func(
				ctrl *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				// Prior state records a different SHA, so Deploy must rewrite.
				existingState := job.FileState{
					SHA256: differentSHA,
					Path:   "/etc/nginx/nginx.conf",
				}
				stateBytes, _ := json.Marshal(existingState)

				mockEntry := jobmocks.NewMockKeyValueEntry(ctrl)
				mockEntry.EXPECT().Value().Return(stateBytes)

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(mockEntry, nil)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				Mode:        "0644",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  existingSHA,
				Path:    "/etc/nginx/nginx.conf",
			},
			validateFunc: func(appFs afero.Fs) {
				data, err := afero.ReadFile(appFs, "/etc/nginx/nginx.conf")
				suite.Require().NoError(err)
				suite.Equal(fileContent, data)
			},
		},
		{
			name: "when deploy skips (unchanged)",
			setupMock: func(
				ctrl *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				// Prior state matches the incoming SHA: no Put is expected,
				// proving the idempotent short-circuit.
				existingState := job.FileState{
					SHA256: existingSHA,
					Path:   "/etc/nginx/nginx.conf",
				}
				stateBytes, _ := json.Marshal(existingState)

				mockEntry := jobmocks.NewMockKeyValueEntry(ctrl)
				mockEntry.EXPECT().Value().Return(stateBytes)

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(mockEntry, nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: false,
				SHA256:  existingSHA,
				Path:    "/etc/nginx/nginx.conf",
			},
		},
		{
			name: "when Object Store get fails",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				_ *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesErr = assert.AnError
			},
			req: file.DeployRequest{
				ObjectName:  "missing.conf",
				Path:        "/etc/missing.conf",
				ContentType: "raw",
			},
			wantErr:    true,
			wantErrMsg: "failed to get object",
		},
		{
			name: "when content type is template",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				// Template content is rendered with req.Vars before hashing
				// and writing.
				mockObj.getBytesData = []byte("server {{ .Vars.host }}")

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "template",
				Vars:        map[string]any{"host": "10.0.0.1"},
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  computeTestSHA256([]byte("server 10.0.0.1")),
				Path:    "/etc/nginx/nginx.conf",
			},
			validateFunc: func(appFs afero.Fs) {
				data, err := afero.ReadFile(appFs, "/etc/nginx/nginx.conf")
				suite.Require().NoError(err)
				suite.Equal("server 10.0.0.1", string(data))
			},
		},
		{
			name: "when file write fails",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				appFs *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				// Use a read-only filesystem to trigger write failure.
				*appFs = afero.NewReadOnlyFs(afero.NewMemMapFs())
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "raw",
			},
			wantErr:    true,
			wantErrMsg: "failed to write file",
		},
		{
			name: "when state KV put fails",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(0), assert.AnError)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "raw",
			},
			wantErr:    true,
			wantErrMsg: "failed to update file state",
		},
		{
			name: "when state KV has corrupt data proceeds to deploy",
			setupMock: func(
				ctrl *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				// Unparsable state is treated like missing state: the
				// deploy proceeds rather than erroring out.
				mockEntry := jobmocks.NewMockKeyValueEntry(ctrl)
				mockEntry.EXPECT().Value().Return([]byte("not-json"))

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(mockEntry, nil)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  existingSHA,
				Path:    "/etc/nginx/nginx.conf",
			},
		},
		{
			name: "when mode is invalid defaults to 0644",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				Mode:        "not-octal",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  existingSHA,
				Path:    "/etc/nginx/nginx.conf",
			},
			validateFunc: func(appFs afero.Fs) {
				info, err := appFs.Stat("/etc/nginx/nginx.conf")
				suite.Require().NoError(err)
				suite.Equal(os.FileMode(0o644), info.Mode())
			},
		},
		{
			name: "when mode is set",
			setupMock: func(
				_ *gomock.Controller,
				mockObj *stubObjectStore,
				mockKV *jobmocks.MockKeyValue,
				_ *afero.Fs,
			) {
				mockObj.getBytesData = fileContent

				mockKV.EXPECT().
					Get(gomock.Any(), gomock.Any()).
					Return(nil, assert.AnError)

				mockKV.EXPECT().
					Put(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(uint64(1), nil)
			},
			req: file.DeployRequest{
				ObjectName:  "script.sh",
				Path:        "/usr/local/bin/script.sh",
				Mode:        "0755",
				ContentType: "raw",
			},
			want: &file.DeployResult{
				Changed: true,
				SHA256:  existingSHA,
				Path:    "/usr/local/bin/script.sh",
			},
			validateFunc: func(appFs afero.Fs) {
				info, err := appFs.Stat("/usr/local/bin/script.sh")
				suite.Require().NoError(err)
				suite.Equal(os.FileMode(0o755), info.Mode())
			},
		},
	}

	for _, tc := range tests {
		suite.Run(tc.name, func() {
			// Fresh controller, filesystem, and mocks per case so
			// expectations never leak between subtests.
			ctrl := gomock.NewController(suite.T())
			defer ctrl.Finish()

			appFs := afero.Fs(afero.NewMemMapFs())
			mockKV := jobmocks.NewMockKeyValue(ctrl)
			mockObj := &stubObjectStore{}

			if tc.setupMock != nil {
				tc.setupMock(ctrl, mockObj, mockKV, &appFs)
			}

			provider := file.New(
				suite.logger,
				appFs,
				mockObj,
				mockKV,
				"test-host",
			)

			got, err := provider.Deploy(suite.ctx, tc.req)

			if tc.wantErr {
				suite.Error(err)
				suite.ErrorContains(err, tc.wantErrMsg)
				suite.Nil(got)
			} else {
				suite.NoError(err)
				suite.Require().NotNil(got)
				suite.Equal(tc.want, got)
			}

			if tc.validateFunc != nil {
				tc.validateFunc(appFs)
			}
		})
	}
}
+
// TestDeployPublicTestSuite is the standard test entry point: `go test`
// only discovers plain Test* functions, so this hands the suite to
// suite.Run for execution.
func TestDeployPublicTestSuite(t *testing.T) {
	suite.Run(t, new(DeployPublicTestSuite))
}
diff --git a/internal/provider/file/deploy_test.go b/internal/provider/file/deploy_test.go
new file mode 100644
index 00000000..18c671da
--- /dev/null
+++ b/internal/provider/file/deploy_test.go
@@ -0,0 +1,147 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "os"
+ "testing"
+
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+)
+
// DeployTestSuite tests unexported internals (package file), including
// failure paths reachable only by overriding package-level hooks.
type DeployTestSuite struct {
	suite.Suite

	logger *slog.Logger
	ctx    context.Context
}

// SetupTest runs before each test, providing a fresh logger and context.
func (suite *DeployTestSuite) SetupTest() {
	suite.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
	suite.ctx = context.Background()
}

// TearDownTest restores the real json.Marshal after tests that stub the
// package-level marshalJSON hook, so later tests see production behavior.
func (suite *DeployTestSuite) TearDownTest() {
	marshalJSON = json.Marshal
}
+
// TestDeploy covers internal-only failure paths that require overriding
// the package-level marshalJSON hook (restored by TearDownTest).
func (suite *DeployTestSuite) TestDeploy() {
	fileContent := []byte("server { listen 80; }")

	tests := []struct {
		name       string
		setupFunc  func()
		setupStubs func() (jetstream.ObjectStore, jetstream.KeyValue)
		req        DeployRequest
		wantErr    bool
		wantErrMsg string
	}{
		{
			name: "when marshal state fails returns error",
			setupFunc: func() {
				// Force the state-serialization step to fail so the
				// post-write error path is exercised.
				marshalJSON = func(_ interface{}) ([]byte, error) {
					return nil, fmt.Errorf("marshal failure")
				}
			},
			setupStubs: func() (jetstream.ObjectStore, jetstream.KeyValue) {
				obj := &stubObjStoreInternal{getBytesData: fileContent}
				kv := &stubKVInternal{getErr: assert.AnError}
				return obj, kv
			},
			req: DeployRequest{
				ObjectName:  "nginx.conf",
				Path:        "/etc/nginx/nginx.conf",
				ContentType: "raw",
			},
			wantErr:    true,
			wantErrMsg: "failed to marshal file state",
		},
	}

	for _, tc := range tests {
		suite.Run(tc.name, func() {
			if tc.setupFunc != nil {
				tc.setupFunc()
			}

			objStore, stateKV := tc.setupStubs()

			provider := New(
				suite.logger,
				afero.NewMemMapFs(),
				objStore,
				stateKV,
				"test-host",
			)

			got, err := provider.Deploy(suite.ctx, tc.req)

			if tc.wantErr {
				suite.Error(err)
				suite.ErrorContains(err, tc.wantErrMsg)
				suite.Nil(got)
			} else {
				suite.NoError(err)
				suite.Require().NotNil(got)
			}
		})
	}
}
+
// TestDeployTestSuite is the `go test` entry point for DeployTestSuite.
func TestDeployTestSuite(t *testing.T) {
	suite.Run(t, new(DeployTestSuite))
}
+
// stubObjStoreInternal embeds jetstream.ObjectStore to satisfy the interface.
// Only GetBytes is implemented; any other call goes through the nil embedded
// interface and panics with a nil-pointer dereference, failing the test fast.
type stubObjStoreInternal struct {
	jetstream.ObjectStore
	getBytesData []byte // payload returned verbatim by GetBytes
}

// GetBytes returns the canned payload with a nil error.
func (s *stubObjStoreInternal) GetBytes(
	_ context.Context,
	_ string,
	_ ...jetstream.GetObjectOpt,
) ([]byte, error) {
	return s.getBytesData, nil
}
+
// stubKVInternal embeds jetstream.KeyValue to satisfy the interface.
// Only Get is implemented; any other call goes through the nil embedded
// interface and panics with a nil-pointer dereference, failing the test fast.
type stubKVInternal struct {
	jetstream.KeyValue
	getErr error // returned by Get; a non-nil value simulates "no state"
}

// Get always returns (nil, s.getErr).
func (s *stubKVInternal) Get(
	_ context.Context,
	_ string,
) (jetstream.KeyValueEntry, error) {
	return nil, s.getErr
}
diff --git a/internal/provider/file/fixtures_test.go b/internal/provider/file/fixtures_test.go
new file mode 100644
index 00000000..210b73da
--- /dev/null
+++ b/internal/provider/file/fixtures_test.go
@@ -0,0 +1,175 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "io"
+
+ "github.com/nats-io/nats.go/jetstream"
+)
+
// computeTestSHA256 returns the lowercase hex SHA-256 digest of data,
// mirroring the production hash so tests can precompute expected values.
func computeTestSHA256(
	data []byte,
) string {
	digest := sha256.New()
	// hash.Hash.Write never fails for in-memory input.
	_, _ = digest.Write(data)

	return hex.EncodeToString(digest.Sum(nil))
}
+
// stubObjectStore is a minimal test stub implementing jetstream.ObjectStore.
// Only GetBytes is functional; all other methods panic if called, which
// surfaces any unexpected production call as an immediate test failure
// instead of a silent no-op.
type stubObjectStore struct {
	getBytesData []byte // payload returned by GetBytes
	getBytesErr  error  // error returned by GetBytes
}

// GetBytes returns the configured payload/error pair.
func (s *stubObjectStore) GetBytes(
	_ context.Context,
	_ string,
	_ ...jetstream.GetObjectOpt,
) ([]byte, error) {
	return s.getBytesData, s.getBytesErr
}

func (s *stubObjectStore) Put(
	_ context.Context,
	_ jetstream.ObjectMeta,
	_ io.Reader,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: Put not implemented")
}

func (s *stubObjectStore) PutBytes(
	_ context.Context,
	_ string,
	_ []byte,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: PutBytes not implemented")
}

func (s *stubObjectStore) PutString(
	_ context.Context,
	_ string,
	_ string,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: PutString not implemented")
}

func (s *stubObjectStore) PutFile(
	_ context.Context,
	_ string,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: PutFile not implemented")
}

func (s *stubObjectStore) Get(
	_ context.Context,
	_ string,
	_ ...jetstream.GetObjectOpt,
) (jetstream.ObjectResult, error) {
	panic("stubObjectStore: Get not implemented")
}

func (s *stubObjectStore) GetString(
	_ context.Context,
	_ string,
	_ ...jetstream.GetObjectOpt,
) (string, error) {
	panic("stubObjectStore: GetString not implemented")
}

func (s *stubObjectStore) GetFile(
	_ context.Context,
	_ string,
	_ string,
	_ ...jetstream.GetObjectOpt,
) error {
	panic("stubObjectStore: GetFile not implemented")
}

func (s *stubObjectStore) GetInfo(
	_ context.Context,
	_ string,
	_ ...jetstream.GetObjectInfoOpt,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: GetInfo not implemented")
}

func (s *stubObjectStore) UpdateMeta(
	_ context.Context,
	_ string,
	_ jetstream.ObjectMeta,
) error {
	panic("stubObjectStore: UpdateMeta not implemented")
}

func (s *stubObjectStore) Delete(
	_ context.Context,
	_ string,
) error {
	panic("stubObjectStore: Delete not implemented")
}

func (s *stubObjectStore) AddLink(
	_ context.Context,
	_ string,
	_ *jetstream.ObjectInfo,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: AddLink not implemented")
}

func (s *stubObjectStore) AddBucketLink(
	_ context.Context,
	_ string,
	_ jetstream.ObjectStore,
) (*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: AddBucketLink not implemented")
}

func (s *stubObjectStore) Seal(
	_ context.Context,
) error {
	panic("stubObjectStore: Seal not implemented")
}

func (s *stubObjectStore) Watch(
	_ context.Context,
	_ ...jetstream.WatchOpt,
) (jetstream.ObjectWatcher, error) {
	panic("stubObjectStore: Watch not implemented")
}

func (s *stubObjectStore) List(
	_ context.Context,
	_ ...jetstream.ListObjectsOpt,
) ([]*jetstream.ObjectInfo, error) {
	panic("stubObjectStore: List not implemented")
}

func (s *stubObjectStore) Status(
	_ context.Context,
) (jetstream.ObjectStoreStatus, error) {
	panic("stubObjectStore: Status not implemented")
}
diff --git a/internal/provider/file/mocks/generate.go b/internal/provider/file/mocks/generate.go
new file mode 100644
index 00000000..fb0a0384
--- /dev/null
+++ b/internal/provider/file/mocks/generate.go
@@ -0,0 +1,24 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package mocks provides mock implementations for testing.
+package mocks
+
+//go:generate go tool github.com/golang/mock/mockgen -source=../types.go -destination=types.gen.go -package=mocks
diff --git a/internal/provider/file/mocks/mocks.go b/internal/provider/file/mocks/mocks.go
new file mode 100644
index 00000000..068f8788
--- /dev/null
+++ b/internal/provider/file/mocks/mocks.go
@@ -0,0 +1,51 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package mocks
+
+import (
+ "github.com/golang/mock/gomock"
+
+ "github.com/retr0h/osapi/internal/provider/file"
+)
+
// NewPlainMockProvider creates a Mock without defaults; callers must set
// every expectation themselves.
func NewPlainMockProvider(ctrl *gomock.Controller) *MockProvider {
	return NewMockProvider(ctrl)
}

// NewDefaultMockProvider creates a Mock with happy-path defaults: Deploy
// reports a changed file and Status reports in-sync, both with fixed
// placeholder path/SHA values and AnyTimes() so call counts are unchecked.
func NewDefaultMockProvider(ctrl *gomock.Controller) *MockProvider {
	mock := NewPlainMockProvider(ctrl)

	mock.EXPECT().Deploy(gomock.Any(), gomock.Any()).Return(&file.DeployResult{
		Changed: true,
		SHA256:  "abc123def456",
		Path:    "/etc/mock/file.conf",
	}, nil).AnyTimes()

	mock.EXPECT().Status(gomock.Any(), gomock.Any()).Return(&file.StatusResult{
		Path:   "/etc/mock/file.conf",
		Status: "in-sync",
		SHA256: "abc123def456",
	}, nil).AnyTimes()

	return mock
}
diff --git a/internal/provider/file/mocks/types.gen.go b/internal/provider/file/mocks/types.gen.go
new file mode 100644
index 00000000..6361529e
--- /dev/null
+++ b/internal/provider/file/mocks/types.gen.go
@@ -0,0 +1,66 @@
+// Code generated by MockGen. DO NOT EDIT.
+// Source: ../types.go
+
+// Package mocks is a generated GoMock package.
+package mocks
+
+import (
+ context "context"
+ reflect "reflect"
+
+ gomock "github.com/golang/mock/gomock"
+ file "github.com/retr0h/osapi/internal/provider/file"
+)
+
+// MockProvider is a mock of Provider interface.
+type MockProvider struct {
+ ctrl *gomock.Controller
+ recorder *MockProviderMockRecorder
+}
+
+// MockProviderMockRecorder is the mock recorder for MockProvider.
+type MockProviderMockRecorder struct {
+ mock *MockProvider
+}
+
+// NewMockProvider creates a new mock instance.
+func NewMockProvider(ctrl *gomock.Controller) *MockProvider {
+ mock := &MockProvider{ctrl: ctrl}
+ mock.recorder = &MockProviderMockRecorder{mock}
+ return mock
+}
+
+// EXPECT returns an object that allows the caller to indicate expected use.
+func (m *MockProvider) EXPECT() *MockProviderMockRecorder {
+ return m.recorder
+}
+
+// Deploy mocks base method.
+func (m *MockProvider) Deploy(ctx context.Context, req file.DeployRequest) (*file.DeployResult, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Deploy", ctx, req)
+ ret0, _ := ret[0].(*file.DeployResult)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// Deploy indicates an expected call of Deploy.
+func (mr *MockProviderMockRecorder) Deploy(ctx, req interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Deploy", reflect.TypeOf((*MockProvider)(nil).Deploy), ctx, req)
+}
+
+// Status mocks base method.
+func (m *MockProvider) Status(ctx context.Context, req file.StatusRequest) (*file.StatusResult, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Status", ctx, req)
+ ret0, _ := ret[0].(*file.StatusResult)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// Status indicates an expected call of Status.
+func (mr *MockProviderMockRecorder) Status(ctx, req interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Status", reflect.TypeOf((*MockProvider)(nil).Status), ctx, req)
+}
diff --git a/internal/provider/file/provider.go b/internal/provider/file/provider.go
new file mode 100644
index 00000000..4d647f64
--- /dev/null
+++ b/internal/provider/file/provider.go
@@ -0,0 +1,66 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+// Package file implements file deploy and status operations using NATS
+// Object Store for content and KV for state tracking.
+package file
+
+import (
+ "log/slog"
+
+ "github.com/nats-io/nats.go/jetstream"
+ "github.com/spf13/afero"
+
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Compile-time interface check.
var _ Provider = (*Service)(nil)

// Service implements the Provider interface for file deploy and status
// operations using NATS Object Store for content and KV for state tracking.
type Service struct {
	provider.FactsAware

	logger   *slog.Logger
	fs       afero.Fs              // filesystem abstraction; tests inject a mem-backed Fs
	objStore jetstream.ObjectStore // source of file content, keyed by object name
	stateKV  jetstream.KeyValue    // per-host deploy state, keyed by buildStateKey
	hostname string                // namespaces state keys so hosts don't collide
}

// New creates a new Service with the given dependencies.
// Facts are not available at construction time; call SetFactsFunc after
// the agent is initialized to wire template rendering to live facts.
func New(
	logger *slog.Logger,
	fs afero.Fs,
	objStore jetstream.ObjectStore,
	stateKV jetstream.KeyValue,
	hostname string,
) *Service {
	return &Service{
		logger:   logger,
		fs:       fs,
		objStore: objStore,
		stateKV:  stateKV,
		hostname: hostname,
	}
}
diff --git a/internal/provider/file/status.go b/internal/provider/file/status.go
new file mode 100644
index 00000000..e4af6c8e
--- /dev/null
+++ b/internal/provider/file/status.go
@@ -0,0 +1,77 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+
+ "github.com/spf13/afero"
+
+ "github.com/retr0h/osapi/internal/job"
+)
+
+// Status checks the current state of a deployed file against its expected
+// SHA-256 from the file-state KV. Returns "in-sync" if the file matches,
+// "drifted" if it differs, or "missing" if the file or state entry is absent.
+func (p *Service) Status(
+ ctx context.Context,
+ req StatusRequest,
+) (*StatusResult, error) {
+ stateKey := buildStateKey(p.hostname, req.Path)
+
+ entry, err := p.stateKV.Get(ctx, stateKey)
+ if err != nil {
+ return &StatusResult{
+ Path: req.Path,
+ Status: "missing",
+ }, nil
+ }
+
+ var state job.FileState
+ if err := json.Unmarshal(entry.Value(), &state); err != nil {
+ return nil, fmt.Errorf("failed to parse file state: %w", err)
+ }
+
+ data, err := afero.ReadFile(p.fs, req.Path)
+ if err != nil {
+ return &StatusResult{
+ Path: req.Path,
+ Status: "missing",
+ }, nil
+ }
+
+ localSHA := computeSHA256(data)
+ if localSHA == state.SHA256 {
+ return &StatusResult{
+ Path: req.Path,
+ Status: "in-sync",
+ SHA256: localSHA,
+ }, nil
+ }
+
+ return &StatusResult{
+ Path: req.Path,
+ Status: "drifted",
+ SHA256: localSHA,
+ }, nil
+}
diff --git a/internal/provider/file/status_public_test.go b/internal/provider/file/status_public_test.go
new file mode 100644
index 00000000..35245b5c
--- /dev/null
+++ b/internal/provider/file/status_public_test.go
@@ -0,0 +1,223 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "encoding/json"
+ "log/slog"
+ "os"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/retr0h/osapi/internal/job"
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+ "github.com/retr0h/osapi/internal/provider/file"
+)
+
+// StatusPublicTestSuite exercises Service.Status through the package's
+// public API, using an in-memory filesystem and a mocked file-state KV.
+type StatusPublicTestSuite struct {
+	suite.Suite
+
+	// ctrl governs the gomock expectations for each test.
+	ctrl   *gomock.Controller
+	logger *slog.Logger
+	ctx    context.Context
+	// appFs is an in-memory filesystem, recreated per subtest.
+	appFs  afero.Fs
+	mockKV *jobmocks.MockKeyValue
+}
+
+// SetupTest initializes fresh mocks, logger, context, and filesystem
+// before each test method runs.
+func (suite *StatusPublicTestSuite) SetupTest() {
+	suite.ctrl = gomock.NewController(suite.T())
+	suite.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+	suite.ctx = context.Background()
+	suite.appFs = afero.NewMemMapFs()
+	suite.mockKV = jobmocks.NewMockKeyValue(suite.ctrl)
+}
+
+// TearDownTest verifies all mock expectations were satisfied.
+func (suite *StatusPublicTestSuite) TearDownTest() {
+	suite.ctrl.Finish()
+}
+
+// TestStatus covers the in-sync, drifted, file-missing, corrupt-state,
+// and state-missing paths of Service.Status.
+func (suite *StatusPublicTestSuite) TestStatus() {
+	fileContent := []byte("server { listen 80; }")
+	fileSHA := computeTestSHA256(fileContent)
+	driftedContent := []byte("server { listen 443; }")
+	driftedSHA := computeTestSHA256(driftedContent)
+
+	tests := []struct {
+		name       string
+		setupMock  func()
+		req        file.StatusRequest
+		want       *file.StatusResult
+		wantErr    bool
+		wantErrMsg string
+	}{
+		{
+			// File on disk matches the SHA recorded in KV state.
+			name: "when file in sync",
+			setupMock: func() {
+				_ = afero.WriteFile(suite.appFs, "/etc/nginx/nginx.conf", fileContent, 0o644)
+
+				existingState := job.FileState{
+					SHA256: fileSHA,
+					Path:   "/etc/nginx/nginx.conf",
+				}
+				stateBytes, _ := json.Marshal(existingState)
+
+				mockEntry := jobmocks.NewMockKeyValueEntry(suite.ctrl)
+				mockEntry.EXPECT().Value().Return(stateBytes)
+
+				suite.mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(mockEntry, nil)
+			},
+			req: file.StatusRequest{
+				Path: "/etc/nginx/nginx.conf",
+			},
+			want: &file.StatusResult{
+				Path:   "/etc/nginx/nginx.conf",
+				Status: "in-sync",
+				SHA256: fileSHA,
+			},
+		},
+		{
+			// File content differs from the recorded SHA.
+			name: "when file drifted",
+			setupMock: func() {
+				_ = afero.WriteFile(suite.appFs, "/etc/nginx/nginx.conf", driftedContent, 0o644)
+
+				existingState := job.FileState{
+					SHA256: fileSHA,
+					Path:   "/etc/nginx/nginx.conf",
+				}
+				stateBytes, _ := json.Marshal(existingState)
+
+				mockEntry := jobmocks.NewMockKeyValueEntry(suite.ctrl)
+				mockEntry.EXPECT().Value().Return(stateBytes)
+
+				suite.mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(mockEntry, nil)
+			},
+			req: file.StatusRequest{
+				Path: "/etc/nginx/nginx.conf",
+			},
+			want: &file.StatusResult{
+				Path:   "/etc/nginx/nginx.conf",
+				Status: "drifted",
+				SHA256: driftedSHA,
+			},
+		},
+		{
+			// State exists in KV but the file is absent on disk.
+			name: "when file missing on disk",
+			setupMock: func() {
+				existingState := job.FileState{
+					SHA256: fileSHA,
+					Path:   "/etc/nginx/nginx.conf",
+				}
+				stateBytes, _ := json.Marshal(existingState)
+
+				mockEntry := jobmocks.NewMockKeyValueEntry(suite.ctrl)
+				mockEntry.EXPECT().Value().Return(stateBytes)
+
+				suite.mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(mockEntry, nil)
+			},
+			req: file.StatusRequest{
+				Path: "/etc/nginx/nginx.conf",
+			},
+			want: &file.StatusResult{
+				Path:   "/etc/nginx/nginx.conf",
+				Status: "missing",
+			},
+		},
+		{
+			// Corrupt KV state must surface as an error, not a status.
+			name: "when state entry has invalid JSON",
+			setupMock: func() {
+				mockEntry := jobmocks.NewMockKeyValueEntry(suite.ctrl)
+				mockEntry.EXPECT().Value().Return([]byte("not-json"))
+
+				suite.mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(mockEntry, nil)
+			},
+			req: file.StatusRequest{
+				Path: "/etc/nginx/nginx.conf",
+			},
+			wantErr:    true,
+			wantErrMsg: "failed to parse file state",
+		},
+		{
+			// Any KV Get failure is reported as "missing".
+			name: "when no state entry",
+			setupMock: func() {
+				suite.mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(nil, assert.AnError)
+			},
+			req: file.StatusRequest{
+				Path: "/etc/nginx/nginx.conf",
+			},
+			want: &file.StatusResult{
+				Path:   "/etc/nginx/nginx.conf",
+				Status: "missing",
+			},
+		},
+	}
+
+	for _, tc := range tests {
+		suite.Run(tc.name, func() {
+			// Reset filesystem for each test case.
+			suite.appFs = afero.NewMemMapFs()
+
+			if tc.setupMock != nil {
+				tc.setupMock()
+			}
+
+			provider := file.New(
+				suite.logger,
+				suite.appFs,
+				&stubObjectStore{},
+				suite.mockKV,
+				"test-host",
+			)
+
+			got, err := provider.Status(suite.ctx, tc.req)
+
+			if tc.wantErr {
+				suite.Error(err)
+				suite.ErrorContains(err, tc.wantErrMsg)
+				suite.Nil(got)
+			} else {
+				suite.NoError(err)
+				suite.Require().NotNil(got)
+				suite.Equal(tc.want, got)
+			}
+		})
+	}
+}
+
+// TestStatusPublicTestSuite registers the suite with the standard
+// `go test` runner.
+func TestStatusPublicTestSuite(t *testing.T) {
+	suite.Run(t, &StatusPublicTestSuite{})
+}
diff --git a/internal/provider/file/template.go b/internal/provider/file/template.go
new file mode 100644
index 00000000..df64af88
--- /dev/null
+++ b/internal/provider/file/template.go
@@ -0,0 +1,62 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import (
+ "bytes"
+ "fmt"
+ "text/template"
+)
+
+// TemplateContext is the data available to Go templates during rendering.
+// It is populated by renderTemplate from the provider's cached facts,
+// the caller-supplied vars, and the agent hostname, and is accessed in
+// templates as .Facts, .Vars, and .Hostname.
+type TemplateContext struct {
+	// Facts contains agent facts (architecture, kernel, etc.).
+	Facts map[string]any
+	// Vars contains user-supplied template variables.
+	Vars map[string]any
+	// Hostname is the agent's hostname.
+	Hostname string
+}
+
+// renderTemplate parses rawTemplate as a Go text/template and executes
+// it against a TemplateContext carrying the provider's cached facts,
+// the caller-supplied vars, and the agent hostname. It returns the
+// rendered bytes, or an error if parsing or execution fails.
+func (p *Service) renderTemplate(
+	rawTemplate []byte,
+	vars map[string]any,
+) ([]byte, error) {
+	parsed, err := template.New("file").Parse(string(rawTemplate))
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse template: %w", err)
+	}
+
+	data := TemplateContext{
+		Facts:    p.Facts(),
+		Vars:     vars,
+		Hostname: p.hostname,
+	}
+
+	var rendered bytes.Buffer
+	if err := parsed.Execute(&rendered, data); err != nil {
+		return nil, fmt.Errorf("failed to execute template: %w", err)
+	}
+
+	return rendered.Bytes(), nil
+}
diff --git a/internal/provider/file/template_public_test.go b/internal/provider/file/template_public_test.go
new file mode 100644
index 00000000..d7096742
--- /dev/null
+++ b/internal/provider/file/template_public_test.go
@@ -0,0 +1,234 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file_test
+
+import (
+ "context"
+ "log/slog"
+ "os"
+ "testing"
+
+ "github.com/golang/mock/gomock"
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/suite"
+
+ jobmocks "github.com/retr0h/osapi/internal/job/mocks"
+ "github.com/retr0h/osapi/internal/provider"
+ "github.com/retr0h/osapi/internal/provider/file"
+)
+
+// TemplatePublicTestSuite exercises template rendering through the
+// public Deploy API.
+type TemplatePublicTestSuite struct {
+	suite.Suite
+
+	logger *slog.Logger
+	ctx    context.Context
+}
+
+// SetupTest initializes the logger and context before each test.
+func (suite *TemplatePublicTestSuite) SetupTest() {
+	suite.logger = slog.New(slog.NewTextHandler(os.Stdout, nil))
+	suite.ctx = context.Background()
+}
+
+// TearDownTest is a no-op; mocks are created and verified per subtest
+// with their own gomock controllers.
+func (suite *TemplatePublicTestSuite) TearDownTest() {}
+
+// TestDeployTemplate exercises Deploy with ContentType "template",
+// covering var substitution, hostname, facts, conditionals, ranges,
+// and template parse/execution failures.
+func (suite *TemplatePublicTestSuite) TestDeployTemplate() {
+	tests := []struct {
+		name        string
+		template    string
+		vars        map[string]any
+		factsFn     provider.FactsFunc
+		hostname    string
+		wantContent string
+		wantErr     bool
+		wantErrMsg  string
+		wantChanged bool
+	}{
+		{
+			name:        "when simple var substitution",
+			template:    "server {{ .Vars.host }}",
+			vars:        map[string]any{"host": "10.0.0.1"},
+			hostname:    "web-01",
+			wantContent: "server 10.0.0.1",
+			wantChanged: true,
+		},
+		{
+			name:        "when hostname",
+			template:    "# {{ .Hostname }}",
+			hostname:    "web-01",
+			wantContent: "# web-01",
+			wantChanged: true,
+		},
+		{
+			name:        "when conditional with vars",
+			template:    `{{ if eq .Vars.env "prod" }}production{{ else }}dev{{ end }}`,
+			vars:        map[string]any{"env": "prod"},
+			hostname:    "web-01",
+			wantContent: "production",
+			wantChanged: true,
+		},
+		{
+			name:     "when facts available",
+			template: `arch: {{ index .Facts "architecture" }}`,
+			factsFn: func() map[string]any {
+				return map[string]any{"architecture": "amd64"}
+			},
+			hostname:    "web-01",
+			wantContent: "arch: amd64",
+			wantChanged: true,
+		},
+		{
+			name:        "when nil facts",
+			template:    "{{ .Hostname }}",
+			factsFn:     nil,
+			hostname:    "web-01",
+			wantContent: "web-01",
+			wantChanged: true,
+		},
+		{
+			name:        "when nil vars",
+			template:    "{{ .Hostname }}",
+			vars:        nil,
+			hostname:    "web-01",
+			wantContent: "web-01",
+			wantChanged: true,
+		},
+		{
+			// `call` on a non-function value fails at execution time.
+			name:       "when template execution fails",
+			template:   "{{ call .Hostname }}",
+			hostname:   "web-01",
+			wantErr:    true,
+			wantErrMsg: "failed to render template",
+		},
+		{
+			name:       "when invalid template syntax",
+			template:   "{{ .Invalid",
+			hostname:   "web-01",
+			wantErr:    true,
+			wantErrMsg: "failed to render template",
+		},
+		{
+			name:     "when missing var key renders no value",
+			template: "val={{ .Vars.missing }}",
+			vars:     map[string]any{},
+			hostname: "web-01",
+			// text/template's default missingkey behavior renders a
+			// missing map key as the string "<no value>".
+			wantContent: "val=<no value>",
+			wantChanged: true,
+		},
+		{
+			name:        "when multiple vars",
+			template:    "{{ .Vars.host }}:{{ .Vars.port }}",
+			vars:        map[string]any{"host": "10.0.0.1", "port": "8080"},
+			hostname:    "web-01",
+			wantContent: "10.0.0.1:8080",
+			wantChanged: true,
+		},
+		{
+			name:     "when facts and vars combined",
+			template: `host={{ .Hostname }} arch={{ index .Facts "architecture" }} env={{ .Vars.env }}`,
+			factsFn: func() map[string]any {
+				return map[string]any{"architecture": "arm64"}
+			},
+			vars:        map[string]any{"env": "staging"},
+			hostname:    "web-02",
+			wantContent: "host=web-02 arch=arm64 env=staging",
+			wantChanged: true,
+		},
+		{
+			name:        "when conditional false branch",
+			template:    `{{ if eq .Vars.env "prod" }}production{{ else }}dev{{ end }}`,
+			vars:        map[string]any{"env": "dev"},
+			hostname:    "web-01",
+			wantContent: "dev",
+			wantChanged: true,
+		},
+		{
+			name:        "when range over vars slice",
+			template:    `{{ range .Vars.servers }}{{ . }} {{ end }}`,
+			vars:        map[string]any{"servers": []any{"a", "b", "c"}},
+			hostname:    "web-01",
+			wantContent: "a b c ",
+			wantChanged: true,
+		},
+	}
+
+	for _, tc := range tests {
+		suite.Run(tc.name, func() {
+			ctrl := gomock.NewController(suite.T())
+			defer ctrl.Finish()
+
+			appFs := afero.Fs(afero.NewMemMapFs())
+			mockKV := jobmocks.NewMockKeyValue(ctrl)
+			mockObj := &stubObjectStore{
+				getBytesData: []byte(tc.template),
+			}
+
+			// Only successful deploys consult and update the
+			// file-state KV; error paths never reach it.
+			if !tc.wantErr {
+				mockKV.EXPECT().
+					Get(gomock.Any(), gomock.Any()).
+					Return(nil, assert.AnError)
+
+				mockKV.EXPECT().
+					Put(gomock.Any(), gomock.Any(), gomock.Any()).
+					Return(uint64(1), nil)
+			}
+
+			p := file.New(
+				suite.logger,
+				appFs,
+				mockObj,
+				mockKV,
+				tc.hostname,
+			)
+			if tc.factsFn != nil {
+				p.SetFactsFunc(tc.factsFn)
+			}
+
+			got, err := p.Deploy(suite.ctx, file.DeployRequest{
+				ObjectName:  "test.conf",
+				Path:        "/etc/test.conf",
+				ContentType: "template",
+				Vars:        tc.vars,
+			})
+
+			if tc.wantErr {
+				suite.Error(err)
+				suite.ErrorContains(err, tc.wantErrMsg)
+				suite.Nil(got)
+			} else {
+				suite.NoError(err)
+				suite.Require().NotNil(got)
+				suite.Equal(tc.wantChanged, got.Changed)
+				suite.Equal("/etc/test.conf", got.Path)
+
+				data, readErr := afero.ReadFile(appFs, "/etc/test.conf")
+				suite.Require().NoError(readErr)
+				suite.Equal(tc.wantContent, string(data))
+			}
+		})
+	}
+}
+
+// TestTemplatePublicTestSuite registers the suite with the standard
+// `go test` runner.
+func TestTemplatePublicTestSuite(t *testing.T) {
+	suite.Run(t, &TemplatePublicTestSuite{})
+}
diff --git a/internal/provider/file/types.go b/internal/provider/file/types.go
new file mode 100644
index 00000000..67c58509
--- /dev/null
+++ b/internal/provider/file/types.go
@@ -0,0 +1,83 @@
+// Copyright (c) 2026 John Dewey
+
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+// DEALINGS IN THE SOFTWARE.
+
+package file
+
+import "context"
+
+// DeployRequest contains parameters for deploying a file to disk.
+// Fields marshal to snake_case JSON for job payloads.
+type DeployRequest struct {
+	// ObjectName is the name of the object in the NATS object store.
+	ObjectName string `json:"object_name"`
+	// Path is the destination path on the target filesystem.
+	Path string `json:"path"`
+	// Mode is the file permission mode (e.g., "0644").
+	Mode string `json:"mode,omitempty"`
+	// Owner is the file owner user.
+	Owner string `json:"owner,omitempty"`
+	// Group is the file owner group.
+	Group string `json:"group,omitempty"`
+	// ContentType specifies whether the content is "raw" or "template".
+	ContentType string `json:"content_type"`
+	// Vars contains template variables when ContentType is "template".
+	Vars map[string]any `json:"vars,omitempty"`
+}
+
+// DeployResult contains the result of a file deploy operation.
+type DeployResult struct {
+	// Changed indicates whether the file was written (false if SHA matched).
+	Changed bool `json:"changed"`
+	// SHA256 is the SHA-256 hash of the deployed file content.
+	SHA256 string `json:"sha256"`
+	// Path is the destination path where the file was deployed.
+	Path string `json:"path"`
+}
+
+// StatusRequest contains parameters for checking file status.
+type StatusRequest struct {
+	// Path is the filesystem path to check.
+	Path string `json:"path"`
+}
+
+// StatusResult contains the result of a file status check.
+type StatusResult struct {
+	// Path is the filesystem path that was checked.
+	Path string `json:"path"`
+	// Status indicates the file state: "in-sync", "drifted", or "missing".
+	Status string `json:"status"`
+	// SHA256 is the current SHA-256 hash of the file on disk, if present.
+	// Omitted from JSON when the file is missing.
+	SHA256 string `json:"sha256,omitempty"`
+}
+
+// Provider defines the interface for file operations. It is satisfied
+// by Service and allows callers to be tested against mocks.
+type Provider interface {
+	// Deploy writes file content to the target path with the specified
+	// permissions. Returns whether the file was changed and its SHA-256.
+	Deploy(
+		ctx context.Context,
+		req DeployRequest,
+	) (*DeployResult, error)
+	// Status checks the current state of a deployed file against its
+	// expected SHA-256 from the file-state KV.
+	Status(
+		ctx context.Context,
+		req StatusRequest,
+	) (*StatusResult, error)
+}
diff --git a/internal/provider/network/dns/darwin.go b/internal/provider/network/dns/darwin.go
index bcd87a16..b0406dff 100644
--- a/internal/provider/network/dns/darwin.go
+++ b/internal/provider/network/dns/darwin.go
@@ -24,10 +24,13 @@ import (
"log/slog"
"github.com/retr0h/osapi/internal/exec"
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the DNS interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
logger *slog.Logger
execManager exec.Manager
}
diff --git a/internal/provider/network/dns/linux.go b/internal/provider/network/dns/linux.go
index 7d995ef0..8d7ef4a5 100644
--- a/internal/provider/network/dns/linux.go
+++ b/internal/provider/network/dns/linux.go
@@ -21,8 +21,14 @@
// Package dns provides DNS configuration management.
package dns
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the DNS interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/network/dns/ubuntu.go b/internal/provider/network/dns/ubuntu.go
index 19bba70a..d5bc583d 100644
--- a/internal/provider/network/dns/ubuntu.go
+++ b/internal/provider/network/dns/ubuntu.go
@@ -24,10 +24,13 @@ import (
"log/slog"
"github.com/retr0h/osapi/internal/exec"
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the DNS interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
logger *slog.Logger
execManager exec.Manager
}
diff --git a/internal/provider/network/netinfo/netinfo.go b/internal/provider/network/netinfo/netinfo.go
index 6c6bd733..82dffeee 100644
--- a/internal/provider/network/netinfo/netinfo.go
+++ b/internal/provider/network/netinfo/netinfo.go
@@ -23,12 +23,16 @@ package netinfo
import (
"net"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Netinfo provides cross-platform network interface information.
// Platform-specific types (Linux, Darwin) embed this for shared
// interface enumeration and add their own route implementations.
type Netinfo struct {
+ provider.FactsAware
+
InterfacesFn func() ([]net.Interface, error)
AddrsFn func(iface net.Interface) ([]net.Addr, error)
}
diff --git a/internal/provider/network/ping/darwin.go b/internal/provider/network/ping/darwin.go
index efbbb938..c9fc1cce 100644
--- a/internal/provider/network/ping/darwin.go
+++ b/internal/provider/network/ping/darwin.go
@@ -22,10 +22,14 @@ package ping
import (
probing "github.com/prometheus-community/pro-bing"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the Ping interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
NewPingerFn func(address string) (Pinger, error)
}
diff --git a/internal/provider/network/ping/linux.go b/internal/provider/network/ping/linux.go
index afa986c7..c407230d 100644
--- a/internal/provider/network/ping/linux.go
+++ b/internal/provider/network/ping/linux.go
@@ -21,8 +21,14 @@
// Package ping provides network ping functionality.
package ping
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the Ping interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/network/ping/ubuntu.go b/internal/provider/network/ping/ubuntu.go
index 52c43acc..4b4cb9b3 100644
--- a/internal/provider/network/ping/ubuntu.go
+++ b/internal/provider/network/ping/ubuntu.go
@@ -22,10 +22,14 @@ package ping
import (
probing "github.com/prometheus-community/pro-bing"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the Ping interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
NewPingerFn func(address string) (Pinger, error)
}
diff --git a/internal/provider/node/disk/darwin.go b/internal/provider/node/disk/darwin.go
index 3951b0fe..4af07969 100644
--- a/internal/provider/node/disk/darwin.go
+++ b/internal/provider/node/disk/darwin.go
@@ -24,10 +24,14 @@ import (
"log/slog"
"github.com/shirou/gopsutil/v4/disk"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the Disk interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
logger *slog.Logger
PartitionsFn func(all bool) ([]disk.PartitionStat, error)
UsageFn func(path string) (*disk.UsageStat, error)
diff --git a/internal/provider/node/disk/linux.go b/internal/provider/node/disk/linux.go
index b39b21d5..1aa3bad3 100644
--- a/internal/provider/node/disk/linux.go
+++ b/internal/provider/node/disk/linux.go
@@ -20,8 +20,14 @@
package disk
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the Disk interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/node/disk/ubuntu.go b/internal/provider/node/disk/ubuntu.go
index 4ed037a6..024f7c4e 100644
--- a/internal/provider/node/disk/ubuntu.go
+++ b/internal/provider/node/disk/ubuntu.go
@@ -24,10 +24,14 @@ import (
"log/slog"
"github.com/shirou/gopsutil/v4/disk"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the Mem interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
logger *slog.Logger
PartitionsFn func(all bool) ([]disk.PartitionStat, error)
UsageFn func(path string) (*disk.UsageStat, error)
diff --git a/internal/provider/node/host/darwin.go b/internal/provider/node/host/darwin.go
index 4ea944d0..bc3f5712 100644
--- a/internal/provider/node/host/darwin.go
+++ b/internal/provider/node/host/darwin.go
@@ -26,10 +26,14 @@ import (
"runtime"
"github.com/shirou/gopsutil/v4/host"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the Host interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
InfoFn func() (*host.InfoStat, error)
HostnameFn func() (string, error)
NumCPUFn func() int
diff --git a/internal/provider/node/host/linux.go b/internal/provider/node/host/linux.go
index c5bf45eb..d372a5b2 100644
--- a/internal/provider/node/host/linux.go
+++ b/internal/provider/node/host/linux.go
@@ -21,8 +21,14 @@
// Package host provides system host information.
package host
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the Load interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/node/host/ubuntu.go b/internal/provider/node/host/ubuntu.go
index bee3b299..fa64b7a8 100644
--- a/internal/provider/node/host/ubuntu.go
+++ b/internal/provider/node/host/ubuntu.go
@@ -26,10 +26,14 @@ import (
"runtime"
"github.com/shirou/gopsutil/v4/host"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the Mem interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
InfoFn func() (*host.InfoStat, error)
HostnameFn func() (string, error)
NumCPUFn func() int
diff --git a/internal/provider/node/load/darwin.go b/internal/provider/node/load/darwin.go
index 77ac476e..ad72b5d7 100644
--- a/internal/provider/node/load/darwin.go
+++ b/internal/provider/node/load/darwin.go
@@ -22,10 +22,14 @@ package load
import (
"github.com/shirou/gopsutil/v4/load"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the Load interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
AvgFn func() (*load.AvgStat, error)
}
diff --git a/internal/provider/node/load/linux.go b/internal/provider/node/load/linux.go
index dea6c42f..1a012faa 100644
--- a/internal/provider/node/load/linux.go
+++ b/internal/provider/node/load/linux.go
@@ -21,8 +21,14 @@
// Package load provides system load average statistics.
package load
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the Load interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/node/load/ubuntu.go b/internal/provider/node/load/ubuntu.go
index 7562dc70..2540b961 100644
--- a/internal/provider/node/load/ubuntu.go
+++ b/internal/provider/node/load/ubuntu.go
@@ -22,10 +22,14 @@ package load
import (
"github.com/shirou/gopsutil/v4/load"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the Mem interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
AvgFn func() (*load.AvgStat, error)
}
diff --git a/internal/provider/node/mem/darwin.go b/internal/provider/node/mem/darwin.go
index 5df8ef70..86d478cc 100644
--- a/internal/provider/node/mem/darwin.go
+++ b/internal/provider/node/mem/darwin.go
@@ -22,10 +22,14 @@ package mem
import (
"github.com/shirou/gopsutil/v4/mem"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Darwin implements the Mem interface for Darwin (macOS).
type Darwin struct {
+ provider.FactsAware
+
VirtualMemoryFn func() (*mem.VirtualMemoryStat, error)
}
diff --git a/internal/provider/node/mem/linux.go b/internal/provider/node/mem/linux.go
index 17693963..bfa10a0d 100644
--- a/internal/provider/node/mem/linux.go
+++ b/internal/provider/node/mem/linux.go
@@ -21,8 +21,14 @@
// Package mem provides memory usage statistics.
package mem
+import (
+ "github.com/retr0h/osapi/internal/provider"
+)
+
// Linux implements the Mem interface for Linux.
-type Linux struct{}
+type Linux struct {
+ provider.FactsAware
+}
// NewLinuxProvider factory to create a new Linux instance.
func NewLinuxProvider() *Linux {
diff --git a/internal/provider/node/mem/ubuntu.go b/internal/provider/node/mem/ubuntu.go
index a3dac02a..b74c6ad6 100644
--- a/internal/provider/node/mem/ubuntu.go
+++ b/internal/provider/node/mem/ubuntu.go
@@ -22,10 +22,14 @@ package mem
import (
"github.com/shirou/gopsutil/v4/mem"
+
+ "github.com/retr0h/osapi/internal/provider"
)
// Ubuntu implements the Mem interface for Ubuntu.
type Ubuntu struct {
+ provider.FactsAware
+
VirtualMemoryFn func() (*mem.VirtualMemoryStat, error)
}