15 changes: 12 additions & 3 deletions internal/data/dataexport/cmd/create/create.go
@@ -99,19 +99,28 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
defer cancel()
namespace, _ := cmd.Flags().GetString("namespace")
ttl, _ := cmd.Flags().GetString("ttl")
publish, _ := cmd.Flags().GetBool("publish")

deName, volumeKind, volumeName, err := parseArgs(args)
if err != nil {
return err
}

flags := cmd.PersistentFlags()
safeClient, err := safeClient.NewSafeClient(flags)
sc, err := safeClient.NewSafeClient(flags)
if err != nil {
return err
}
rtClient, err := safeClient.NewRTClient(v1alpha1.AddToScheme)
rtClient, err := sc.NewRTClient(v1alpha1.AddToScheme)
if err != nil {
return err
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, sc, log)
if err != nil {
return err
}
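
Note: dataio.ParsePublishFlag and dataio.ResolvePublish are called here but not defined anywhere in this diff. A minimal sketch of the assumed shape, inferred only from the call sites; the tri-state type, the names, and the simplified signature are assumptions, not the actual implementation:

// Hypothetical sketch, not part of this PR: a possible shape for the new
// publish helpers in the dataio package, simplified to be self-contained.
package dataio

import "context"

// PublishFlag models --publish as a tri-state value: explicitly true,
// explicitly false, or left unset (auto-detect).
type PublishFlag int

const (
	PublishUnset PublishFlag = iota // flag not passed on the command line
	PublishTrue
	PublishFalse
)

// ResolvePublish collapses the tri-state flag to a plain bool. The real
// helper also receives the runtime client, safe client, and logger (see the
// call sites above), presumably to auto-detect a default from the cluster;
// that branch is only a placeholder here.
func ResolvePublish(ctx context.Context, f PublishFlag) (bool, error) {
	switch f {
	case PublishTrue:
		return true, nil
	case PublishFalse:
		return false, nil
	default:
		return false, nil // placeholder: real auto-detection is not shown in this diff
	}
}
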
11 changes: 10 additions & 1 deletion internal/data/dataexport/cmd/download/download.go
@@ -224,7 +224,6 @@ func recursiveDownload(ctx context.Context, sClient *safeClient.SafeClient, log
func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []string) error {
namespace, _ := cmd.Flags().GetString("namespace")
dstPath, _ := cmd.Flags().GetString("output")
publish, _ := cmd.Flags().GetBool("publish")
ttl, _ := cmd.Flags().GetString("ttl")

dataName, srcPath, err := dataio.ParseArgs(args)
@@ -243,6 +242,16 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
return err
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, sClient, log)
if err != nil {
return err
}

deName, err := util.CreateDataExporterIfNeededFunc(ctx, log, dataName, namespace, publish, ttl, rtClient)
if err != nil {
return err
8 changes: 4 additions & 4 deletions internal/data/dataexport/cmd/download/download_http_test.go
@@ -57,7 +57,7 @@ func TestDownloadFilesystem_OK(t *testing.T) {
outFile := filepath.Join(t.TempDir(), "out.txt")

cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport", "foo.txt", "-o", outFile})
cmd.SetArgs([]string{"myexport", "foo.txt", "-o", outFile, "--publish=false"})
var buf bytes.Buffer
cmd.SetOut(&buf)
cmd.SetErr(&buf)
@@ -87,7 +87,7 @@ func TestDownloadFilesystem_BadPath(t *testing.T) {
defer func() { util.PrepareDownloadFunc = origPrep; util.CreateDataExporterIfNeededFunc = origCreate }()

cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport", "foo.txt", "-o", filepath.Join(t.TempDir(), "out.txt")})
cmd.SetArgs([]string{"myexport", "foo.txt", "-o", filepath.Join(t.TempDir(), "out.txt"), "--publish=false"})
require.NoError(t, cmd.Execute())
}

@@ -115,7 +115,7 @@ func TestDownloadBlock_OK(t *testing.T) {

outFile := filepath.Join(t.TempDir(), "raw.img")
cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport", "-o", outFile})
cmd.SetArgs([]string{"myexport", "-o", outFile, "--publish=false"})
cmd.SetOut(io.Discard)
cmd.SetErr(io.Discard)
require.NoError(t, cmd.Execute())
@@ -141,7 +141,7 @@ func TestDownloadBlock_WrongEndpoint(t *testing.T) {
defer func() { util.PrepareDownloadFunc = origPrep; util.CreateDataExporterIfNeededFunc = origCreate }()

cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport", "-o", filepath.Join(t.TempDir(), "raw.img")})
cmd.SetArgs([]string{"myexport", "-o", filepath.Join(t.TempDir(), "raw.img"), "--publish=false"})
cmd.SetOut(io.Discard)
cmd.SetErr(io.Discard)
require.NoError(t, cmd.Execute())
12 changes: 11 additions & 1 deletion internal/data/dataexport/cmd/list/list.go
@@ -162,7 +162,6 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
defer cancel()

namespace, _ := cmd.Flags().GetString("namespace")
publish, _ := cmd.Flags().GetBool("publish")
ttl, _ := cmd.Flags().GetString("ttl")

dataName, srcPath, err := parseArgs(args)
@@ -181,6 +180,17 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
if err != nil {
return err
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, sClient, log)
if err != nil {
return err
}

deName, err := util.CreateDataExporterIfNeededFunc(ctx, log, dataName, namespace, publish, ttl, rtClient)
if err != nil {
return err
7 changes: 4 additions & 3 deletions internal/data/dataexport/cmd/list/list_http_test.go
@@ -55,7 +55,7 @@ func TestListFilesystem_OK(t *testing.T) {
os.Stdout = w

cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport", "/"})
cmd.SetArgs([]string{"myexport", "/", "--publish=false"})
require.NoError(t, cmd.Execute())

w.Close()
@@ -92,7 +92,7 @@ func TestListBlock_OK(t *testing.T) {
os.Stdout = w

cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetArgs([]string{"myexport"})
cmd.SetArgs([]string{"myexport", "--publish=false"})
require.NoError(t, cmd.Execute())

w.Close()
@@ -124,7 +124,8 @@ func TestListFilesystem_NotDir(t *testing.T) {
cmd := NewCommand(context.TODO(), slog.Default())
cmd.SetOut(&bytes.Buffer{})
cmd.SetErr(&bytes.Buffer{})
cmd.SetArgs([]string{"myexport", "some/invalid"})
cmd.SetArgs([]string{"myexport", "some/invalid", "--publish=false"})
err := cmd.Execute()
require.Error(t, err)
require.Contains(t, err.Error(), "invalid source path")
}
50 changes: 48 additions & 2 deletions internal/data/dataexport/util/util.go
@@ -65,8 +65,9 @@ func GetDataExport(ctx context.Context, deName, namespace string, rtClient ctrlr
return deObj, nil
}

func GetDataExportWithRestart(ctx context.Context, deName, namespace string, rtClient ctrlrtclient.Client) (*v1alpha1.DataExport, error) {
func GetDataExportWithRestart(ctx context.Context, deName, namespace string, publish bool, rtClient ctrlrtclient.Client) (*v1alpha1.DataExport, error) {
deObj := &v1alpha1.DataExport{}
publishReconciled := false

for i := 0; ; i++ {
var returnErr error
@@ -77,6 +78,19 @@ func GetDataExportWithRestart(ctx context.Context, deName, namespace string, rtC
return nil, fmt.Errorf("kube Get dataexport with restart: %s", err.Error())
}

// On the first iteration, reconcile Spec.Publish with the resolved value.
// If the object was patched, restart the loop to pick up the updated status.
if !publishReconciled {
patched, err := EnsureDataExportPublish(ctx, deObj, publish, rtClient)
if err != nil {
return nil, err
}
publishReconciled = true
if patched {
continue
}
}

for _, condition := range deObj.Status.Conditions {
// restart DataExport if Expired
if condition.Type == "Expired" {
@@ -230,7 +244,7 @@ func getExportStatus(ctx context.Context, log *slog.Logger, deName, namespace st
var podURL, volumeMode, internalCAData string

log.Info("Waiting for DataExport to be ready", slog.String("name", deName), slog.String("namespace", namespace))
deObj, err := GetDataExportWithRestart(ctx, deName, namespace, rtClient)
deObj, err := GetDataExportWithRestart(ctx, deName, namespace, public, rtClient)
if err != nil {
return "", "", "", err
}
@@ -307,3 +321,35 @@ func PrepareDownload(ctx context.Context, log *slog.Logger, deName, namespace st

return url, volumeMode, subClient, nil
}

// EnsureDataExportPublish patches DataExport.Spec.Publish to match the resolved value.
// Only the upgrade from false -> true is patched; true -> false is intentionally skipped
// to avoid downgrading already-published resources.
// Returns (true, nil) if the object was patched and the caller should re-read it.
func EnsureDataExportPublish(
ctx context.Context,
deObj *v1alpha1.DataExport,
publish bool,
rtClient ctrlrtclient.Client,
) (bool, error) {
if !publish {
return false, nil
}

if deObj == nil {
return false, fmt.Errorf("nil DataExport object")
}

if deObj.Spec.Publish == publish {
return false, nil
}

patch := ctrlrtclient.MergeFrom(deObj.DeepCopy())
deObj.Spec.Publish = publish

if err := rtClient.Patch(ctx, deObj, patch); err != nil {
return false, fmt.Errorf("patch DataExport publish: %w", err)
}

return true, nil
}
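
A usage sketch of the upgrade-only semantics (hypothetical caller, assumed to live in this util package so the file's existing imports apply; deObj is assumed non-nil):

// Hypothetical illustration, not part of this PR.
func examplePublishUpgrade(ctx context.Context, deObj *v1alpha1.DataExport, rtClient ctrlrtclient.Client) error {
	// publish=false is always a no-op, even if Spec.Publish is currently true.
	if _, err := EnsureDataExportPublish(ctx, deObj, false, rtClient); err != nil {
		return err
	}
	// publish=true patches only while Spec.Publish is still false; when it
	// returns true the caller should re-read the object, as
	// GetDataExportWithRestart does above.
	patched, err := EnsureDataExportPublish(ctx, deObj, true, rtClient)
	if err != nil {
		return err
	}
	_ = patched
	return nil
}
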
11 changes: 10 additions & 1 deletion internal/data/dataimport/cmd/create/create.go
@@ -77,7 +77,6 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
name := args[0]
namespace, _ := cmd.Flags().GetString("namespace")
ttl, _ := cmd.Flags().GetString("ttl")
publish, _ := cmd.Flags().GetBool("publish")
pvcFilePath, _ := cmd.Flags().GetString("file")
wffc, _ := cmd.Flags().GetBool("wffc")

@@ -109,6 +108,16 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
namespace = pvcSpec.Namespace
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, sc, log)
if err != nil {
return err
}

if err := util.CreateDataImport(ctx, name, namespace, ttl, publish, wffc, pvcSpec, rtClient); err != nil {
return err
}
18 changes: 17 additions & 1 deletion internal/data/dataimport/cmd/upload/upload.go
@@ -17,6 +17,7 @@ import (
"github.com/spf13/cobra"

dataio "github.com/deckhouse/deckhouse-cli/internal/data"
v1alpha1 "github.com/deckhouse/deckhouse-cli/internal/data/dataimport/api/v1alpha1"
"github.com/deckhouse/deckhouse-cli/internal/data/dataimport/util"
client "github.com/deckhouse/deckhouse-cli/pkg/libsaferequest/client"
)
@@ -65,7 +66,6 @@ func cmdExamples() string {
func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []string) error {
pathToFile, _ := cmd.Flags().GetString("file")
chunks, _ := cmd.Flags().GetInt("chunks")
publish, _ := cmd.Flags().GetBool("publish")
namespace, _ := cmd.Flags().GetString("namespace")
dstPath, _ := cmd.Flags().GetString("dstPath")
resume, _ := cmd.Flags().GetBool("resume")
@@ -83,6 +83,22 @@

log.Info("Run")

// Create runtime client for publish auto-detection and reconciliation.
rtClient, err := httpClient.NewRTClient(v1alpha1.AddToScheme)
if err != nil {
return err
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, httpClient, log)
if err != nil {
return err
}

permOctal := defaultFilePermissions
uid := os.Getuid()
gid := os.Getgid()
21 changes: 19 additions & 2 deletions internal/data/dataimport/cmd/upload/upload_windows.go
@@ -13,10 +13,12 @@ import (
"strconv"
"strings"

"github.com/spf13/cobra"

dataio "github.com/deckhouse/deckhouse-cli/internal/data"
v1alpha1 "github.com/deckhouse/deckhouse-cli/internal/data/dataimport/api/v1alpha1"
"github.com/deckhouse/deckhouse-cli/internal/data/dataimport/util"
client "github.com/deckhouse/deckhouse-cli/pkg/libsaferequest/client"
"github.com/spf13/cobra"
)

const (
@@ -63,7 +65,6 @@ func cmdExamples() string {
func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []string) error {
pathToFile, _ := cmd.Flags().GetString("file")
chunks, _ := cmd.Flags().GetInt("chunks")
publish, _ := cmd.Flags().GetBool("publish")
namespace, _ := cmd.Flags().GetString("namespace")
dstPath, _ := cmd.Flags().GetString("dstPath")
resume, _ := cmd.Flags().GetBool("resume")
@@ -91,6 +92,22 @@ func Run(ctx context.Context, log *slog.Logger, cmd *cobra.Command, args []strin
}
}

// Create runtime client for publish auto-detection and reconciliation.
rtClient, err := httpClient.NewRTClient(v1alpha1.AddToScheme)
if err != nil {
return err
}

publishFlag, err := dataio.ParsePublishFlag(cmd.Flags())
if err != nil {
return err
}

publish, err := dataio.ResolvePublish(ctx, publishFlag, rtClient, httpClient, log)
if err != nil {
return err
}

podUrl, _, subClient, err := util.PrepareUpload(ctx, log, diName, namespace, publish, httpClient)
if err != nil {
return err