5 changes: 5 additions & 0 deletions acceptance/pipelines/generate/unknown-attribute/output.txt
@@ -0,0 +1,5 @@
Warning: unknown field: unknown_attribute
in src/my_pipeline/spark-pipeline.yml:2:1

Generated pipeline configuration: resources/my_pipeline.pipeline.yml

12 changes: 12 additions & 0 deletions acceptance/pipelines/generate/unknown-attribute/output/resources/my_pipeline.pipeline.yml
@@ -0,0 +1,12 @@
resources:
  pipelines:
    my_pipeline:
      name: My Pipeline
      catalog: ${var.catalog}
      schema: ${var.schema}
      root_path: ../src/my_pipeline
      serverless: true
      libraries: []
      environment:
        dependencies:
          - --editable ${workspace.file_path}
5 changes: 5 additions & 0 deletions acceptance/pipelines/generate/unknown-attribute/script
@@ -0,0 +1,5 @@
export ENABLE_PIPELINES_CLI=1

$CLI pipelines generate --existing-pipeline-dir src/my_pipeline

mv resources output/
2 changes: 2 additions & 0 deletions acceptance/pipelines/generate/unknown-attribute/src/my_pipeline/spark-pipeline.yml
@@ -0,0 +1,2 @@
name: My Pipeline
unknown_attribute: foo
27 changes: 22 additions & 5 deletions cmd/pipelines/generate.go
@@ -1,6 +1,7 @@
package pipelines

import (
+	"context"
	"errors"
	"fmt"
	"os"
@@ -13,6 +14,7 @@ import (
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/dyn/yamlloader"
	"github.com/databricks/cli/libs/dyn/yamlsaver"
+	"github.com/databricks/cli/libs/logdiag"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/spf13/cobra"
)
@@ -50,8 +52,10 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
	}

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
+		ctx := logdiag.InitContext(cmd.Context())
+		cmd.SetContext(ctx)
+
		folderPath := existingPipelineDir
-		ctx := cmd.Context()

		info, err := validateAndParsePath(folderPath)
		if err != nil {
@@ -66,7 +70,7 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
			}
		}

-		spec, err := parseSparkPipelineYAML(sparkPipelineFile)
+		spec, err := parseSparkPipelineYAML(ctx, sparkPipelineFile)
		if err != nil {
			return fmt.Errorf("failed to parse %s: %w", sparkPipelineFile, err)
		}
@@ -181,6 +185,7 @@ type sdpPipeline struct {
	Catalog       string               `json:"catalog,omitempty"`
	Database      string               `json:"database,omitempty"`
	Libraries     []sdpPipelineLibrary `json:"libraries,omitempty"`
+	Storage       string               `json:"storage,omitempty"`
	Configuration map[string]string    `json:"configuration,omitempty"`
}

Contributor comment on the added Storage field: Are you using this?

@@ -195,7 +200,7 @@ type sdpPipelineLibraryGlob struct {
}

// parseSparkPipelineYAML parses a spark-pipeline.yml file.
-func parseSparkPipelineYAML(filePath string) (*sdpPipeline, error) {
+func parseSparkPipelineYAML(ctx context.Context, filePath string) (*sdpPipeline, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open %s: %w", filePath, err)
@@ -208,9 +213,18 @@ func parseSparkPipelineYAML(filePath string) (*sdpPipeline, error) {
	}

	out := sdpPipeline{}
-	err = convert.ToTyped(&out, dv)
+	normalized, diags := convert.Normalize(&out, dv)
+	if diags.HasError() {
+		return nil, fmt.Errorf("failed to parse %s: %w", filePath, diags.Error())
+	}
+
+	for _, diag := range diags {
+		logdiag.LogDiag(ctx, diag)
+	}
+
+	err = convert.ToTyped(&out, normalized)
	if err != nil {
-		return nil, fmt.Errorf("failed to parse %s: %w", filePath, err)
+		return nil, fmt.Errorf("failed to parse %s: %w", filePath, diags.Error())
	}

	return &out, nil
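
For context, a minimal, self-contained sketch of the Normalize-then-ToTyped pattern introduced above. The pipelineSpec struct and the sample YAML are hypothetical, and the yamlloader.LoadYAML signature is an assumption based on its usage in this file, not code from the PR:

package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/dyn/yamlloader"
	"github.com/databricks/cli/libs/logdiag"
)

// pipelineSpec is a toy target type with a single known field.
type pipelineSpec struct {
	Name string `json:"name,omitempty"`
}

func main() {
	ctx := logdiag.InitContext(context.Background())

	// Same shape as the acceptance test's spark-pipeline.yml above.
	src := "name: My Pipeline\nunknown_attribute: foo\n"
	dv, err := yamlloader.LoadYAML("spark-pipeline.yml", strings.NewReader(src))
	if err != nil {
		panic(err)
	}

	out := pipelineSpec{}
	// Normalize reports unknown fields as warning diagnostics rather than
	// failing outright or silently dropping them.
	normalized, diags := convert.Normalize(&out, dv)
	if diags.HasError() {
		panic(diags.Error())
	}
	for _, d := range diags {
		logdiag.LogDiag(ctx, d) // logs "Warning: unknown field: unknown_attribute"
	}

	// The normalized value converts cleanly into the typed struct.
	if err := convert.ToTyped(&out, normalized); err != nil {
		panic(err)
	}
	fmt.Println(out.Name) // "My Pipeline"
}

Fed the acceptance test's spark-pipeline.yml contents, this would surface the same unknown-field warning shown in output.txt above while still producing a usable typed value.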
@@ -261,6 +275,9 @@ func convertToResources(spec *sdpPipeline, resourceName, srcFolder string) (map[
	if err != nil {
		return nil, fmt.Errorf("failed to convert libraries into dyn.Value: %w", err)
	}
+	if librariesDyn.Kind() == dyn.KindNil {
+		librariesDyn = dyn.V([]dyn.Value{})
+	}

Contributor comment on lines +278 to +279: This code would benefit from a comment and/or a helper function that just gets librariesDyn.
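
One possible shape for the helper the reviewer suggests, sketched as an illustration only; it assumes the conversion above uses convert.FromTyped with a nil reference value, which lies outside the visible hunk:

// librariesValue converts the spec's libraries into a dyn.Value, substituting
// an explicit empty list when the slice is nil so the generated YAML always
// renders a `libraries: []` entry.
func librariesValue(libraries []sdpPipelineLibrary) (dyn.Value, error) {
	v, err := convert.FromTyped(libraries, dyn.NilValue)
	if err != nil {
		return dyn.InvalidValue, fmt.Errorf("failed to convert libraries into dyn.Value: %w", err)
	}
	if v.Kind() == dyn.KindNil {
		v = dyn.V([]dyn.Value{})
	}
	return v, nil
}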

	// maps are unordered, and the saver sorts keys by dyn.Location;
	// this helper function monotonically assigns locations as keys are created