Skip to content

Commit 7a3a6fc

Browse files
committed
pipelines: warn about unknown attributes
1 parent 9aab143 commit 7a3a6fc

6 files changed

Lines changed: 51 additions & 5 deletions

File tree

acceptance/pipelines/generate/unknown-attribute/out.test.toml

Lines changed: 5 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
Warning: unknown field: unknown_attribute
2+
in src/my_pipeline/spark-pipeline.yml:2:1
3+
4+
Generated pipeline configuration: resources/my_pipeline.pipeline.yml
5+
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
resources:
2+
pipelines:
3+
my_pipeline:
4+
name: My Pipeline
5+
catalog: ${var.catalog}
6+
schema: ${var.schema}
7+
root_path: ../src/my_pipeline
8+
serverless: true
9+
libraries: []
10+
environment:
11+
dependencies:
12+
- --editable ${workspace.file_path}
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
export ENABLE_PIPELINES_CLI=1
2+
3+
$CLI pipelines generate --existing-pipeline-dir src/my_pipeline
4+
5+
mv resources output/
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
name: My Pipeline
2+
unknown_attribute: foo

cmd/pipelines/generate.go

Lines changed: 22 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package pipelines
22

33
import (
4+
"context"
45
"errors"
56
"fmt"
67
"os"
@@ -13,6 +14,7 @@ import (
1314
"github.com/databricks/cli/libs/dyn/convert"
1415
"github.com/databricks/cli/libs/dyn/yamlloader"
1516
"github.com/databricks/cli/libs/dyn/yamlsaver"
17+
"github.com/databricks/cli/libs/logdiag"
1618
"github.com/databricks/databricks-sdk-go/service/pipelines"
1719
"github.com/spf13/cobra"
1820
)
@@ -50,8 +52,10 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
5052
}
5153

5254
cmd.RunE = func(cmd *cobra.Command, args []string) error {
55+
ctx := logdiag.InitContext(cmd.Context())
56+
cmd.SetContext(ctx)
57+
5358
folderPath := existingPipelineDir
54-
ctx := cmd.Context()
5559

5660
info, err := validateAndParsePath(folderPath)
5761
if err != nil {
@@ -66,7 +70,7 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
6670
}
6771
}
6872

69-
spec, err := parseSparkPipelineYAML(sparkPipelineFile)
73+
spec, err := parseSparkPipelineYAML(ctx, sparkPipelineFile)
7074
if err != nil {
7175
return fmt.Errorf("failed to parse %s: %w", sparkPipelineFile, err)
7276
}
@@ -181,6 +185,7 @@ type sdpPipeline struct {
181185
Catalog string `json:"catalog,omitempty"`
182186
Database string `json:"database,omitempty"`
183187
Libraries []sdpPipelineLibrary `json:"libraries,omitempty"`
188+
Storage string `json:"storage,omitempty"`
184189
Configuration map[string]string `json:"configuration,omitempty"`
185190
}
186191

@@ -195,7 +200,7 @@ type sdpPipelineLibraryGlob struct {
195200
}
196201

197202
// parseSparkPipelineYAML parses a spark-pipeline.yml file.
198-
func parseSparkPipelineYAML(filePath string) (*sdpPipeline, error) {
203+
func parseSparkPipelineYAML(ctx context.Context, filePath string) (*sdpPipeline, error) {
199204
file, err := os.Open(filePath)
200205
if err != nil {
201206
return nil, fmt.Errorf("failed to open %s: %w", filePath, err)
@@ -208,9 +213,18 @@ func parseSparkPipelineYAML(filePath string) (*sdpPipeline, error) {
208213
}
209214

210215
out := sdpPipeline{}
211-
err = convert.ToTyped(&out, dv)
216+
normalized, diags := convert.Normalize(&out, dv)
217+
if diags.HasError() {
218+
return nil, fmt.Errorf("failed to parse %s: %w", filePath, diags.Error())
219+
}
220+
221+
for _, diag := range diags {
222+
logdiag.LogDiag(ctx, diag)
223+
}
224+
225+
err = convert.ToTyped(&out, normalized)
212226
if err != nil {
213-
return nil, fmt.Errorf("failed to parse %s: %w", filePath, err)
227+
return nil, fmt.Errorf("failed to parse %s: %w", filePath, err)
214228
}
215229

216230
return &out, nil
@@ -261,6 +275,9 @@ func convertToResources(spec *sdpPipeline, resourceName, srcFolder string) (map[
261275
if err != nil {
262276
return nil, fmt.Errorf("failed to convert libraries into dyn.Value: %w", err)
263277
}
278+
if librariesDyn.Kind() == dyn.KindNil {
279+
librariesDyn = dyn.V([]dyn.Value{})
280+
}
264281

265282
// maps are unordered, and saver is sorting keys by dyn.Location
266283
// this is helper function to monotonically assign locations as keys are created

0 commit comments

Comments
 (0)