11package pipelines
22
33import (
4+ "context"
45 "errors"
56 "fmt"
67 "os"
@@ -13,6 +14,7 @@ import (
1314 "github.com/databricks/cli/libs/dyn/convert"
1415 "github.com/databricks/cli/libs/dyn/yamlloader"
1516 "github.com/databricks/cli/libs/dyn/yamlsaver"
17+ "github.com/databricks/cli/libs/logdiag"
1618 "github.com/databricks/databricks-sdk-go/service/pipelines"
1719 "github.com/spf13/cobra"
1820)
@@ -50,8 +52,10 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
5052 }
5153
5254 cmd .RunE = func (cmd * cobra.Command , args []string ) error {
55+ ctx := logdiag .InitContext (cmd .Context ())
56+ cmd .SetContext (ctx )
57+
5358 folderPath := existingPipelineDir
54- ctx := cmd .Context ()
5559
5660 info , err := validateAndParsePath (folderPath )
5761 if err != nil {
@@ -66,7 +70,7 @@ Use --existing-pipeline-dir to generate pipeline configuration from spark-pipeli
6670 }
6771 }
6872
69- spec , err := parseSparkPipelineYAML (sparkPipelineFile )
73+ spec , err := parseSparkPipelineYAML (ctx , sparkPipelineFile )
7074 if err != nil {
7175 return fmt .Errorf ("failed to parse %s: %w" , sparkPipelineFile , err )
7276 }
@@ -181,6 +185,7 @@ type sdpPipeline struct {
181185 Catalog string `json:"catalog,omitempty"`
182186 Database string `json:"database,omitempty"`
183187 Libraries []sdpPipelineLibrary `json:"libraries,omitempty"`
188+ Storage string `json:"storage,omitempty"`
184189 Configuration map [string ]string `json:"configuration,omitempty"`
185190}
186191
@@ -195,7 +200,7 @@ type sdpPipelineLibraryGlob struct {
195200}
196201
197202// parseSparkPipelineYAML parses a spark-pipeline.yml file.
198- func parseSparkPipelineYAML (filePath string ) (* sdpPipeline , error ) {
203+ func parseSparkPipelineYAML (ctx context. Context , filePath string ) (* sdpPipeline , error ) {
199204 file , err := os .Open (filePath )
200205 if err != nil {
201206 return nil , fmt .Errorf ("failed to open %s: %w" , filePath , err )
@@ -208,9 +213,18 @@ func parseSparkPipelineYAML(filePath string) (*sdpPipeline, error) {
208213 }
209214
210215 out := sdpPipeline {}
211- err = convert .ToTyped (& out , dv )
216+ normalized , diags := convert .Normalize (& out , dv )
217+ if diags .HasError () {
218+ return nil , fmt .Errorf ("failed to parse %s: %w" , filePath , diags .Error ())
219+ }
220+
221+ for _ , diag := range diags {
222+ logdiag .LogDiag (ctx , diag )
223+ }
224+
225+ err = convert .ToTyped (& out , normalized )
212226 if err != nil {
213- return nil , fmt .Errorf ("failed to parse %s: %w" , filePath , err )
227 return nil , fmt .Errorf ("failed to parse %s: %w" , filePath , err )
214228 }
215229
216230 return & out , nil
@@ -261,6 +275,9 @@ func convertToResources(spec *sdpPipeline, resourceName, srcFolder string) (map[
261275 if err != nil {
262276 return nil , fmt .Errorf ("failed to convert libraries into dyn.Value: %w" , err )
263277 }
278+ if librariesDyn .Kind () == dyn .KindNil {
279+ librariesDyn = dyn .V ([]dyn.Value {})
280+ }
264281
265282 // maps are unordered, and saver is sorting keys by dyn.Location
266283 // this is helper function to monotonically assign locations as keys are created
0 commit comments