From c7dde7d154bf1a17af68dd3b78c1b4c2eb158269 Mon Sep 17 00:00:00 2001 From: Steve Ramage Date: Wed, 14 Jan 2026 12:25:43 -0800 Subject: [PATCH] feat: add support for epcc get-all (Resolves #632) --- .github/workflows/test.yml | 10 + cmd/get-all-smoke-tests.sh | 161 +++ cmd/get-all.go | 1076 +++++++++++++++++ cmd/helper.go | 4 + cmd/root.go | 3 + .../map_collection_response_to_ids.go | 13 +- external/json/to_json.go | 6 + external/resources/resources.go | 8 + external/resources/resources_schema.json | 10 + .../resources/yaml/commerce-extensions.yaml | 17 + external/resources/yaml/customers.yaml | 2 + external/resources/yaml/payments.yaml | 4 +- external/resources/yaml/pxm.yaml | 18 + external/runbooks/run-all-runbooks.sh | 3 +- external/toposort/toposort.go | 100 ++ external/toposort/toposort_test.go | 78 ++ 16 files changed, 1507 insertions(+), 6 deletions(-) create mode 100755 cmd/get-all-smoke-tests.sh create mode 100644 cmd/get-all.go create mode 100644 external/toposort/toposort.go create mode 100644 external/toposort/toposort_test.go diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f94239c6..3d96173a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -55,6 +55,16 @@ jobs: run: | go test -v -cover ./cmd/ ./external/... + - name: Get-All Smoke Test + timeout-minutes: 10 + env: + EPCC_CLIENT_ID: ${{ secrets.EPCC_CLIENT_ID }} + EPCC_CLIENT_SECRET: ${{ secrets.EPCC_CLIENT_SECRET }} + EPCC_API_BASE_URL: ${{ vars.EPCC_API_BASE_URL }} + run: | + export PATH=./bin/:$PATH + ./cmd/get-all-smoke-tests.sh + - name: Runbook Smoke Test timeout-minutes: 15 env: diff --git a/cmd/get-all-smoke-tests.sh b/cmd/get-all-smoke-tests.sh new file mode 100755 index 00000000..312a5a39 --- /dev/null +++ b/cmd/get-all-smoke-tests.sh @@ -0,0 +1,161 @@ +#!/usr/bin/env bash + +# Round-trip smoke test for the get-all command +# 1. Create resources from different families +# 2. Export them with a single get-all --output-format epcc-cli +# 3. Delete the resources +# 4. Run the exported script to recreate them +# 5. 
Verify resources were recreated + +set -e + +TEMP_DIR=$(mktemp -d) + +cleanup() { + rm -rf "$TEMP_DIR" +} +trap cleanup EXIT + +echo "=== get-all Round-Trip Smoke Test ===" +epcc reset-store .+ + +# Step 1: Create resources from different families +echo "=== Step 1: Creating test resources ===" + +# Account and sub-resource (account-address) +epcc create account name "get-all-test-account" legal_name "Test Account for get-all" +epcc create account-address account/name=get-all-test-account name "Test Address" first_name "John" last_name "Doe" line_1 "123 Test St" city "Test City" postcode "H0H 0H0" county "Test County" country "US" + +# Customer +epcc create customer name "get-all-test-customer" email "get-all-test@example.com" + + +# Custom API with fields and entry +epcc create custom-api name "smoke-test-api" slug "smoke-test-api" api_type "smoke_test_ext" description "Smoke test API" +epcc create custom-field custom_api/slug=smoke-test-api name "test_string" description blah slug "test_string" field_type "string" +epcc create custom-field custom_api/slug=smoke-test-api name "test_int" description blah slug "test_int" field_type "integer" +epcc create custom-field custom_api/slug=smoke-test-api name "test_bool" description blah slug "test_bool" field_type "boolean" +epcc create custom-api-settings-entry custom_api/slug=smoke-test-api data.test_string "hello world" data.test_int 42 data.test_bool true data.type "smoke_test_ext" + +# PCM resources with hierarchy and node-product relationships +epcc create pcm-product --auto-fill name "Smoke Test Product" +epcc create pcm-hierarchy name "smoke-test-hierarchy" +epcc create pcm-node name=smoke-test-hierarchy name "Parent Node" --auto-fill +epcc create pcm-node name=smoke-test-hierarchy name "Child Node" --auto-fill relationships.parent.data.id name=Parent_Node +epcc create pcm-node-product name=smoke-test-hierarchy name=Child_Node data\[0\].id last_read=entity + +echo "=== Step 2: Export all resources with single get-all ===" + +epcc get-all accounts account-addresses customers custom-apis custom-fields custom-api-settings-entries \ + pcm-products pcm-hierarchies pcm-nodes pcm-node-products \ + --output-file "$TEMP_DIR/export.sh" --output-format epcc-cli --truncate-output + +echo "=== Step 3: Delete resources ===" + +# Delete in reverse dependency order +epcc delete-all custom-api-settings-entries +epcc delete-all custom-fields +epcc delete-all custom-apis +epcc delete-all customers +epcc delete-all account-addresses +epcc delete-all accounts +epcc delete-all pcm-nodes +epcc delete-all pcm-hierarchies +epcc delete-all pcm-products + +echo "=== Step 4: Run exported script to recreate resources ===" + +"$TEMP_DIR/export.sh" + +echo "=== Step 5: Verify resources were recreated ===" + +# Check account exists (use collection query since aliases aren't saved with --skip-alias-processing) +# Note: --output-jq returns JSON-formatted values (strings have quotes), so we strip them with tr +ACCOUNT_ID=$(epcc get accounts --output-jq '.data[] | select(.name == "get-all-test-account") | .id' 2>/dev/null | tr -d '"' || echo "") +if [ -z "$ACCOUNT_ID" ]; then + echo "FAIL: Account get-all-test-account not found after recreation" + exit 1 +fi +echo "PASS: Account recreated (id: $ACCOUNT_ID)" + +# Check account-address exists (use ID since aliases aren't saved with --skip-alias-processing) +ADDRESS_COUNT=$(epcc get account-addresses "$ACCOUNT_ID" --output-jq '.meta.results.total' 2>/dev/null | tr -d '"' || echo "0") +if [ "$ADDRESS_COUNT" -lt 1 ]; then + echo 
"FAIL: No account-addresses found after recreation" + exit 1 +fi +echo "PASS: Account-addresses recreated ($ADDRESS_COUNT found)" + +# Check customer exists +CUSTOMER_COUNT=$(epcc get customers --output-jq '.meta.results.total' | tr -d '"') +if [ "$CUSTOMER_COUNT" -lt 1 ]; then + echo "FAIL: No customers found after recreation" + exit 1 +fi +echo "PASS: Customers recreated ($CUSTOMER_COUNT found)" + +# Check custom-api exists (use collection query since aliases aren't saved with --skip-alias-processing) +CUSTOM_API_ID=$(epcc get custom-apis --output-jq '.data[] | select(.slug == "smoke-test-api") | .id' 2>/dev/null | tr -d '"' || echo "") +if [ -z "$CUSTOM_API_ID" ]; then + echo "FAIL: Custom API smoke-test-api not found after recreation" + epcc get custom-apis + exit 1 +fi +echo "PASS: Custom API recreated (id: $CUSTOM_API_ID)" + +# Check custom-fields exist (use ID since aliases aren't saved with --skip-alias-processing) +FIELD_COUNT=$(epcc get custom-fields "$CUSTOM_API_ID" --output-jq '.meta.results.total' 2>/dev/null | tr -d '"' || echo "0") +if [ "$FIELD_COUNT" -lt 3 ]; then + echo "FAIL: Expected at least 3 custom-fields, found $FIELD_COUNT" + exit 1 +fi +echo "PASS: Custom fields recreated ($FIELD_COUNT found)" + +# Check custom-api-settings-entry exists (use ID since aliases aren't saved with --skip-alias-processing) +ENTRY_COUNT=$(epcc get custom-api-settings-entries "$CUSTOM_API_ID" --output-jq '.meta.results.total' 2>/dev/null | tr -d '"' || echo "0") +if [ "$ENTRY_COUNT" -lt 1 ]; then + echo "FAIL: No custom-api-settings-entries found after recreation" + exit 1 +fi +echo "PASS: Custom API entries recreated ($ENTRY_COUNT found)" + +# Check pcm-product exists +PRODUCT_ID=$(epcc get pcm-products --output-jq '.data[] | select(.attributes.name == "Smoke Test Product") | .id' 2>/dev/null | tr -d '"' || echo "") +if [ -z "$PRODUCT_ID" ]; then + echo "FAIL: PCM product 'Smoke Test Product' not found after recreation" + exit 1 +fi +echo "PASS: PCM product recreated (id: $PRODUCT_ID)" + +# Check pcm-hierarchy exists +HIERARCHY_ID=$(epcc get pcm-hierarchies --output-jq '.data[] | select(.attributes.name == "smoke-test-hierarchy") | .id' 2>/dev/null | tr -d '"' || echo "") +if [ -z "$HIERARCHY_ID" ]; then + echo "FAIL: PCM hierarchy 'smoke-test-hierarchy' not found after recreation" + exit 1 +fi +echo "PASS: PCM hierarchy recreated (id: $HIERARCHY_ID)" + +# Check pcm-nodes exist +NODE_COUNT=$(epcc get pcm-nodes "$HIERARCHY_ID" --output-jq '.data | length' 2>/dev/null | tr -d '"' || echo "0") +if [ "$NODE_COUNT" -lt 2 ]; then + echo "FAIL: Expected at least 2 pcm-nodes, found $NODE_COUNT" + exit 1 +fi +echo "PASS: PCM nodes recreated ($NODE_COUNT found)" + +# Check pcm-node-products exist (get child node ID first) +CHILD_NODE_ID=$(epcc get pcm-nodes "$HIERARCHY_ID" --output-jq '.data[] | select(.attributes.name == "Child Node") | .id' 2>/dev/null | tr -d '"' || echo "") +if [ -n "$CHILD_NODE_ID" ]; then + NODE_PRODUCT_COUNT=$(epcc get pcm-node-products "$HIERARCHY_ID" "$CHILD_NODE_ID" --output-jq '.data | length' 2>/dev/null | tr -d '"' || echo "0") + if [ "$NODE_PRODUCT_COUNT" -lt 1 ]; then + echo "FAIL: No pcm-node-products found after recreation" + exit 1 + fi + echo "PASS: PCM node-products recreated ($NODE_PRODUCT_COUNT found)" +else + echo "FAIL: Child node not found for pcm-node-products verification" + exit 1 +fi + +echo "" +echo "=== get-all Round-Trip Smoke Test PASSED ===" diff --git a/cmd/get-all.go b/cmd/get-all.go new file mode 100644 index 00000000..fa8940cf --- /dev/null 
+++ b/cmd/get-all.go @@ -0,0 +1,1076 @@ +package cmd + +/* +get-all: Export all instances of one or more resource types from an EPCC store. + +Usage: + epcc get-all # Single resource (uses subcommand) + epcc get-all ... # Multiple resources + +Algorithm Overview: + +1. CROSS-RESOURCE DEPENDENCY RESOLUTION (for multiple resources) + When multiple resources are requested, we determine the order to process them: + - Build a dependency graph based on URL templates (e.g., account-addresses depends on accounts + because its URL is /v2/accounts/{accountId}/addresses) + - Also consider RESOURCE_ID attribute dependencies and explicit export-depends-on declarations + (for subtle dependencies not visible in URL structure, e.g., custom-fields -> custom-api-settings-entries) + - Use topological sort to determine processing order + - Process resources in dependency order so parent aliases exist before children reference them + +2. PARENT RESOLUTION (per resource) + Many EPCC resources are nested under parent resources (e.g., customer-addresses + are under customers, entries are under flows). Before we can fetch the target + resource, we must first discover all parent resource IDs. This is done recursively: + - Parse the resource URL template to find parent types (e.g., /v2/customers/{customerId}/addresses) + - For each parent type, recursively fetch all IDs using the same algorithm + - This produces a list of "parent paths" - each path is a list of IDs leading to the target + +3. PAGINATION + For each parent path, we paginate through the target resource collection: + - Fetch pages of 100 items at a time using page[limit] and page[offset] + - Continue until we get an empty page or detect duplicate results (some endpoints don't paginate) + - Send each page's raw JSON to the output processor via a channel + +4. OUTPUT PROCESSING (runs concurrently) + A goroutine receives pages and processes them according to the output format: + - jsonl/json/csv: Transform and output the data directly + - epcc-cli/epcc-cli-runbook: Generate `epcc create` commands to recreate resources + +5. TOPOLOGICAL SORTING (for self-referential resources) + Some resources reference other resources of the same type (e.g., hierarchical nodes + with parent_id pointing to another node). 
For these: + - Build a dependency graph as we process records + - Use Kahn's algorithm to determine creation order (dependencies before dependents) + - Output commands in stages where each stage can be run in parallel + +Output Formats: + - jsonl: One JSON object per line (default, streamable) + - json: Single JSON array containing all results + - csv: Flattened CSV with dot-notation headers + - epcc-cli: Shell commands to recreate resources via `epcc create` + - epcc-cli-runbook: Same as epcc-cli but formatted for runbook YAML +*/ + +import ( + "context" + gojson "encoding/json" + "fmt" + "io" + "net/url" + "os" + "reflect" + "sort" + "strconv" + "strings" + "sync" + + "github.com/elasticpath/epcc-cli/external/apihelper" + "github.com/elasticpath/epcc-cli/external/clictx" + "github.com/elasticpath/epcc-cli/external/completion" + "github.com/elasticpath/epcc-cli/external/httpclient" + "github.com/elasticpath/epcc-cli/external/id" + "github.com/elasticpath/epcc-cli/external/json" + "github.com/elasticpath/epcc-cli/external/resources" + "github.com/elasticpath/epcc-cli/external/toposort" + log "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/thediveo/enumflag" + "github.com/yukithm/json2csv" + "github.com/yukithm/json2csv/jsonpointer" +) + +type OutputFormat enumflag.Flag + +const ( + Jsonl OutputFormat = iota + Json + Csv + EpccCli + EpccCliRunbook +) + +var OutputFormatIds = map[OutputFormat][]string{ + Jsonl: {"jsonl"}, + Json: {"json"}, + Csv: {"csv"}, + EpccCli: {"epcc-cli"}, + EpccCliRunbook: {"epcc-cli-runbook"}, +} + +// outputFormatCompletionFunc provides tab completion for the --output-format flag +var outputFormatCompletionFunc = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{ + "jsonl\tJSON Lines format (default, one object per line)", + "json\tSingle JSON array with all results", + "csv\tCSV format with flattened fields", + "epcc-cli\tGenerate epcc create commands", + "epcc-cli-runbook\tGenerate epcc create commands for runbook YAML", + }, cobra.ShellCompDirectiveNoFileComp +} + +func NewGetAllCommand(parentCmd *cobra.Command) func() { + + var outputFile string + var outputFormat OutputFormat + var truncateOutput bool + + // Note: Both the parent get-all command and each resource subcommand have RunE handlers. + // This is intentional to support two usage patterns: + // 1. Multi-resource mode: "epcc get-all accounts customers" - handled by parent RunE + // Cobra doesn't match "accounts" as a subcommand when followed by "customers" + // 2. Single-resource mode: "epcc get-all accounts" - handled by subcommand RunE + // Allows resource-specific help and tab completion + // Both ultimately call getAllInternal, but subcommands provide better UX for single resources. + var getAll = &cobra.Command{ + Use: "get-all [resource1] [resource2] ...", + Short: "Get all of one or more resources", + Long: `Get all instances of one or more resource types. + +When multiple resources are specified, they are processed in dependency order +(parent resources before children) so that aliases are available for reference. 
+ +Examples: + epcc get-all accounts # Get all accounts + epcc get-all accounts account-addresses # Get accounts then their addresses + epcc get-all --output-format epcc-cli accounts # Output as epcc create commands`, + SilenceUsage: false, + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) == 0 { + return fmt.Errorf("please specify a resource, epcc get-all [RESOURCE...], see epcc get-all --help") + } + // This handles unknown resources or when called directly without subcommand routing + return getAllInternal(clictx.Ctx, outputFormat, outputFile, truncateOutput, args) + }, + ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // Complete with plural resource names that support GET collection + return completion.Complete(completion.Request{ + Type: completion.CompletePluralResource, + Verb: completion.Get, // Get verb checks for GetCollectionInfo + }) + }, + } + + // Add flags to the root get-all command for multi-resource mode + getAll.Flags().StringVarP(&outputFile, "output-file", "o", "", "The file to output results to") + getAll.Flags().BoolVarP(&truncateOutput, "truncate-output", "t", false, "Truncate the output file before writing (instead of appending)") + getAll.Flags().VarP( + enumflag.New(&outputFormat, "output-format", OutputFormatIds, enumflag.EnumCaseInsensitive), + "output-format", "f", + "sets output format; can be 'jsonl', 'json', 'csv', 'epcc-cli', 'epcc-cli-runbook'") + _ = getAll.RegisterFlagCompletionFunc("output-format", outputFormatCompletionFunc) + + for _, resource := range resources.GetPluralResources() { + if resource.GetCollectionInfo == nil { + continue + } + + resourceName := resource.PluralName + + // Each subcommand gets its own flags + var subOutputFile string + var subOutputFormat OutputFormat + var subTruncateOutput bool + + var getAllResourceCmd = &cobra.Command{ + Use: resourceName, + Short: GetGetAllShort(resource), + Hidden: false, + RunE: func(cmd *cobra.Command, args []string) error { + return getAllInternal(clictx.Ctx, subOutputFormat, subOutputFile, subTruncateOutput, append([]string{resourceName}, args...)) + }, + ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + // Complete with additional plural resource names that support GET collection + return completion.Complete(completion.Request{ + Type: completion.CompletePluralResource, + Verb: completion.Get, + }) + }, + } + + getAllResourceCmd.Flags().StringVarP(&subOutputFile, "output-file", "o", "", "The file to output results to") + getAllResourceCmd.Flags().BoolVarP(&subTruncateOutput, "truncate-output", "t", false, "Truncate the output file before writing (instead of appending)") + + getAllResourceCmd.Flags().VarP( + enumflag.New(&subOutputFormat, "output-format", OutputFormatIds, enumflag.EnumCaseInsensitive), + "output-format", "f", + "sets output format; can be 'jsonl', 'json', 'csv', 'epcc-cli', 'epcc-cli-runbook'") + _ = getAllResourceCmd.RegisterFlagCompletionFunc("output-format", outputFormatCompletionFunc) + + getAll.AddCommand(getAllResourceCmd) + } + + parentCmd.AddCommand(getAll) + return func() {} + +} + +// sortResourcesByDependency orders resources so that parent resources come before children. +// Dependencies are determined by URL templates (e.g., account-addresses depends on accounts +// because its URL is /v2/accounts/{accountId}/addresses). 
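+//
+// A minimal sketch of the graph this builds, using the two resources from the
+// example above (the edge comes from the URL template of account-addresses):
+//
+//	g := toposort.NewGraph()
+//	g.AddNode("accounts")
+//	g.AddNode("account-addresses")
+//	g.AddEdge("accounts", "account-addresses") // /v2/accounts/{accountId}/addresses
+//	order, _ := g.TopologicalSort()            // ["accounts", "account-addresses"]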
+func sortResourcesByDependency(resourceList []resources.Resource) ([]resources.Resource, error) { + if len(resourceList) <= 1 { + return resourceList, nil + } + + // Build a map of resource names for quick lookup + requestedResources := make(map[string]resources.Resource) + for _, r := range resourceList { + requestedResources[r.PluralName] = r + requestedResources[r.SingularName] = r + } + + // Build dependency graph + graph := toposort.NewGraph() + + for _, resource := range resourceList { + graph.AddNode(resource.PluralName) + + // Get URL template dependencies + if resource.GetCollectionInfo != nil { + deps, err := resources.GetTypesOfVariablesNeeded(resource.GetCollectionInfo.Url) + if err != nil { + log.Warnf("Could not get URL dependencies for %s: %v", resource.PluralName, err) + continue + } + + for _, dep := range deps { + // Check if this dependency is in our requested list + if depResource, ok := requestedResources[dep]; ok { + // Add edge: dependency -> resource (dependency must come first) + graph.AddEdge(depResource.PluralName, resource.PluralName) + log.Debugf("Resource %s depends on %s (URL template)", resource.PluralName, depResource.PluralName) + } + } + } + + // Also check attribute-level RESOURCE_ID dependencies + for attrName, attr := range resource.Attributes { + if strings.HasPrefix(attr.Type, "RESOURCE_ID:") { + depType := strings.TrimPrefix(attr.Type, "RESOURCE_ID:") + if depResource, ok := requestedResources[depType]; ok { + // Only add if not self-referential (that's handled separately) + if depResource.PluralName != resource.PluralName { + graph.AddEdge(depResource.PluralName, resource.PluralName) + log.Debugf("Resource %s depends on %s (attribute %s)", resource.PluralName, depResource.PluralName, attrName) + } + } + } + } + + // Check explicit export dependencies (for subtle dependencies not visible in URL structure) + if len(resource.ExportDependsOn) > 0 { + log.Debugf("Resource %s has export-depends-on: %v", resource.PluralName, resource.ExportDependsOn) + } + for _, dep := range resource.ExportDependsOn { + if depResource, ok := requestedResources[dep]; ok { + graph.AddEdge(depResource.PluralName, resource.PluralName) + log.Debugf("Resource %s depends on %s (explicit export-depends-on)", resource.PluralName, depResource.PluralName) + } else { + log.Debugf("Resource %s has export-depends-on %s but it's not in the requested list", resource.PluralName, dep) + } + } + } + + // Topologically sort + sortedNames, err := graph.TopologicalSort() + if err != nil { + return nil, fmt.Errorf("dependency cycle detected: %w", err) + } + + // Convert back to Resource slice + result := make([]resources.Resource, 0, len(sortedNames)) + for _, name := range sortedNames { + if r, ok := requestedResources[name]; ok { + // Avoid duplicates (since we added both plural and singular names) + found := false + for _, existing := range result { + if existing.PluralName == r.PluralName { + found = true + break + } + } + if !found { + result = append(result, r) + } + } + } + + return result, nil +} + +// getResourceNames returns a slice of plural names for logging. 
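+//
+// For example (assuming these resources exist in the registry, as they do in
+// the smoke test above):
+//
+//	rs := []resources.Resource{
+//		resources.MustGetResourceByName("accounts"),
+//		resources.MustGetResourceByName("customers"),
+//	}
+//	names := getResourceNames(rs) // ["accounts", "customers"]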
+func getResourceNames(resourceList []resources.Resource) []string { + names := make([]string, len(resourceList)) + for i, r := range resourceList { + names[i] = r.PluralName + } + return names +} + +func writeJson(obj interface{}, writer io.Writer) error { + line, err := gojson.Marshal(&obj) + + if err != nil { + return fmt.Errorf("could not create JSON for %s, error: %v", line, err) + + } + + _, err = writer.Write(line) + + if err != nil { + return fmt.Errorf("could not save line %s, error: %v", line, err) + + } + + _, err = writer.Write([]byte{10}) + + if err != nil { + return fmt.Errorf("could not save line %s, error: %v", line, err) + } + + return nil +} + +func getAllInternal(ctx context.Context, outputFormat OutputFormat, outputFile string, truncateOutput bool, args []string) error { + if len(args) == 0 { + return fmt.Errorf("no resources specified") + } + + // Truncate output file if requested (do this once before processing any resources) + if truncateOutput && outputFile != "" { + if err := os.Truncate(outputFile, 0); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("could not truncate output file: %w", err) + } + } + + // Write shebang for epcc-cli format (shell script) + if outputFormat == EpccCli && outputFile != "" { + f, err := os.OpenFile(outputFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0755) + if err != nil { + return fmt.Errorf("could not open output file for shebang: %w", err) + } + _, err = f.WriteString("#!/bin/bash\nset -e\n") + f.Close() + if err != nil { + return fmt.Errorf("could not write shebang: %w", err) + } + } + + // Validate all resources and deduplicate + seen := make(map[string]bool) + resourceList := make([]resources.Resource, 0, len(args)) + + for _, name := range args { + resource, ok := resources.GetResourceByName(name) + if !ok { + return fmt.Errorf("could not find resource %s", name) + } + if resource.GetCollectionInfo == nil { + return fmt.Errorf("resource %s doesn't support GET collection", name) + } + // Deduplicate by plural name + if !seen[resource.PluralName] { + seen[resource.PluralName] = true + resourceList = append(resourceList, resource) + } else { + log.Debugf("Skipping duplicate resource %s", name) + } + } + + // Sort resources by dependency if there's more than one + if len(resourceList) > 1 { + var err error + resourceList, err = sortResourcesByDependency(resourceList) + if err != nil { + return fmt.Errorf("could not sort resources by dependency: %w", err) + } + log.Infof("Processing %d resources in dependency order: %v", len(resourceList), getResourceNames(resourceList)) + } + + // Process each resource + for i, resource := range resourceList { + if len(resourceList) > 1 { + log.Infof("Processing resource %d/%d: %s", i+1, len(resourceList), resource.PluralName) + } + + err := getAllSingleResource(ctx, outputFormat, outputFile, resource) + if err != nil { + return fmt.Errorf("error processing resource %s: %w", resource.PluralName, err) + } + } + + // Make output file executable for epcc-cli format + if outputFormat == EpccCli && outputFile != "" { + if err := os.Chmod(outputFile, 0755); err != nil { + return fmt.Errorf("could not make output file executable: %w", err) + } + } + + // Log success message + if outputFile != "" { + log.Infof("Successfully exported %d resource type(s) to %s", len(resourceList), outputFile) + } else { + log.Infof("Successfully exported %d resource type(s) to stdout", len(resourceList)) + } + + if outputFormat == EpccCli || outputFormat == EpccCliRunbook { + log.Warnf("Output to EPCC CLI format is 
currently BETA, please report any bugs on GitHub")
+	}
+
+	return nil
+}
+
+// getAllSingleResource fetches all instances of a single resource type.
+func getAllSingleResource(ctx context.Context, outputFormat OutputFormat, outputFile string, resource resources.Resource) error {
+	allParentEntityIds, err := getParentIdsForGetAll(ctx, resource)
+
+	if err != nil {
+		return fmt.Errorf("could not retrieve parent ids for resource %s, error: %w", resource.PluralName, err)
+	}
+
+	if len(allParentEntityIds) == 1 {
+		log.Debugf("Resource %s is a top-level resource, we only need to scan one path to get all resources", resource.PluralName)
+	} else {
+		log.Debugf("Resource %s is not a top-level resource, we need to scan %d paths to get all resources", resource.PluralName, len(allParentEntityIds))
+	}
+
+	var syncGroup = sync.WaitGroup{}
+
+	syncGroup.Add(1)
+
+	type idableAttributesWithType struct {
+		id.IdableAttributes
+		Type        string `yaml:"type,omitempty" json:"type,omitempty"`
+		EpccCliType string `yaml:"epcc_cli_type,omitempty" json:"epcc_cli_type,omitempty"`
+	}
+
+	type msg struct {
+		txt []byte
+		id  []idableAttributesWithType
+	}
+	var sendChannel = make(chan msg)
+
+	var writer io.Writer
+	if outputFile == "" {
+		writer = os.Stdout
+	} else {
+		file, err := os.OpenFile(outputFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)
+		if err != nil {
+			return fmt.Errorf("could not open output file: %w", err)
+		}
+		defer file.Close()
+		writer = file
+	}
+
+	topoSortNeeded := false
+
+	topoSortKeys := make([]string, 0)
+	for k, v := range resource.Attributes {
+		if (v.Type == fmt.Sprintf("RESOURCE_ID:%s", resource.PluralName)) || (v.Type == fmt.Sprintf("RESOURCE_ID:%s", resource.SingularName)) {
+			topoSortKeys = append(topoSortKeys, k)
+			topoSortNeeded = true
+		}
+	}
+
+	lines := map[string]string{}
+	graph := toposort.NewGraph()
+
+	outputWriter := func() {
+		defer syncGroup.Done()
+
+		csvLines := make([]interface{}, 0)
+
+		if outputFormat == EpccCliRunbook && !topoSortNeeded {
+			// Runbook YAML needs each block of commands prefixed with "- |"
+			_, err := writer.Write([]byte("- |\n"))
+
+			if err != nil {
+				log.Errorf("Error writing command: %v", err)
+			}
+		}
+
+	endMessages:
+		for {
+			select {
+			case result, ok := <-sendChannel:
+				if !ok {
+					log.Debugf("Channel closed, we are done.")
+					break endMessages
+				}
+				var obj interface{}
+				err := gojson.Unmarshal(result.txt, &obj)
+
+				if err != nil {
+					log.Errorf("Couldn't unmarshal JSON response %s due to error: %v", result.txt, err)
+					continue
+				}
+
+				newObjs, err := json.RunJQWithArray(".data[]", obj)
+
+				if err != nil {
+					log.Errorf("Couldn't process response %s due to error: %v", result.txt, err)
+					continue
+				}
+
+				// Check if this is an array-notation no-wrapping resource
+				usesArrayNotation := false
+				if resource.NoWrapping {
+					for attrKey := range resource.Attributes {
+						if strings.Contains(attrKey, "[n]") || strings.Contains(attrKey, "[0]") {
+							usesArrayNotation = true
+							break
+						}
+					}
+				}
+
+				// For array-notation no-wrapping resources with epcc-cli output,
+				// aggregate all items into a single command with incrementing indices
+				if usesArrayNotation && (outputFormat == EpccCli || outputFormat == EpccCliRunbook) && len(newObjs) > 0 {
+					sb := &strings.Builder{}
+
+					sb.WriteString("epcc create -s --skip-alias-processing ")
+					sb.WriteString(resource.SingularName)
+
+					// Use first item's ID for the alias (or create a composite alias)
+					var firstId = ""
+					if mp, ok := newObjs[0].(map[string]interface{}); ok {
+						if id, ok := mp["id"].(string); ok {
+							firstId = id
+						}
+					}
+
sb.WriteString(" ") + sb.WriteString("--save-as-alias") + sb.WriteString(" ") + sb.WriteString("exported_source_id=") + sb.WriteString(firstId) + + // Add parent resource references + for _, resId := range result.id { + sb.WriteString(" ") + sb.WriteString(resources.MustGetResourceByName(resId.EpccCliType).JsonApiType) + sb.WriteString("/") + sb.WriteString("exported_source_id=") + sb.WriteString(resId.Id) + } + + // Process each item with its index + for itemIdx, newObj := range newObjs { + kvs, err := json2csv.JSON2CSV(newObj) + if err != nil { + log.Errorf("Error generating Key/Value pairs for item %d: %v", itemIdx, err) + continue + } + + for _, kv := range kvs { + keys := kv.Keys() + sort.Strings(keys) + + nextKeyArray: + for _, k := range keys { + v := kv[k] + + jp, err := jsonpointer.New(k) + if err != nil { + log.Errorf("Couldn't generate JSON Pointer for %s: %v", k, err) + continue + } + + jsonPointerKey := jp.DotNotation(true) + + if strings.HasPrefix(jsonPointerKey, "meta.") { + continue + } + if strings.HasPrefix(jsonPointerKey, "links.") { + continue + } + + // Skip timestamps and other read-only fields + excludedPrefixes := []string{"created_at", "updated_at", "timestamps."} + for _, prefix := range excludedPrefixes { + if strings.HasPrefix(jsonPointerKey, prefix) { + continue nextKeyArray + } + } + + // Skip resource-specific excluded JSON pointers + for _, excluded := range resource.ExcludedJsonPointersFromImport { + if strings.HasPrefix(jsonPointerKey, excluded) { + continue nextKeyArray + } + } + + sb.WriteString(" ") + // Use incrementing index for each item + sb.WriteString(fmt.Sprintf("data[%d].", itemIdx)) + sb.WriteString(jsonPointerKey) + sb.WriteString(" ") + + // Check if this attribute is a RESOURCE_ID type by looking up the attribute definition + // Convert jsonPointerKey to the generic attribute key format (e.g., "id" -> "data[n].id") + attrKey := "data[n]." 
+ jsonPointerKey + isResourceId := false + if attr, ok := resource.Attributes[attrKey]; ok { + if strings.HasPrefix(attr.Type, "RESOURCE_ID:") { + isResourceId = true + } + } + + if s, ok := v.(string); ok { + if isResourceId { + // Use alias reference format for RESOURCE_ID attributes + sb.WriteString(`"`) + sb.WriteString("exported_source_id=") + sb.WriteString(s) + sb.WriteString(`"`) + } else { + sb.WriteString(`"`) + quoteArgument := json.ValueNeedsQuotes(s) + if quoteArgument { + sb.WriteString("\\\"") + } + value := strings.ReplaceAll(s, `\`, `\\`) + value = strings.ReplaceAll(value, `$`, `\$`) + value = strings.ReplaceAll(value, `"`, `\"`) + sb.WriteString(value) + if quoteArgument { + sb.WriteString("\\\"") + } + sb.WriteString(`"`) + } + } else { + sb.WriteString(fmt.Sprintf("%v", v)) + } + } + } + } + + sb.WriteString("\n") + + if outputFormat == EpccCliRunbook { + _, err := writer.Write([]byte(" ")) + if err != nil { + log.Errorf("Error writing command: %v", err) + } + } + + _, err = writer.Write([]byte(sb.String())) + if err != nil { + log.Errorf("Error writing command: %v", err) + } + + // Still need to handle jsonl/json/csv for array notation resources + for _, newObj := range newObjs { + wrappedObj := map[string]interface{}{ + "data": newObj, + "meta": map[string]interface{}{ + "_epcc_cli_parent_resources": result.id, + }, + } + if outputFormat == Jsonl { + err = writeJson(wrappedObj, writer) + if err != nil { + log.Errorf("Error writing JSON line: %v", err) + } + } else if outputFormat == Json || outputFormat == Csv { + csvLines = append(csvLines, wrappedObj) + } + } + continue // Skip the per-item processing below + } + + for _, newObj := range newObjs { + + wrappedObj := map[string]interface{}{ + "data": newObj, + "meta": map[string]interface{}{ + "_epcc_cli_parent_resources": result.id, + }, + } + + if outputFormat == Jsonl { + err = writeJson(wrappedObj, writer) + + if err != nil { + log.Errorf("Error writing JSON line: %v", err) + continue + } + } else if outputFormat == Json || outputFormat == Csv { + csvLines = append(csvLines, wrappedObj) + } else if outputFormat == EpccCli || outputFormat == EpccCliRunbook { + sb := &strings.Builder{} + + sb.WriteString("epcc create -s --skip-alias-processing ") + sb.WriteString(resource.SingularName) + + sb.WriteString(" ") + sb.WriteString("--save-as-alias") + sb.WriteString(" ") + sb.WriteString("exported_source_id=") + + var myId = "" + if mp, ok := newObj.(map[string]interface{}); ok { + // Try id at root level first, then under data + if id, ok := mp["id"].(string); ok { + myId = id + } else if dataMap, ok := mp["data"].(map[string]interface{}); ok { + if id, ok := dataMap["id"].(string); ok { + myId = id + } + } + sb.WriteString(myId) + } else { + log.Errorf("Error casting newObj to map[string]interface{}") + sb.WriteString("\n") + continue + } + + if topoSortNeeded { + graph.AddNode(myId) + } + + for _, resId := range result.id { + sb.WriteString(" ") + sb.WriteString(resources.MustGetResourceByName(resId.EpccCliType).JsonApiType) + sb.WriteString("/") + sb.WriteString("exported_source_id=") + sb.WriteString(resId.Id) + + } + + kvs, err := json2csv.JSON2CSV(newObj) + if err != nil { + log.Errorf("Error generating Key/Value pairs: %v", err) + sb.WriteString("\n") + continue + } + + for _, kv := range kvs { + + keys := kv.Keys() + + sort.Strings(keys) + + nextKey: + for _, k := range keys { + v := kv[k] + + jp, err := jsonpointer.New(k) + + if err != nil { + log.Errorf("Couldn't generate JSON Pointer for %s: %v", k, err) + + 
continue + } + + jsonPointerKey := jp.DotNotation(true) + + if strings.HasPrefix(jsonPointerKey, "meta.") { + continue + } + + if strings.HasPrefix(jsonPointerKey, "links.") { + continue + } + + // Skip id fields (id, data.id, data[n].id) unless no-wrapping (where data.id is needed for relationships) + if !resource.NoWrapping { + if jsonPointerKey == "id" || strings.HasPrefix(jsonPointerKey, "data.id") || + strings.HasPrefix(jsonPointerKey, "data[") && strings.HasSuffix(jsonPointerKey, "].id") { + continue + } + } + + // Skip type field unless no-wrapping (where data.type is needed) + if jsonPointerKey == "type" && !resource.NoWrapping { + continue + } + + // Skip timestamps and other read-only fields + excludedPrefixes := []string{"created_at", "updated_at", "timestamps."} + for _, prefix := range excludedPrefixes { + if strings.HasPrefix(jsonPointerKey, prefix) { + continue nextKey + } + } + + // Skip resource-specific excluded JSON pointers + for _, excluded := range resource.ExcludedJsonPointersFromImport { + if strings.HasPrefix(jsonPointerKey, excluded) { + continue nextKey + } + } + + sb.WriteString(" ") + // For no-wrapping resources, we need to prefix keys with "data." + // (array notation is handled separately above) + if resource.NoWrapping { + sb.WriteString("data.") + } + sb.WriteString(jsonPointerKey) + sb.WriteString(" ") + + if s, ok := v.(string); ok { + + writeValueFromJson := true + + for _, topoKey := range topoSortKeys { + if jsonPointerKey == topoKey { + dependentId := fmt.Sprintf("%s", v) + graph.AddEdge(dependentId, myId) + writeValueFromJson = false + sb.WriteString(`"`) + sb.WriteString("exported_source_id=") + sb.WriteString(dependentId) + sb.WriteString(`"`) + } + } + + if writeValueFromJson { + // This is to prevent shell characters from interpreting things + sb.WriteString(`"`) + + quoteArgument := json.ValueNeedsQuotes(s) + + if quoteArgument { + // This is to force the EPCC CLI to interpret the value as a string + sb.WriteString("\\\"") + } + value := strings.ReplaceAll(s, `\`, `\\`) + value = strings.ReplaceAll(value, `$`, `\$`) + value = strings.ReplaceAll(value, `"`, `\"`) + sb.WriteString(value) + + if quoteArgument { + // This is to force the EPCC CLI to interpret the value as a string + sb.WriteString("\\\"") + } + // This is to prevent shell characters from interpreting things + sb.WriteString(`"`) + } + } else { + sb.WriteString(fmt.Sprintf("%v", v)) + } + + } + } + + sb.WriteString("\n") + if topoSortNeeded { + lines[myId] = sb.String() + } else { + if outputFormat == EpccCliRunbook { + // We need to prefix + _, err := writer.Write([]byte(" ")) + + if err != nil { + log.Errorf("Error writing command: %v", err) + } + } + + _, err = writer.Write([]byte(sb.String())) + + if err != nil { + log.Errorf("Error writing command: %v", err) + } + } + } + } + } + } + + if outputFormat == Json { + err = writeJson(csvLines, writer) + + if err != nil { + log.Errorf("Error writing JSON line: %v", err) + } + } else if outputFormat == Csv { + + // Create writer that saves to string + results, err := json2csv.JSON2CSV(csvLines) + + if err != nil { + log.Errorf("Error converting to CSV: %v", err) + return + } + + csvWriter := json2csv.NewCSVWriter(writer) + + csvWriter.HeaderStyle = json2csv.DotBracketStyle + csvWriter.Transpose = false + + if err := csvWriter.WriteCSV(results); err != nil { + log.Errorf("Error writing CSV: %v", err) + return + } + } else if (outputFormat == EpccCli || outputFormat == EpccCliRunbook) && topoSortNeeded { + stages, err := 
graph.ParallelizableStages()
+
+			if err != nil {
+				log.Errorf("Error sorting data: %v", err)
+				return
+			}
+
+			for idx, stage := range stages {
+				writer.Write([]byte(fmt.Sprintf("# Stage %d\n", idx)))
+				if outputFormat == EpccCliRunbook {
+					writer.Write([]byte("- |\n"))
+				}
+
+				for _, id := range stage {
+					if outputFormat == EpccCliRunbook {
+						writer.Write([]byte("  "))
+					}
+
+					_, err = writer.Write([]byte(lines[id]))
+
+					if err != nil {
+						log.Errorf("Error writing command: %v", err)
+					}
+				}
+			}
+
+		}
+
+	}
+
+	go outputWriter()
+
+	for _, parentEntityIds := range allParentEntityIds {
+		lastIds := make([][]id.IdableAttributes, 1)
+
+		for offset := 0; ; offset += 100 {
+			// Check if context has been cancelled (e.g., user pressed Ctrl+C)
+			select {
+			case <-ctx.Done():
+				close(sendChannel)
+				syncGroup.Wait()
+				return ctx.Err()
+			default:
+			}
+
+			if offset > 10000 {
+				// Most pagination limits have a max offset of 10,000
+				log.Warnf("Maximum pagination offset reached, could not retrieve all records")
+				break
+			}
+			resourceURL, err := resources.GenerateUrlViaIdableAttributes(resource.GetCollectionInfo, parentEntityIds)
+
+			if err != nil {
+				return err
+			}
+
+			types, err := resources.GetSingularTypesOfVariablesNeeded(resource.GetCollectionInfo.Url)
+
+			if err != nil {
+				return err
+			}
+
+			params := url.Values{}
+			params.Add("page[limit]", "100")
+			params.Add("page[offset]", strconv.Itoa(offset))
+
+			resp, err := httpclient.DoRequest(ctx, "GET", resourceURL, params.Encode(), nil)
+
+			if err != nil {
+				return err
+			}
+
+			if resp.StatusCode >= 400 {
+				log.Warnf("Could not retrieve page of data, aborting")
+
+				break
+			}
+
+			bodyTxt, err := io.ReadAll(resp.Body)
+
+			if err != nil {
+				return err
+			}
+
+			ids, totalCount, err := apihelper.GetResourceIdsFromBody(bodyTxt)
+			resp.Body.Close()
+
+			if err != nil {
+				log.Warnf("Could not parse response body, aborting: %v", err)
+
+				break
+			}
+
+			allIds := make([][]id.IdableAttributes, 0)
+			for _, id := range ids {
+				allIds = append(allIds, append(parentEntityIds, id))
+			}
+
+			// Some endpoints ignore pagination parameters; if two consecutive
+			// pages return identical ids, assume no pagination and stop.
+			if reflect.DeepEqual(allIds, lastIds) {
+				log.Warnf("Data on the previous two pages did not change. Does this resource support pagination? Aborting export")
+
+				break
+			} else {
+				lastIds = allIds
+			}
+
+			idsWithType := make([]idableAttributesWithType, len(types))
+
+			for i, t := range types {
+				idsWithType[i].IdableAttributes = parentEntityIds[i]
+				idsWithType[i].EpccCliType = t
+				idsWithType[i].Type = resources.MustGetResourceByName(t).JsonApiType
+			}
+
+			sendChannel <- msg{
+				bodyTxt,
+				idsWithType,
+			}
+
+			if len(allIds) == 0 {
+				log.Infof("No more results for %s in %s, we are done", resource.PluralName, resourceURL)
+
+				break
+			} else {
+				if totalCount >= 0 {
+					log.Infof("Total number of %s in %s is %d", resource.PluralName, resourceURL, totalCount)
+				} else {
+					log.Infof("Total number of %s in %s is unknown", resource.PluralName, resourceURL)
+				}
+			}
+
+		}
+	}
+
+	close(sendChannel)
+
+	syncGroup.Wait()
+
+	return nil
+}
+
+// getParentIdsForGetAll retrieves all parent entity IDs for a resource.
+// This is similar to getParentIds in delete-all.go but uses a default page length.
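+//
+// For a nested resource, the result has one entry per discovered parent path;
+// a top-level resource yields a single empty path. A sketch (IDs illustrative):
+//
+//	// customer-addresses lives at /v2/customers/{customerId}/addresses, so this
+//	// returns one path per customer, e.g. [[{Id: "cust-1"}], [{Id: "cust-2"}]],
+//	// while a top-level resource such as customers returns [[]].
+//	paths, err := getParentIdsForGetAll(ctx, resources.MustGetResourceByName("customer-addresses"))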
+func getParentIdsForGetAll(ctx context.Context, resource resources.Resource) ([][]id.IdableAttributes, error) { + const defaultPageLength uint16 = 25 + + myEntityIds := make([][]id.IdableAttributes, 0) + if resource.GetCollectionInfo == nil { + return myEntityIds, fmt.Errorf("resource %s doesn't support GET collection", resource.PluralName) + } + + types, err := resources.GetTypesOfVariablesNeeded(resource.GetCollectionInfo.Url) + + if err != nil { + return myEntityIds, err + } + + if len(types) == 0 { + myEntityIds = append(myEntityIds, make([]id.IdableAttributes, 0)) + return myEntityIds, nil + } else { + immediateParentType := types[len(types)-1] + + parentResource, ok := resources.GetResourceByName(immediateParentType) + + if !ok { + return myEntityIds, fmt.Errorf("could not find parent resource %s", immediateParentType) + } + + return apihelper.GetAllIds(ctx, defaultPageLength, &parentResource) + } +} diff --git a/cmd/helper.go b/cmd/helper.go index bbc1bb29..cf33db89 100644 --- a/cmd/helper.go +++ b/cmd/helper.go @@ -396,6 +396,10 @@ func GetDeleteAllShort(resource resources.Resource) string { return fmt.Sprintf("Calls DELETE %s for every resource in GET %s", GetHelpResourceUrls(resource.DeleteEntityInfo.Url), GetHelpResourceUrls(resource.GetCollectionInfo.Url)) } +func GetGetAllShort(resource resources.Resource) string { + return fmt.Sprintf("Calls GET %s and iterates over all pages and parent resources (if applicable)", GetHelpResourceUrls(resource.GetCollectionInfo.Url)) +} + func GetGetLong(resourceName string, resourceUrl string, usageGetType string, completionVerb int, urlInfo *resources.CrudEntityInfo, resource resources.Resource) string { if DisableLongOutput { diff --git a/cmd/root.go b/cmd/root.go index 16ba21e0..77e0ee05 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -130,6 +130,9 @@ func InitializeCmd() { log.Tracef("Building Delete All Commands") NewDeleteAllCommand(RootCmd) + log.Tracef("Building Get All Commands") + NewGetAllCommand(RootCmd) + log.Tracef("Building Resource Info Commands") NewResourceInfoCommand(RootCmd) diff --git a/external/apihelper/map_collection_response_to_ids.go b/external/apihelper/map_collection_response_to_ids.go index d8f07beb..03b96293 100644 --- a/external/apihelper/map_collection_response_to_ids.go +++ b/external/apihelper/map_collection_response_to_ids.go @@ -3,10 +3,11 @@ package apihelper import ( json2 "encoding/json" "fmt" - "github.com/elasticpath/epcc-cli/external/id" - log "github.com/sirupsen/logrus" "io" "net/http" + + "github.com/elasticpath/epcc-cli/external/id" + log "github.com/sirupsen/logrus" ) func GetResourceIdsFromHttpResponse(resp *http.Response) ([]id.IdableAttributes, int, error) { @@ -17,8 +18,14 @@ func GetResourceIdsFromHttpResponse(resp *http.Response) ([]id.IdableAttributes, log.Fatal(err) } + return GetResourceIdsFromBody(body) +} + +// GetResourceIdsFromBody parses a JSON response body and extracts resource IDs. +// This is useful when you need to process the raw body bytes separately. 
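+//
+// A usage sketch, assuming the standard JSON:API collection shape (the total
+// count is read from the collection metadata when present):
+//
+//	body := []byte(`{"data":[{"id":"abc","type":"account"}],"meta":{"results":{"total":1}}}`)
+//	ids, total, err := GetResourceIdsFromBody(body)
+//	// err == nil, len(ids) == 1, ids[0].Id == "abc", total == 1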
+func GetResourceIdsFromBody(body []byte) ([]id.IdableAttributes, int, error) { var jsonStruct = map[string]interface{}{} - err = json2.Unmarshal(body, &jsonStruct) + err := json2.Unmarshal(body, &jsonStruct) if err != nil { return nil, 0, fmt.Errorf("response for get was not JSON: %w", err) } diff --git a/external/json/to_json.go b/external/json/to_json.go index c53f0523..4203665d 100644 --- a/external/json/to_json.go +++ b/external/json/to_json.go @@ -392,3 +392,9 @@ func formatValue(v string) string { return fmt.Sprintf("\"%s\"", v) } } + +// ValueNeedsQuotes returns true if a value would be treated as a literal (number, bool, null) +// and thus needs to be quoted if it should be treated as a string. +func ValueNeedsQuotes(v string) bool { + return TreatAsLiterals.MatchString(v) +} diff --git a/external/resources/resources.go b/external/resources/resources.go index 46e18dfa..1718f688 100644 --- a/external/resources/resources.go +++ b/external/resources/resources.go @@ -75,6 +75,14 @@ type Resource struct { // If another resource is used to create this resource, list it here CreatedBy []VerbResource `yaml:"created_by,omitempty"` + // Explicit dependencies for export ordering (resources that must be exported/created first) + // This is used when dependencies aren't visible in URL structure (e.g., custom-fields -> custom-api-settings-entries) + ExportDependsOn []string `yaml:"export-depends-on,omitempty"` + + // JSON pointers to exclude when generating epcc-cli import commands + // Useful for fields that are read-only or have different semantics on create (e.g., password on customers) + ExcludedJsonPointersFromImport []string `yaml:"excluded-json-pointers-from-import,omitempty"` + // Source Filename SourceFile string } diff --git a/external/resources/resources_schema.json b/external/resources/resources_schema.json index 219d00c4..e9830cd0 100644 --- a/external/resources/resources_schema.json +++ b/external/resources/resources_schema.json @@ -264,6 +264,16 @@ "required": ["verb", "resource"], "additionalProperties": false } + }, + "export-depends-on": { + "type": "array", + "items": { "type": "string" }, + "description": "Resources that must be exported/created before this one (for dependencies not visible in URL structure)" + }, + "excluded-json-pointers-from-import": { + "type": "array", + "items": { "type": "string" }, + "description": "JSON pointers to exclude when generating epcc-cli import commands (e.g., read-only fields)" } }, "required": [ "json-api-type", "json-api-format", "docs", "singular-name"] diff --git a/external/resources/yaml/commerce-extensions.yaml b/external/resources/yaml/commerce-extensions.yaml index cdb84294..34a51419 100644 --- a/external/resources/yaml/commerce-extensions.yaml +++ b/external/resources/yaml/commerce-extensions.yaml @@ -4,6 +4,8 @@ custom-apis: json-api-type: custom_api json-api-format: "legacy" docs: "https://elasticpath.dev/docs/api/commerce-extensions/custom-ap-is" + excluded-json-pointers-from-import: + - relationships.parent_apis delete-entity: docs: "https://elasticpath.dev/docs/api/commerce-extensions/delete-a-custom-api" url: "/v2/settings/extensions/custom-apis/{custom_apis}" @@ -114,6 +116,8 @@ custom-api-settings-entries: json-api-type: custom_entry json-api-format: "legacy" no-wrapping: true + export-depends-on: + - custom-fields docs: "https://elasticpath.dev/docs/api/commerce-extensions/custom-api-entries" delete-entity: docs: "https://elasticpath.dev/docs/api/commerce-extensions/delete-a-custom-entry" @@ -140,6 +144,8 @@ 
custom-api-extensions-entries: json-api-type: custom_entry json-api-format: "legacy" no-wrapping: true + export-depends-on: + - custom-fields docs: "https://elasticpath.dev/docs/api/commerce-extensions/custom-api-entries" delete-entity: docs: "https://elasticpath.dev/docs/api/commerce-extensions/delete-a-custom-entry-settings" @@ -222,3 +228,14 @@ custom-api-role-policies: type: CONST:built_in_role relationships.role.data.id: type: RESOURCE_ID:built-in-role +#custom-api-openapi-specifications: +# singular-name: custom-api-openapi-specification +# json_api_type: custom_api_openapi_specification +# json-api-format: legacy +# no-wrapping: true +# get-entity: +# docs: "https://elasticpath.dev/docs/api/permissions/get-a-custom-api-role-policy" +# url: "/v2/settings/extensions/specifications/openapi" +# get-collection: +# docs: "https://elasticpath.dev/docs/api/permissions/list-custom-api-role-policies" +# url: "/v2/settings/extensions/specifications/openapi" diff --git a/external/resources/yaml/customers.yaml b/external/resources/yaml/customers.yaml index ab50b0c3..2a223314 100644 --- a/external/resources/yaml/customers.yaml +++ b/external/resources/yaml/customers.yaml @@ -39,6 +39,8 @@ customers: autofill: FUNC:Email password: type: STRING + excluded-json-pointers-from-import: + - password customer-cart-associations: singular-name: customer-cart-association json-api-type: customer-cart-relationship diff --git a/external/resources/yaml/payments.yaml b/external/resources/yaml/payments.yaml index 6f06f5b1..da1797ed 100644 --- a/external/resources/yaml/payments.yaml +++ b/external/resources/yaml/payments.yaml @@ -195,12 +195,12 @@ payment-gateways-stripe-payment-intents: singular-name: "payment-gateway-stripe-payment-intent" json-api-type: "gateway" json-api-format: "legacy" - docs: "https://elasticpath.dev/docs/studio/Integrations/stripe-payment-intents" + docs: "https://developer.elasticpath.com/docs/api/payments/update-stripe-intents-gateway" get-entity: docs: "https://elasticpath.dev/docs/api/payments/gateways" url: "/v2/gateways/stripe_payment_intents" update-entity: - docs: "https://elasticpath.dev/docs/studio/Integrations/stripe-payment-intents" + docs: "https://developer.elasticpath.com/docs/api/payments/update-stripe-intents-gateway" url: "/v2/gateways/stripe_payment_intents" attributes: login: diff --git a/external/resources/yaml/pxm.yaml b/external/resources/yaml/pxm.yaml index b4759144..ef4278a5 100644 --- a/external/resources/yaml/pxm.yaml +++ b/external/resources/yaml/pxm.yaml @@ -128,6 +128,8 @@ pcm-hierarchies: type: STRING locales.en-US.description: type: STRING + excluded-json-pointers-from-import: + - relationships.children.links.related pcm-hierarchy-node-children: singular-name: "pcm-hierarchy-node-child" json-api-type: "node" @@ -162,6 +164,11 @@ pcm-node-products: type: RESOURCE_ID:pcm-product "data[n].type": type: CONST:product + excluded-json-pointers-from-import: + - attributes. + - relationships. + - meta. + - links. pcm-node-parents: singular-name: "pcm-node-parent" json-api-type: "node" @@ -212,6 +219,9 @@ pcm-nodes: type: CONST:node relationships.parent.data.id: type: RESOURCE_ID:pcm-nodes + excluded-json-pointers-from-import: + - relationships.children. + - relationships.products. pcm-product-main-image: singular-name: "pcm-product-main-image" json-api-type: "file" @@ -308,6 +318,14 @@ pcm-products: type: CONST:product ^components\.([a-zA-Z0-9-_]+)\.options\.quantity$: type: INT + excluded-json-pointers-from-import: + - relationships.children. 
+ - relationships.component_products. + - relationships.custom_relationships. + - relationships.files. + - relationships.main_image + - relationships.templates. + - relationships.variations. pcm-product-templates: singular-name: "pcm-product-template" json-api-type: "template" diff --git a/external/runbooks/run-all-runbooks.sh b/external/runbooks/run-all-runbooks.sh index 619a4648..b9467430 100755 --- a/external/runbooks/run-all-runbooks.sh +++ b/external/runbooks/run-all-runbooks.sh @@ -16,7 +16,8 @@ epcc # Smoke test for EPCC_CLI_READ_ONLY echo "Starting Read-Only Mode Smoke Test" epcc reset-store .+ - +EPCC_CLI_READ_ONLY=true epcc get accounts +EPCC_CLI_READ_ONLY=true epcc get-all accounts EPCC_CLI_READ_ONLY=true epcc create account --auto-fill && exit 1 || test $? -eq 4 EPCC_CLI_READ_ONLY=true epcc update account 00000000-0000-0000-0000-000000000000 name foo && exit 1 || test $? -eq 4 EPCC_CLI_READ_ONLY=true epcc delete account 00000000-0000-0000-0000-000000000000 && exit 1 || test $? -eq 4 diff --git a/external/toposort/toposort.go b/external/toposort/toposort.go new file mode 100644 index 00000000..a0daf2aa --- /dev/null +++ b/external/toposort/toposort.go @@ -0,0 +1,100 @@ +package toposort + +import ( + "fmt" + "sort" +) + +type Graph struct { + edges map[string][]string + vertices map[string]struct{} + indegree map[string]int // Track number of dependencies +} + +func NewGraph() *Graph { + return &Graph{ + edges: make(map[string][]string), + vertices: make(map[string]struct{}), + indegree: make(map[string]int), + } +} + +func (g *Graph) AddNode(v string) { + g.vertices[v] = struct{}{} + if _, exists := g.edges[v]; !exists { + g.edges[v] = []string{} // Ensure it exists in the adjacency list + } +} + +func (g *Graph) AddEdge(u, v string) { + g.vertices[u] = struct{}{} + g.vertices[v] = struct{}{} + g.edges[u] = append(g.edges[u], v) + g.indegree[v]++ // Track dependencies +} + +// ParallelizableStages determines parallel execution stages using Kahn's Algorithm +func (g *Graph) ParallelizableStages() ([][]string, error) { + indegree := make(map[string]int) + for v := range g.vertices { + indegree[v] = g.indegree[v] // Copy original indegrees + } + + queue := []string{} + for v, deg := range indegree { + if deg == 0 { + queue = append(queue, v) + } + } + + var levels [][]string + count := 0 + + for len(queue) > 0 { + sort.Strings(queue) // Sort the current stage before processing + var nextQueue []string + levels = append(levels, queue) // Nodes at current level + count += len(queue) + + for _, v := range queue { + for _, neighbor := range g.edges[v] { + indegree[neighbor]-- + if indegree[neighbor] == 0 { + nextQueue = append(nextQueue, neighbor) + } + } + } + + queue = nextQueue // Move to next level + } + + // If not all nodes were processed, there is a cycle + if count != len(g.vertices) { + var cycleNodes []string + for v, deg := range indegree { + if deg > 0 { + cycleNodes = append(cycleNodes, v) + } + } + sort.Strings(cycleNodes) + + return nil, fmt.Errorf("cycle detected in graph : %v", cycleNodes) + } + + return levels, nil +} + +// TopologicalSort simply flattens ParallelizableStages output +func (g *Graph) TopologicalSort() ([]string, error) { + stages, err := g.ParallelizableStages() + if err != nil { + return nil, err + } + + var order []string + for _, stage := range stages { + order = append(order, stage...) 
+ } + + return order, nil +} diff --git a/external/toposort/toposort_test.go b/external/toposort/toposort_test.go new file mode 100644 index 00000000..679258ba --- /dev/null +++ b/external/toposort/toposort_test.go @@ -0,0 +1,78 @@ +package toposort + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestTopoSort(t *testing.T) { + // Fixture Setup + g := NewGraph() + // Execute SUT + g.AddEdge("c", "b") + g.AddEdge("b", "a") + + sort, err := g.TopologicalSort() + + // Verification + require.NoError(t, err) + require.Equal(t, []string{"c", "b", "a"}, sort) +} + +func TestTopoSortWithExtraNodes(t *testing.T) { + // Fixture Setup + g := NewGraph() + // Execute SUT + g.AddNode("c") + g.AddEdge("c", "b") + g.AddNode("d") + g.AddEdge("b", "a") + g.AddNode("a") + + sort, err := g.TopologicalSort() + + // Verification + require.NoError(t, err) + require.Equal(t, []string{"c", "d", "b", "a"}, sort) +} + +func TestTopoSortWithStagesAndExtraNodes(t *testing.T) { + // Fixture Setup + g := NewGraph() + // Execute SUT + g.AddNode("c") + g.AddEdge("c", "b") + g.AddNode("d") + g.AddEdge("b", "a") + g.AddNode("a") + g.AddEdge("b", "x") + g.AddEdge("y", "z") + g.AddEdge("x", "z") + g.AddEdge("f", "y") + g.AddEdge("w", "x") + + sort, err := g.ParallelizableStages() + + // Verification + require.NoError(t, err) + require.Equal(t, [][]string{{"c", "d", "f", "w"}, {"b", "y"}, {"a", "x"}, {"z"}}, sort) +} + +func TestTopoSortCircle(t *testing.T) { + // Fixture Setup + g := NewGraph() + + // Execute SUT + g.AddEdge("e", "c") + g.AddEdge("c", "b") + g.AddEdge("b", "a") + g.AddEdge("a", "c") + g.AddEdge("d", "b") + g.AddEdge("c", "f") + + _, err := g.TopologicalSort() + + // Verification + require.ErrorContains(t, err, "cycle detected in graph") +}
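+
+// ParallelizableStages sorts node names lexicographically within each stage
+// (see sort.Strings in the implementation), which keeps get-all output
+// deterministic across runs over the same graph. A minimal check of that
+// property (node names here are illustrative):
+func TestParallelizableStagesAreSortedWithinAStage(t *testing.T) {
+	// Fixture Setup
+	g := NewGraph()
+
+	// Execute SUT
+	g.AddEdge("root", "zebra")
+	g.AddEdge("root", "apple")
+	g.AddEdge("root", "mango")
+
+	stages, err := g.ParallelizableStages()
+
+	// Verification
+	require.NoError(t, err)
+	require.Equal(t, [][]string{{"root"}, {"apple", "mango", "zebra"}}, stages)
+}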