6 changes: 6 additions & 0 deletions .changes/next-release/feature-AWSSDKforJavav-0ca473f.json
@@ -0,0 +1,6 @@
{
"type": "feature",
"category": "AWS SDK for Java v2",
"contributor": "",
"description": "Migrate PartitionMetadata code generation from endpoints.json to partitions.json"
}
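
For orientation before the generator changes below: after this migration, partition metadata is derived from the per-partition "outputs" block of partitions.json (partition id, DNS suffixes, FIPS and dual-stack support flags) rather than from the partitionName/defaults/variants structure of endpoints.json. A minimal, hypothetical sketch of those inputs follows; the accessor names in the comments mirror the ones used in this PR, and the concrete values are the well-known ones for the aws partition, not something this changelog asserts.

// Hypothetical orientation sketch; not SDK code and not part of this PR.
public class PartitionsJsonInputsSketch {
    public static void main(String[] args) {
        String id = "aws";                     // PartitionRegionsMetadata.getId()
        String dnsSuffix = "amazonaws.com";    // outputs.getDnsSuffix()          (illustrative value)
        String dualStackDnsSuffix = "api.aws"; // outputs.getDualStackDnsSuffix() (illustrative value)
        boolean supportsFips = true;           // outputs.isSupportsFIPS()
        boolean supportsDualStack = true;      // outputs.isSupportsDualStack()

        // partitions.json carries no separate display name (endpoints.json had partitionName),
        // which is why the generator below adds a hardcoded display-name map.
        System.out.printf("%s: %s / %s (fips=%s, dualstack=%s)%n",
                          id, dnsSuffix, dualStackDnsSuffix, supportsFips, supportsDualStack);
    }
}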
@@ -73,7 +73,7 @@ public void execute() throws MojoExecutionException {
Partitions partitions = RegionMetadataLoader.build(endpoints);
PartitionsRegionsMetadata regionPartitions = PartitionsRegionsMetadataLoader.build(partitionsJson);

generatePartitionMetadataClass(baseSourcesDirectory, partitions);
generatePartitionMetadataClass(baseSourcesDirectory, regionPartitions);
generateRegionClass(baseSourcesDirectory, regionPartitions);
generateServiceMetadata(baseSourcesDirectory, partitions);
generateRegions(baseSourcesDirectory, regionPartitions);
@@ -86,7 +86,7 @@ public void execute() throws MojoExecutionException {
project.addTestCompileSourceRoot(testsDirectory.toFile().getAbsolutePath());
}

public void generatePartitionMetadataClass(Path baseSourcesDirectory, Partitions partitions) {
public void generatePartitionMetadataClass(Path baseSourcesDirectory, PartitionsRegionsMetadata partitions) {
Path sourcesDirectory = baseSourcesDirectory.resolve(StringUtils.replace(PARTITION_METADATA_BASE, ".", "/"));
partitions.getPartitions()
.forEach(p -> new CodeGenerator(sourcesDirectory.toString(),
@@ -15,7 +15,6 @@

package software.amazon.awssdk.codegen.lite.regions;

import static java.util.Collections.emptyList;
import static javax.lang.model.element.Modifier.FINAL;
import static javax.lang.model.element.Modifier.PRIVATE;
import static javax.lang.model.element.Modifier.PUBLIC;
@@ -29,7 +28,9 @@
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -38,16 +39,35 @@
import software.amazon.awssdk.annotations.SdkPublicApi;
import software.amazon.awssdk.codegen.lite.PoetClass;
import software.amazon.awssdk.codegen.lite.Utils;
import software.amazon.awssdk.codegen.lite.regions.model.Partition;
import software.amazon.awssdk.codegen.lite.regions.model.PartitionRegionsMetadata;
import software.amazon.awssdk.utils.ImmutableMap;
import software.amazon.awssdk.utils.StringUtils;

public class PartitionMetadataGenerator implements PoetClass {

private final Partition partition;
/**
* Hardcoded mapping of partition IDs to display names.
* This preserves backward compatibility since partitions.json only provides
* partition IDs, while the old endpoints.json had separate partitionName fields.
* New partitions will fall back to using their ID as the display name.
*/
private static final Map<String, String> PARTITION_DISPLAY_NAMES =
ImmutableMap.<String, String>builder()
.put("aws", "AWS Standard")
.put("aws-cn", "AWS China")
.put("aws-us-gov", "AWS GovCloud (US)")
.put("aws-iso", "AWS ISO (US)")
.put("aws-iso-b", "AWS ISOB (US)")
.put("aws-iso-e", "AWS ISOE (Europe)")
.put("aws-iso-f", "AWS ISOF")
.put("aws-eusc", "AWS EUSC")
.build();
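
// Illustrative note, not part of this change: the generated NAME constant below resolves
// through this map with the partition id itself as the fallback, e.g.
//   PARTITION_DISPLAY_NAMES.getOrDefault("aws-cn", "aws-cn")     -> "AWS China"
//   PARTITION_DISPLAY_NAMES.getOrDefault("aws-next", "aws-next") -> "aws-next"  (hypothetical new id, falls back to the id)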

private final PartitionRegionsMetadata partition;
private final String basePackage;
private final String regionBasePackage;

public PartitionMetadataGenerator(Partition partition,
public PartitionMetadataGenerator(PartitionRegionsMetadata partition,
String basePackage,
String regionBasePackage) {
this.partition = partition;
@@ -80,11 +100,12 @@ public TypeSpec poetClass() {
.build())
.addField(FieldSpec.builder(String.class, "ID")
.addModifiers(PRIVATE, FINAL, STATIC)
.initializer("$S", partition.getPartition())
.initializer("$S", partition.getId())
.build())
.addField(FieldSpec.builder(String.class, "NAME")
.addModifiers(PRIVATE, FINAL, STATIC)
.initializer("$S", partition.getPartitionName())
.initializer("$S", PARTITION_DISPLAY_NAMES.getOrDefault(
partition.getId(), partition.getId()))
.build())
.addField(FieldSpec.builder(String.class, "REGION_REGEX")
.addModifiers(PRIVATE, FINAL, STATIC)
@@ -103,18 +124,33 @@ private CodeBlock dnsSuffixes() {
CodeBlock.builder()
.add("$T.<$T, $T>builder()", ImmutableMap.class, partitionEndpointKeyClass(), String.class);

String defaultDnsSuffix = partition.getOutputs().getDnsSuffix();
String dualStackDnsSuffix = partition.getOutputs().getDualStackDnsSuffix();
boolean supportsFips = partition.getOutputs().isSupportsFIPS();
boolean supportsDualStack = partition.getOutputs().isSupportsDualStack();

builder.add(".put(")
.add(partitionEndpointKey(emptyList()))
.add(", $S)", partition.getDnsSuffix());

if (partition.getDefaults() != null) {
partition.getDefaults().getVariants().forEach(variant -> {
if (variant.getDnsSuffix() != null) {
builder.add(".put(")
.add(partitionEndpointKey(variant.getTags()))
.add(", $S)", variant.getDnsSuffix());
}
});
.add(partitionEndpointKey(Collections.emptyList()))
.add(", $S)", defaultDnsSuffix);

if (supportsFips) {
builder.add(".put(")
.add(partitionEndpointKey(Collections.singletonList("fips")))
.add(", $S)", defaultDnsSuffix);
}

if (supportsDualStack && supportsFips) {
validateDualStackDnsSuffix(dualStackDnsSuffix);
builder.add(".put(")
.add(partitionEndpointKey(Arrays.asList("dualstack", "fips")))
.add(", $S)", dualStackDnsSuffix);
}

if (supportsDualStack) {
validateDualStackDnsSuffix(dualStackDnsSuffix);
builder.add(".put(")
.add(partitionEndpointKey(Collections.singletonList("dualstack")))
.add(", $S)", dualStackDnsSuffix);
}

return builder.add(".build()").build();
@@ -125,19 +161,32 @@ private CodeBlock hostnames() {
CodeBlock.builder()
.add("$T.<$T, $T>builder()", ImmutableMap.class, partitionEndpointKeyClass(), String.class);

boolean supportsFips = partition.getOutputs().isSupportsFIPS();
boolean supportsDualStack = partition.getOutputs().isSupportsDualStack();
String dualStackDnsSuffix = partition.getOutputs().getDualStackDnsSuffix();

if (partition.getDefaults() != null) {
builder.add(".put(")
.add(partitionEndpointKey(Collections.emptyList()))
.add(", $S)", "{service}.{region}.{dnsSuffix}");

if (supportsFips) {
builder.add(".put(")
.add(partitionEndpointKey(Collections.singletonList("fips")))
.add(", $S)", "{service}-fips.{region}.{dnsSuffix}");
}

if (supportsDualStack && supportsFips) {
validateDualStackDnsSuffix(dualStackDnsSuffix);
builder.add(".put(")
.add(partitionEndpointKey(Arrays.asList("dualstack", "fips")))
.add(", $S)", "{service}-fips.{region}.{dnsSuffix}");
}

if (supportsDualStack) {
validateDualStackDnsSuffix(dualStackDnsSuffix);
builder.add(".put(")
.add(partitionEndpointKey(emptyList()))
.add(", $S)", partition.getDefaults().getHostname());

partition.getDefaults().getVariants().forEach(variant -> {
if (variant.getHostname() != null) {
builder.add(".put(")
.add(partitionEndpointKey(variant.getTags()))
.add(", $S)", variant.getHostname());
}
});
.add(partitionEndpointKey(Collections.singletonList("dualstack")))
.add(", $S)", "{service}.{region}.{dnsSuffix}");
}

return builder.add(".build()").build();
@@ -165,7 +214,7 @@ private MethodSpec hostnameGetter() {

@Override
public ClassName className() {
return ClassName.get(basePackage, Stream.of(partition.getPartition().split("-"))
return ClassName.get(basePackage, Stream.of(partition.getId().split("-"))
.map(Utils::capitalize)
.collect(Collectors.joining()) + "PartitionMetadata");
}
@@ -179,6 +228,13 @@ private MethodSpec getter(String methodName, String field) {
.build();
}

private void validateDualStackDnsSuffix(String dualStackDnsSuffix) {
if (StringUtils.isBlank(dualStackDnsSuffix)) {
throw new IllegalStateException("Partition " + partition.getId()
+ " claims to support dualstack but dualStackDnsSuffix is null or empty");
}
}

private CodeBlock partitionEndpointKey(Collection<String> tags) {
CodeBlock.Builder result = CodeBlock.builder();
result.add("$T.builder()", partitionEndpointKeyClass());
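
To make the new dnsSuffixes()/hostnames() branching concrete, here is a self-contained sketch, using plain JDK collections rather than the SDK's generated code, of the tag-to-suffix table the generator now derives from the partitions.json outputs. The amazonaws.com and api.aws values are the well-known suffixes for the aws partition and are illustrative only.

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DnsSuffixTableSketch {
    public static void main(String[] args) {
        // Illustrative inputs for the "aws" partition (not asserted by this diff).
        String dnsSuffix = "amazonaws.com";
        String dualStackDnsSuffix = "api.aws";
        boolean supportsFips = true;
        boolean supportsDualStack = true;

        // Mirrors the branch structure of dnsSuffixes() above: one entry per supported
        // endpoint-tag combination, keyed here by the tag list for simplicity.
        Map<List<String>, String> dnsSuffixes = new LinkedHashMap<>();
        dnsSuffixes.put(Collections.emptyList(), dnsSuffix);
        if (supportsFips) {
            dnsSuffixes.put(Collections.singletonList("fips"), dnsSuffix);
        }
        if (supportsDualStack && supportsFips) {
            dnsSuffixes.put(Arrays.asList("dualstack", "fips"), dualStackDnsSuffix);
        }
        if (supportsDualStack) {
            dnsSuffixes.put(Collections.singletonList("dualstack"), dualStackDnsSuffix);
        }

        dnsSuffixes.forEach((tags, suffix) -> System.out.println(tags + " -> " + suffix));
        // [] -> amazonaws.com
        // [fips] -> amazonaws.com
        // [dualstack, fips] -> api.aws
        // [dualstack] -> api.aws
    }
}

The hostnames() method walks the same four branches, emitting {service}.{region}.{dnsSuffix} for the untagged and dualstack keys and {service}-fips.{region}.{dnsSuffix} for the two FIPS-tagged keys.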
@@ -105,7 +105,7 @@ public void serviceMetadataProviderClass() {

@Test
public void partitionMetadataClass() {
PartitionMetadataGenerator partitionMetadataGenerator = new PartitionMetadataGenerator(partitions.getPartitions().get(0),
PartitionMetadataGenerator partitionMetadataGenerator = new PartitionMetadataGenerator(partitionsRegions.getPartitions().get(0),
PARTITION_METADATA_BASE,
REGION_BASE);

@@ -30,7 +30,7 @@ public final class AwsPartitionMetadata implements PartitionMetadata {

private static final String NAME = "AWS Standard";

private static final String REGION_REGEX = "^(us|eu|ap|sa|ca|me|af)\\-\\w+\\-\\d+$";
private static final String REGION_REGEX = "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$";

@Override
public String id() {
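
The widened REGION_REGEX above adds the il and mx prefixes used by the newer Israel and Mexico regions. A quick self-contained check (the region codes are examples, not taken from this diff):

import java.util.Arrays;
import java.util.regex.Pattern;

public class RegionRegexCheck {
    public static void main(String[] args) {
        Pattern awsRegionRegex = Pattern.compile("^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$");
        // il-central-1 and mx-central-1 only match once il|mx are part of the prefix alternation.
        Arrays.asList("us-east-1", "il-central-1", "mx-central-1", "cn-north-1")
              .forEach(r -> System.out.println(r + " -> " + awsRegionRegex.matcher(r).matches()));
        // Prints true for the first three; cn-north-1 belongs to the aws-cn partition and does not match.
    }
}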
1 change: 1 addition & 0 deletions test/region-testing/pom.xml
@@ -102,6 +102,7 @@
</goals>
<configuration>
<endpoints>${basedir}/src/test/resources/variants-test-endpoints.json</endpoints>
<partitionsJson>${basedir}/src/test/resources/variants-test-partitions.json</partitionsJson>
</configuration>
</execution>
</executions>