diff --git a/Contentstack.Management.ASPNETCore/LICENSE.txt b/Contentstack.Management.ASPNETCore/LICENSE.txt
index 501f936..4382a0d 100644
--- a/Contentstack.Management.ASPNETCore/LICENSE.txt
+++ b/Contentstack.Management.ASPNETCore/LICENSE.txt
@@ -1,6 +1,6 @@
MIT License
-Copyright © 2012-2025 Contentstack. All Rights Reserved
+Copyright © 2012-2026 Contentstack. All Rights Reserved
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/Contentstack.Management.ASPNETCore/contentstack.management.aspnetcore.csproj b/Contentstack.Management.ASPNETCore/contentstack.management.aspnetcore.csproj
index 679d686..039b9ac 100644
--- a/Contentstack.Management.ASPNETCore/contentstack.management.aspnetcore.csproj
+++ b/Contentstack.Management.ASPNETCore/contentstack.management.aspnetcore.csproj
@@ -5,7 +5,7 @@
contentstack.management.aspnetcore
$(Version)
Contentstack
- Copyright © 2012-2025 Contentstack. All Rights Reserved
+ Copyright © 2012-2026 Contentstack. All Rights Reserved
Contentstack
https://github.com/contentstack/contentstack-management-dotnet
Initial Release
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/MockHttpHandlerWithRetries.cs b/Contentstack.Management.Core.Unit.Tests/Mokes/MockHttpHandlerWithRetries.cs
new file mode 100644
index 0000000..c485ad8
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/MockHttpHandlerWithRetries.cs
@@ -0,0 +1,150 @@
+using System;
+using System.Collections.Generic;
+using System.Net;
+using System.Net.Http;
+using Contentstack.Management.Core.Http;
+using Contentstack.Management.Core.Internal;
+using Contentstack.Management.Core.Runtime.Contexts;
+using Contentstack.Management.Core.Runtime.Pipeline;
+using Newtonsoft.Json;
+
+namespace Contentstack.Management.Core.Unit.Tests.Mokes
+{
+ /// <summary>
+ /// Mock HTTP handler that can simulate failures and successes for retry testing.
+ /// </summary>
+ public class MockHttpHandlerWithRetries : IPipelineHandler
+ {
+ private readonly Queue<Func<IExecutionContext, IResponse>> _responseQueue;
+ private readonly Queue<Exception> _exceptionQueue;
+ private int _callCount = 0;
+
+ public ILogManager LogManager { get; set; }
+ public IPipelineHandler InnerHandler { get; set; }
+ public int CallCount => _callCount;
+
+ public MockHttpHandlerWithRetries()
+ {
+ _responseQueue = new Queue<Func<IExecutionContext, IResponse>>();
+ _exceptionQueue = new Queue<Exception>();
+ }
+
+ /// <summary>
+ /// Adds a response that will be returned on the next call.
+ /// </summary>
+ public void AddResponse(HttpStatusCode statusCode, string body = null)
+ {
+ _responseQueue.Enqueue((context) =>
+ {
+ var response = new HttpResponseMessage(statusCode);
+ if (body != null)
+ {
+ response.Content = new StringContent(body);
+ }
+ return new ContentstackResponse(response, JsonSerializer.Create(new JsonSerializerSettings()));
+ });
+ }
+
+ /// <summary>
+ /// Adds a successful response (200 OK).
+ /// </summary>
+ public void AddSuccessResponse(string body = "{\"success\": true}")
+ {
+ AddResponse(HttpStatusCode.OK, body);
+ }
+
+ /// <summary>
+ /// Adds an exception that will be thrown on the next call.
+ /// </summary>
+ public void AddException(Exception exception)
+ {
+ _exceptionQueue.Enqueue(exception);
+ }
+
+ /// <summary>
+ /// Adds multiple failures followed by a success.
+ /// </summary>
+ public void AddFailuresThenSuccess(int failureCount, Exception failureException, string successBody = "{\"success\": true}")
+ {
+ for (int i = 0; i < failureCount; i++)
+ {
+ AddException(failureException);
+ }
+ AddSuccessResponse(successBody);
+ }
+
+ /// <summary>
+ /// Adds multiple HTTP error responses followed by a success.
+ /// </summary>
+ public void AddHttpErrorsThenSuccess(int errorCount, HttpStatusCode errorStatusCode, string successBody = "{\"success\": true}")
+ {
+ for (int i = 0; i < errorCount; i++)
+ {
+ AddResponse(errorStatusCode);
+ }
+ AddSuccessResponse(successBody);
+ }
+
+ public async System.Threading.Tasks.Task<T> InvokeAsync<T>(
+ IExecutionContext executionContext,
+ bool addAcceptMediaHeader = false,
+ string apiVersion = null)
+ {
+ _callCount++;
+
+ // Check for exceptions first
+ if (_exceptionQueue.Count > 0)
+ {
+ var exception = _exceptionQueue.Dequeue();
+ throw exception;
+ }
+
+ // Check for responses
+ if (_responseQueue.Count > 0)
+ {
+ var responseFactory = _responseQueue.Dequeue();
+ var response = responseFactory(executionContext);
+ executionContext.ResponseContext.httpResponse = response;
+ return await System.Threading.Tasks.Task.FromResult((T)response);
+ }
+
+ // Default: return success
+ var defaultResponse = new HttpResponseMessage(HttpStatusCode.OK);
+ defaultResponse.Content = new StringContent("{\"success\": true}");
+ var contentstackResponse = new ContentstackResponse(defaultResponse, JsonSerializer.Create(new JsonSerializerSettings()));
+ executionContext.ResponseContext.httpResponse = contentstackResponse;
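+ // Cast via IResponse: C# allows casting an interface to a type parameter, but not a concrete class to an unconstrained one.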
+ return await System.Threading.Tasks.Task.FromResult((T)(IResponse)contentstackResponse);
+ }
+
+ public void InvokeSync(
+ IExecutionContext executionContext,
+ bool addAcceptMediaHeader = false,
+ string apiVersion = null)
+ {
+ _callCount++;
+
+ // Check for exceptions first
+ if (_exceptionQueue.Count > 0)
+ {
+ var exception = _exceptionQueue.Dequeue();
+ throw exception;
+ }
+
+ // Check for responses
+ if (_responseQueue.Count > 0)
+ {
+ var responseFactory = _responseQueue.Dequeue();
+ var response = responseFactory(executionContext);
+ executionContext.ResponseContext.httpResponse = response;
+ return;
+ }
+
+ // Default: return success
+ var defaultResponse = new HttpResponseMessage(HttpStatusCode.OK);
+ defaultResponse.Content = new StringContent("{\"success\": true}");
+ var contentstackResponse = new ContentstackResponse(defaultResponse, JsonSerializer.Create(new JsonSerializerSettings()));
+ executionContext.ResponseContext.httpResponse = contentstackResponse;
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/MockNetworkErrorGenerator.cs b/Contentstack.Management.Core.Unit.Tests/Mokes/MockNetworkErrorGenerator.cs
new file mode 100644
index 0000000..8de4de1
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/MockNetworkErrorGenerator.cs
@@ -0,0 +1,55 @@
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Sockets;
+using System.Threading;
+using System.Threading.Tasks;
+using Contentstack.Management.Core.Exceptions;
+
+namespace Contentstack.Management.Core.Unit.Tests.Mokes
+{
+ /// <summary>
+ /// Utility to generate various network error exceptions for testing.
+ /// </summary>
+ public static class MockNetworkErrorGenerator
+ {
+ public static SocketException CreateSocketException(SocketError errorCode)
+ {
+ return new SocketException((int)errorCode);
+ }
+
+ public static HttpRequestException CreateHttpRequestExceptionWithSocketException(SocketError socketError)
+ {
+ var socketException = CreateSocketException(socketError);
+ return new HttpRequestException("Network error", socketException);
+ }
+
+ public static TaskCanceledException CreateTaskCanceledExceptionTimeout()
+ {
+ var cts = new CancellationTokenSource();
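+ // Token is left uncancelled: a TaskCanceledException without a cancelled token represents an HttpClient timeout.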
+ return new TaskCanceledException("Operation timed out", null, cts.Token);
+ }
+
+ public static TaskCanceledException CreateTaskCanceledExceptionUserCancellation()
+ {
+ var cts = new CancellationTokenSource();
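+ // Cancelling the token marks this as a deliberate user cancellation, which should not be treated as retryable.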
+ cts.Cancel();
+ return new TaskCanceledException("User cancelled", null, cts.Token);
+ }
+
+ public static TimeoutException CreateTimeoutException()
+ {
+ return new TimeoutException("Operation timed out");
+ }
+
+ public static ContentstackErrorException CreateContentstackErrorException(HttpStatusCode statusCode)
+ {
+ return new ContentstackErrorException
+ {
+ StatusCode = statusCode,
+ Message = $"HTTP {statusCode} error"
+ };
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/MockRetryPolicy.cs b/Contentstack.Management.Core.Unit.Tests/Mokes/MockRetryPolicy.cs
new file mode 100644
index 0000000..41b07dd
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/MockRetryPolicy.cs
@@ -0,0 +1,49 @@
+using System;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Contexts;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+
+namespace Contentstack.Management.Core.Unit.Tests.Mokes
+{
+ /// <summary>
+ /// Mock retry policy for testing RetryHandler in isolation.
+ /// </summary>
+ public class MockRetryPolicy : RetryPolicy
+ {
+ public bool ShouldRetryValue { get; set; } = true;
+ public bool CanRetryValue { get; set; } = true;
+ public bool RetryLimitExceededValue { get; set; } = false;
+ public TimeSpan WaitDelay { get; set; } = TimeSpan.FromMilliseconds(100);
+ public Exception LastException { get; private set; }
+ public int RetryCallCount { get; private set; }
+
+ public MockRetryPolicy()
+ {
+ RetryOnError = true;
+ RetryLimit = 5;
+ }
+
+ protected override bool RetryForException(IExecutionContext executionContext, Exception exception)
+ {
+ LastException = exception;
+ RetryCallCount++;
+ return ShouldRetryValue;
+ }
+
+ protected override bool CanRetry(IExecutionContext executionContext)
+ {
+ return CanRetryValue;
+ }
+
+ protected override bool RetryLimitExceeded(IExecutionContext executionContext)
+ {
+ return RetryLimitExceededValue;
+ }
+
+ internal override void WaitBeforeRetry(IExecutionContext executionContext)
+ {
+ System.Threading.Tasks.Task.Delay(WaitDelay).Wait();
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/Response/429Response.txt b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/429Response.txt
new file mode 100644
index 0000000..ff08912
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/429Response.txt
@@ -0,0 +1,9 @@
+HTTP/1.1 429 Too Many Requests
+content-type: application/json
+content-length: 56
+retry-after: 5
+date: Wed, 28 Apr 2021 11:11:34 GMT
+connection: Keep-Alive
+
+{"error_message": "Too many requests","error_code": 429}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/Response/500Response.txt b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/500Response.txt
new file mode 100644
index 0000000..a9dd5e3
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/500Response.txt
@@ -0,0 +1,8 @@
+HTTP/1.1 500 Internal Server Error
+content-type: application/json
+content-length: 60
+date: Wed, 28 Apr 2021 11:11:34 GMT
+connection: Keep-Alive
+
+{"error_message": "Internal server error","error_code": 500}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/Response/502Response.txt b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/502Response.txt
new file mode 100644
index 0000000..1f7c50f
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/502Response.txt
@@ -0,0 +1,8 @@
+HTTP/1.1 502 Bad Gateway
+content-type: application/json
+content-length: 50
+date: Wed, 28 Apr 2021 11:11:34 GMT
+connection: Keep-Alive
+
+{"error_message": "Bad gateway","error_code": 502}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/Response/503Response.txt b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/503Response.txt
new file mode 100644
index 0000000..e0ce259
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/503Response.txt
@@ -0,0 +1,8 @@
+HTTP/1.1 503 Service Unavailable
+content-type: application/json
+content-length: 58
+date: Wed, 28 Apr 2021 11:11:34 GMT
+connection: Keep-Alive
+
+{"error_message": "Service unavailable","error_code": 503}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Mokes/Response/504Response.txt b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/504Response.txt
new file mode 100644
index 0000000..8267c6b
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Mokes/Response/504Response.txt
@@ -0,0 +1,8 @@
+HTTP/1.1 504 Gateway Timeout
+content-type: application/json
+content-length: 54
+date: Wed, 28 Apr 2021 11:11:34 GMT
+connection: Keep-Alive
+
+{"error_message": "Gateway timeout","error_code": 504}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/DefaultRetryPolicyTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/DefaultRetryPolicyTest.cs
new file mode 100644
index 0000000..3a7ef45
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/DefaultRetryPolicyTest.cs
@@ -0,0 +1,376 @@
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Sockets;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Contexts;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Contentstack.Management.Core.Unit.Tests.Mokes;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class DefaultRetryPolicyTest
+ {
+ [TestMethod]
+ public void Constructor_With_RetryConfiguration_Sets_Properties()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 3,
+ RetryDelay = TimeSpan.FromMilliseconds(200)
+ };
+
+ var policy = new DefaultRetryPolicy(config);
+
+ Assert.AreEqual(3, policy.RetryLimit);
+ }
+
+ [TestMethod]
+ public void Constructor_With_Legacy_Parameters_Sets_Properties()
+ {
+ var policy = new DefaultRetryPolicy(5, TimeSpan.FromMilliseconds(300));
+
+ Assert.AreEqual(5, policy.RetryLimit);
+ }
+
+ [TestMethod]
+ public void CanRetry_Respects_RetryOnError_From_Configuration()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnError = true
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+
+ var result = policy.CanRetry(context);
+
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void CanRetry_Fallback_To_RetryOnError_Property()
+ {
+ var policy = new DefaultRetryPolicy(5, TimeSpan.FromMilliseconds(300));
+ policy.RetryOnError = false;
+ var context = CreateExecutionContext();
+
+ var result = policy.CanRetry(context);
+
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void RetryForException_NetworkError_Respects_MaxNetworkRetries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 2
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset);
+
+ context.RequestContext.NetworkRetryCount = 1;
+ var result1 = policy.RetryForException(context, exception);
+ Assert.IsTrue(result1);
+
+ context.RequestContext.NetworkRetryCount = 2;
+ var result2 = policy.RetryForException(context, exception);
+ Assert.IsFalse(result2);
+ }
+
+ [TestMethod]
+ public void RetryForException_NetworkError_Increments_NetworkRetryCount()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 3
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset);
+
+ var result = policy.RetryForException(context, exception);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void RetryForException_HttpError_5xx_Respects_RetryLimit()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnHttpServerError = true,
+ RetryLimit = 2
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = MockNetworkErrorGenerator.CreateContentstackErrorException(HttpStatusCode.InternalServerError);
+
+ context.RequestContext.HttpRetryCount = 1;
+ var result1 = policy.RetryForException(context, exception);
+ Assert.IsTrue(result1);
+
+ context.RequestContext.HttpRetryCount = 2;
+ var result2 = policy.RetryForException(context, exception);
+ Assert.IsFalse(result2);
+ }
+
+ [TestMethod]
+ public void RetryForException_HttpError_5xx_Increments_HttpRetryCount()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnHttpServerError = true,
+ RetryLimit = 5
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = MockNetworkErrorGenerator.CreateContentstackErrorException(HttpStatusCode.InternalServerError);
+
+ var result = policy.RetryForException(context, exception);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void RetryForException_HttpError_429_Respects_RetryLimit()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = MockNetworkErrorGenerator.CreateContentstackErrorException(HttpStatusCode.TooManyRequests);
+
+ context.RequestContext.HttpRetryCount = 1;
+ var result1 = policy.RetryForException(context, exception);
+ Assert.IsTrue(result1);
+
+ context.RequestContext.HttpRetryCount = 2;
+ var result2 = policy.RetryForException(context, exception);
+ Assert.IsFalse(result2);
+ }
+
+ [TestMethod]
+ public void RetryForException_NetworkError_Exceeds_MaxNetworkRetries_Returns_False()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 1
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.NetworkRetryCount = 1;
+ var exception = MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset);
+
+ var result = policy.RetryForException(context, exception);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void RetryForException_HttpError_Exceeds_RetryLimit_Returns_False()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnHttpServerError = true,
+ RetryLimit = 1
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.HttpRetryCount = 1;
+ var exception = MockNetworkErrorGenerator.CreateContentstackErrorException(HttpStatusCode.InternalServerError);
+
+ var result = policy.RetryForException(context, exception);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void RetryForException_NonRetryableException_Returns_False()
+ {
+ var config = new RetryConfiguration();
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ var exception = new ArgumentException("Invalid argument");
+
+ var result = policy.RetryForException(context, exception);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void RetryLimitExceeded_Checks_Both_Network_And_Http_Counts()
+ {
+ var config = new RetryConfiguration
+ {
+ MaxNetworkRetries = 2,
+ RetryLimit = 3
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+
+ context.RequestContext.NetworkRetryCount = 1;
+ context.RequestContext.HttpRetryCount = 2;
+ var result1 = policy.RetryLimitExceeded(context);
+ Assert.IsFalse(result1);
+
+ context.RequestContext.NetworkRetryCount = 2;
+ context.RequestContext.HttpRetryCount = 3;
+ var result2 = policy.RetryLimitExceeded(context);
+ Assert.IsTrue(result2);
+ }
+
+ [TestMethod]
+ public void WaitBeforeRetry_Uses_NetworkDelay_For_NetworkRetries()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(50),
+ NetworkBackoffStrategy = BackoffStrategy.Fixed
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.NetworkRetryCount = 1;
+
+ var startTime = DateTime.UtcNow;
+ policy.WaitBeforeRetry(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Should wait approximately 50ms + jitter (0-100ms)
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(50));
+ Assert.IsTrue(elapsed <= TimeSpan.FromMilliseconds(200));
+ }
+
+ [TestMethod]
+ public void WaitBeforeRetry_Uses_HttpDelay_For_HttpRetries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(100),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(100)
+ }
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.HttpRetryCount = 1;
+ context.RequestContext.NetworkRetryCount = 0;
+
+ var startTime = DateTime.UtcNow;
+ policy.WaitBeforeRetry(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Should wait approximately 200ms (100ms * 2^1) + jitter
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(200));
+ Assert.IsTrue(elapsed <= TimeSpan.FromMilliseconds(300));
+ }
+
+ [TestMethod]
+ public void WaitBeforeRetry_Fallback_To_Legacy_Delay()
+ {
+ var policy = new DefaultRetryPolicy(5, TimeSpan.FromMilliseconds(150));
+ var context = CreateExecutionContext();
+
+ var startTime = DateTime.UtcNow;
+ policy.WaitBeforeRetry(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Should wait approximately 150ms
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(150));
+ Assert.IsTrue(elapsed <= TimeSpan.FromMilliseconds(200));
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Respects_Configuration()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryCondition = (statusCode) => statusCode == HttpStatusCode.NotFound
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+
+ var result = policy.ShouldRetryHttpStatusCode(HttpStatusCode.NotFound, context.RequestContext);
+ Assert.IsTrue(result);
+
+ var result2 = policy.ShouldRetryHttpStatusCode(HttpStatusCode.InternalServerError, context.RequestContext);
+ Assert.IsFalse(result2);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Respects_RetryLimit()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.HttpRetryCount = 2;
+
+ var result = policy.ShouldRetryHttpStatusCode(HttpStatusCode.TooManyRequests, context.RequestContext);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void GetHttpRetryDelay_Uses_DelayCalculator()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(200),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(200)
+ }
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.HttpRetryCount = 1;
+
+ var delay = policy.GetHttpRetryDelay(context.RequestContext, null);
+
+ // Should be approximately 400ms (200ms * 2^1) + jitter
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(400));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(500));
+ }
+
+ [TestMethod]
+ public void GetNetworkRetryDelay_Uses_DelayCalculator()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var context = CreateExecutionContext();
+ context.RequestContext.NetworkRetryCount = 2;
+
+ var delay = policy.GetNetworkRetryDelay(context.RequestContext);
+
+ // Should be approximately 200ms (100ms * 2^1) + jitter
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(200));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(300));
+ }
+
+ private ExecutionContext CreateExecutionContext()
+ {
+ return new ExecutionContext(
+ new RequestContext
+ {
+ config = new ContentstackClientOptions(),
+ service = new MockService()
+ },
+ new ResponseContext());
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/NetworkErrorDetectorTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/NetworkErrorDetectorTest.cs
new file mode 100644
index 0000000..620cd2c
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/NetworkErrorDetectorTest.cs
@@ -0,0 +1,340 @@
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Sockets;
+using System.Threading;
+using System.Threading.Tasks;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class NetworkErrorDetectorTest
+ {
+ private NetworkErrorDetector detector;
+
+ [TestInitialize]
+ public void Initialize()
+ {
+ detector = new NetworkErrorDetector();
+ }
+
+ [TestMethod]
+ public void Should_Detect_SocketException_ConnectionReset()
+ {
+ var exception = new SocketException((int)SocketError.ConnectionReset);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.SocketError, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ Assert.AreEqual(exception, result.OriginalException);
+ }
+
+ [TestMethod]
+ public void Should_Detect_SocketException_TimedOut()
+ {
+ var exception = new SocketException((int)SocketError.TimedOut);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.SocketError, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_SocketException_ConnectionRefused()
+ {
+ var exception = new SocketException((int)SocketError.ConnectionRefused);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.SocketError, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_SocketException_HostNotFound()
+ {
+ var exception = new SocketException((int)SocketError.HostNotFound);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.DnsFailure, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_SocketException_TryAgain()
+ {
+ var exception = new SocketException((int)SocketError.TryAgain);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.DnsFailure, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_TaskCanceledException_Timeout()
+ {
+ var cts = new CancellationTokenSource();
+ var exception = new TaskCanceledException("Operation timed out", null, cts.Token);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.Timeout, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Not_Detect_TaskCanceledException_UserCancellation()
+ {
+ var cts = new CancellationTokenSource();
+ cts.Cancel();
+ var exception = new TaskCanceledException("User cancelled", null, cts.Token);
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNull(result);
+ }
+
+ [TestMethod]
+ public void Should_Detect_TimeoutException()
+ {
+ var exception = new TimeoutException("Operation timed out");
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.Timeout, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_HttpRequestException_With_Inner_SocketException()
+ {
+ var socketException = new SocketException((int)SocketError.ConnectionReset);
+ var httpException = new HttpRequestException("Network error", socketException);
+ var result = detector.IsTransientNetworkError(httpException);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.SocketError, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_ContentstackErrorException_5xx()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.InternalServerError
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.HttpServerError, result.ErrorType);
+ Assert.IsTrue(result.IsTransient);
+ }
+
+ [TestMethod]
+ public void Should_Detect_ContentstackErrorException_502()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.BadGateway
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.HttpServerError, result.ErrorType);
+ }
+
+ [TestMethod]
+ public void Should_Detect_ContentstackErrorException_503()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.ServiceUnavailable
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.HttpServerError, result.ErrorType);
+ }
+
+ [TestMethod]
+ public void Should_Detect_ContentstackErrorException_504()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.GatewayTimeout
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(NetworkErrorType.HttpServerError, result.ErrorType);
+ }
+
+ [TestMethod]
+ public void Should_Not_Detect_ContentstackErrorException_4xx()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.BadRequest
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNull(result);
+ }
+
+ [TestMethod]
+ public void Should_Not_Detect_ContentstackErrorException_404()
+ {
+ var exception = new ContentstackErrorException
+ {
+ StatusCode = HttpStatusCode.NotFound
+ };
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNull(result);
+ }
+
+ [TestMethod]
+ public void Should_Return_Null_For_NonNetworkError()
+ {
+ var exception = new ArgumentException("Invalid argument");
+ var result = detector.IsTransientNetworkError(exception);
+
+ Assert.IsNull(result);
+ }
+
+ [TestMethod]
+ public void Should_Return_Null_For_Null()
+ {
+ var result = detector.IsTransientNetworkError(null);
+
+ Assert.IsNull(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_Respects_Configuration()
+ {
+ var socketException = new SocketException((int)SocketError.ConnectionReset);
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.SocketError, true, socketException);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_DnsFailure_Respects_RetryOnDnsFailure()
+ {
+ var socketException = new SocketException((int)SocketError.HostNotFound);
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.DnsFailure, true, socketException);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnDnsFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsTrue(result);
+
+ config.RetryOnDnsFailure = false;
+ result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_SocketError_Respects_RetryOnSocketFailure()
+ {
+ var socketException = new SocketException((int)SocketError.ConnectionReset);
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.SocketError, true, socketException);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsTrue(result);
+
+ config.RetryOnSocketFailure = false;
+ result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_HttpServerError_Respects_RetryOnHttpServerError()
+ {
+ var httpException = new ContentstackErrorException { StatusCode = HttpStatusCode.InternalServerError };
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.HttpServerError, true, httpException);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnHttpServerError = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsTrue(result);
+
+ config.RetryOnHttpServerError = false;
+ result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_Returns_False_When_RetryOnNetworkFailure_Is_False()
+ {
+ var socketException = new SocketException((int)SocketError.ConnectionReset);
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.SocketError, true, socketException);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = false,
+ RetryOnSocketFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_Returns_False_When_Not_Transient()
+ {
+ var exception = new ArgumentException("Not transient");
+ var errorInfo = new NetworkErrorInfo(NetworkErrorType.Unknown, false, exception);
+
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(errorInfo, config);
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryNetworkError_Returns_False_When_Null()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true
+ };
+
+ var result = detector.ShouldRetryNetworkError(null, config);
+ Assert.IsFalse(result);
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryConfigurationTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryConfigurationTest.cs
new file mode 100644
index 0000000..7f201de
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryConfigurationTest.cs
@@ -0,0 +1,111 @@
+using System;
+using System.Net;
+using Contentstack.Management.Core;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class RetryConfigurationTest
+ {
+ [TestMethod]
+ public void FromOptions_Creates_Configuration_With_All_Properties()
+ {
+ var options = new ContentstackClientOptions
+ {
+ RetryOnError = true,
+ RetryLimit = 5,
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryOnNetworkFailure = true,
+ RetryOnDnsFailure = true,
+ RetryOnSocketFailure = true,
+ RetryOnHttpServerError = true,
+ MaxNetworkRetries = 3,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential,
+ RetryCondition = (statusCode) => statusCode == HttpStatusCode.TooManyRequests,
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(300),
+ CustomBackoff = (retryCount, error) => TimeSpan.FromMilliseconds(500)
+ }
+ };
+
+ var config = RetryConfiguration.FromOptions(options);
+
+ Assert.AreEqual(options.RetryOnError, config.RetryOnError);
+ Assert.AreEqual(options.RetryLimit, config.RetryLimit);
+ Assert.AreEqual(options.RetryDelay, config.RetryDelay);
+ Assert.AreEqual(options.RetryOnNetworkFailure, config.RetryOnNetworkFailure);
+ Assert.AreEqual(options.RetryOnDnsFailure, config.RetryOnDnsFailure);
+ Assert.AreEqual(options.RetryOnSocketFailure, config.RetryOnSocketFailure);
+ Assert.AreEqual(options.RetryOnHttpServerError, config.RetryOnHttpServerError);
+ Assert.AreEqual(options.MaxNetworkRetries, config.MaxNetworkRetries);
+ Assert.AreEqual(options.NetworkRetryDelay, config.NetworkRetryDelay);
+ Assert.AreEqual(options.NetworkBackoffStrategy, config.NetworkBackoffStrategy);
+ Assert.AreEqual(options.RetryCondition, config.RetryCondition);
+ Assert.IsNotNull(config.RetryDelayOptions);
+ Assert.AreEqual(options.RetryDelayOptions.Base, config.RetryDelayOptions.Base);
+ Assert.AreEqual(options.RetryDelayOptions.CustomBackoff, config.RetryDelayOptions.CustomBackoff);
+ }
+
+ [TestMethod]
+ public void FromOptions_Handles_Null_RetryDelayOptions()
+ {
+ var options = new ContentstackClientOptions
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = null
+ };
+
+ var config = RetryConfiguration.FromOptions(options);
+
+ Assert.IsNotNull(config.RetryDelayOptions);
+ Assert.AreEqual(options.RetryDelay, config.RetryDelayOptions.Base);
+ }
+
+ [TestMethod]
+ public void FromOptions_Sets_RetryDelayOptions_Base_From_RetryDelay()
+ {
+ var options = new ContentstackClientOptions
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(500),
+ RetryDelayOptions = null
+ };
+
+ var config = RetryConfiguration.FromOptions(options);
+
+ Assert.AreEqual(TimeSpan.FromMilliseconds(500), config.RetryDelayOptions.Base);
+ }
+
+ [TestMethod]
+ public void Default_Values_Are_Correct()
+ {
+ var config = new RetryConfiguration();
+
+ Assert.IsTrue(config.RetryOnError);
+ Assert.AreEqual(5, config.RetryLimit);
+ Assert.AreEqual(TimeSpan.FromMilliseconds(300), config.RetryDelay);
+ Assert.IsTrue(config.RetryOnNetworkFailure);
+ Assert.IsTrue(config.RetryOnDnsFailure);
+ Assert.IsTrue(config.RetryOnSocketFailure);
+ Assert.IsTrue(config.RetryOnHttpServerError);
+ Assert.AreEqual(3, config.MaxNetworkRetries);
+ Assert.AreEqual(TimeSpan.FromMilliseconds(100), config.NetworkRetryDelay);
+ Assert.AreEqual(BackoffStrategy.Exponential, config.NetworkBackoffStrategy);
+ Assert.IsNull(config.RetryCondition);
+ Assert.IsNotNull(config.RetryDelayOptions);
+ }
+
+ [TestMethod]
+ public void RetryDelayOptions_Default_Values()
+ {
+ var options = new RetryDelayOptions();
+
+ Assert.AreEqual(TimeSpan.FromMilliseconds(300), options.Base);
+ Assert.IsNull(options.CustomBackoff);
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryDelayCalculatorTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryDelayCalculatorTest.cs
new file mode 100644
index 0000000..39ff621
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryDelayCalculatorTest.cs
@@ -0,0 +1,363 @@
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class RetryDelayCalculatorTest
+ {
+ private RetryDelayCalculator calculator;
+
+ [TestInitialize]
+ public void Initialize()
+ {
+ calculator = new RetryDelayCalculator();
+ }
+
+ [TestMethod]
+ public void CalculateNetworkRetryDelay_Exponential_FirstAttempt()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+
+ var delay = calculator.CalculateNetworkRetryDelay(1, config);
+
+ // First attempt: 100ms * 2^0 = 100ms + jitter (0-100ms)
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(100));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(200));
+ }
+
+ [TestMethod]
+ public void CalculateNetworkRetryDelay_Exponential_SecondAttempt()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+
+ var delay = calculator.CalculateNetworkRetryDelay(2, config);
+
+ // Second attempt: 100ms * 2^1 = 200ms + jitter (0-100ms)
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(200));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(300));
+ }
+
+ [TestMethod]
+ public void CalculateNetworkRetryDelay_Exponential_ThirdAttempt()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+
+ var delay = calculator.CalculateNetworkRetryDelay(3, config);
+
+ // Third attempt: 100ms * 2^2 = 400ms + jitter (0-100ms)
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(400));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(500));
+ }
+
+ [TestMethod]
+ public void CalculateNetworkRetryDelay_Fixed_AllAttempts()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(150),
+ NetworkBackoffStrategy = BackoffStrategy.Fixed
+ };
+
+ var delay1 = calculator.CalculateNetworkRetryDelay(1, config);
+ var delay2 = calculator.CalculateNetworkRetryDelay(2, config);
+ var delay3 = calculator.CalculateNetworkRetryDelay(3, config);
+
+ // All attempts should be ~150ms + jitter
+ Assert.IsTrue(delay1 >= TimeSpan.FromMilliseconds(150));
+ Assert.IsTrue(delay1 <= TimeSpan.FromMilliseconds(250));
+ Assert.IsTrue(delay2 >= TimeSpan.FromMilliseconds(150));
+ Assert.IsTrue(delay2 <= TimeSpan.FromMilliseconds(250));
+ Assert.IsTrue(delay3 >= TimeSpan.FromMilliseconds(150));
+ Assert.IsTrue(delay3 <= TimeSpan.FromMilliseconds(250));
+ }
+
+ [TestMethod]
+ public void CalculateNetworkRetryDelay_Includes_Jitter()
+ {
+ var config = new RetryConfiguration
+ {
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
+ NetworkBackoffStrategy = BackoffStrategy.Fixed
+ };
+
+ // Run multiple times to verify jitter is added
+ bool foundVariation = false;
+ var firstDelay = calculator.CalculateNetworkRetryDelay(1, config);
+
+ for (int i = 0; i < 10; i++)
+ {
+ var delay = calculator.CalculateNetworkRetryDelay(1, config);
+ if (delay != firstDelay)
+ {
+ foundVariation = true;
+ break;
+ }
+ }
+
+ // Jitter should cause some variation (though it's random, so not guaranteed)
+ // At minimum, verify the delay is within expected range
+ Assert.IsTrue(firstDelay >= TimeSpan.FromMilliseconds(100));
+ Assert.IsTrue(firstDelay <= TimeSpan.FromMilliseconds(200));
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Exponential_FirstRetry()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(300)
+ }
+ };
+
+ var delay = calculator.CalculateHttpRetryDelay(0, config, null);
+
+ // First retry: 300ms * 2^0 = 300ms + jitter
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(300));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(400));
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Exponential_SubsequentRetries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(300)
+ }
+ };
+
+ var delay1 = calculator.CalculateHttpRetryDelay(1, config, null);
+ var delay2 = calculator.CalculateHttpRetryDelay(2, config, null);
+
+ // Second retry: 300ms * 2^1 = 600ms + jitter
+ Assert.IsTrue(delay1 >= TimeSpan.FromMilliseconds(600));
+ Assert.IsTrue(delay1 <= TimeSpan.FromMilliseconds(700));
+
+ // Third retry: 300ms * 2^2 = 1200ms + jitter
+ Assert.IsTrue(delay2 >= TimeSpan.FromMilliseconds(1200));
+ Assert.IsTrue(delay2 <= TimeSpan.FromMilliseconds(1300));
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Respects_RetryAfter_Header_Delta()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300)
+ };
+
+ var response = new HttpResponseMessage(HttpStatusCode.TooManyRequests);
+ response.Headers.RetryAfter = new RetryConditionHeaderValue(TimeSpan.FromSeconds(5));
+
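+ // The Retry-After header value takes precedence over the computed backoff delay.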
+ var delay = calculator.CalculateHttpRetryDelay(0, config, null, response.Headers);
+
+ Assert.AreEqual(TimeSpan.FromSeconds(5), delay);
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Respects_RetryAfter_Header_Date()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300)
+ };
+
+ var response = new HttpResponseMessage(HttpStatusCode.TooManyRequests);
+ var retryAfterDate = DateTimeOffset.UtcNow.AddSeconds(3);
+ response.Headers.RetryAfter = new RetryConditionHeaderValue(retryAfterDate);
+
+ var delay = calculator.CalculateHttpRetryDelay(0, config, null, response.Headers);
+
+ // Should be approximately 3 seconds (allowing for small timing differences)
+ Assert.IsTrue(delay >= TimeSpan.FromSeconds(2.5));
+ Assert.IsTrue(delay <= TimeSpan.FromSeconds(3.5));
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Uses_CustomBackoff_When_Provided()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ CustomBackoff = (retryCount, error) => TimeSpan.FromMilliseconds(500 * (retryCount + 1))
+ }
+ };
+
+ var delay1 = calculator.CalculateHttpRetryDelay(0, config, null);
+ var delay2 = calculator.CalculateHttpRetryDelay(1, config, null);
+
+ Assert.AreEqual(TimeSpan.FromMilliseconds(500), delay1);
+ Assert.AreEqual(TimeSpan.FromMilliseconds(1000), delay2);
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_CustomBackoff_Returns_Zero_Disables_Retry()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ CustomBackoff = (retryCount, error) => retryCount >= 2 ? TimeSpan.Zero : TimeSpan.FromMilliseconds(100)
+ }
+ };
+
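+ // Per these tests, a zero or negative delay returned by CustomBackoff indicates the retry should not be performed.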
+ var delay1 = calculator.CalculateHttpRetryDelay(0, config, null);
+ var delay2 = calculator.CalculateHttpRetryDelay(1, config, null);
+ var delay3 = calculator.CalculateHttpRetryDelay(2, config, null);
+
+ Assert.AreEqual(TimeSpan.FromMilliseconds(100), delay1);
+ Assert.AreEqual(TimeSpan.FromMilliseconds(100), delay2);
+ Assert.AreEqual(TimeSpan.Zero, delay3);
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_CustomBackoff_Returns_Negative_Disables_Retry()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ CustomBackoff = (retryCount, error) => retryCount >= 2 ? TimeSpan.FromMilliseconds(-1) : TimeSpan.FromMilliseconds(100)
+ }
+ };
+
+ var delay1 = calculator.CalculateHttpRetryDelay(0, config, null);
+ var delay2 = calculator.CalculateHttpRetryDelay(1, config, null);
+ var delay3 = calculator.CalculateHttpRetryDelay(2, config, null);
+
+ Assert.AreEqual(TimeSpan.FromMilliseconds(100), delay1);
+ Assert.AreEqual(TimeSpan.FromMilliseconds(100), delay2);
+ Assert.IsTrue(delay3 < TimeSpan.Zero);
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Includes_Jitter()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(300),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(300)
+ }
+ };
+
+ var delay = calculator.CalculateHttpRetryDelay(0, config, null);
+
+ // Should be 300ms + jitter (0-100ms)
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(300));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(400));
+ }
+
+ [TestMethod]
+ public void CalculateHttpRetryDelay_Uses_RetryDelay_When_Base_Is_Zero()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryDelay = TimeSpan.FromMilliseconds(500),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.Zero
+ }
+ };
+
+ var delay = calculator.CalculateHttpRetryDelay(0, config, null);
+
+ // Should use RetryDelay (500ms) instead of Base
+ Assert.IsTrue(delay >= TimeSpan.FromMilliseconds(500));
+ Assert.IsTrue(delay <= TimeSpan.FromMilliseconds(600));
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_429()
+ {
+ var config = new RetryConfiguration();
+ var result = calculator.ShouldRetryHttpStatusCode(HttpStatusCode.TooManyRequests, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_500()
+ {
+ var config = new RetryConfiguration();
+ var result = calculator.ShouldRetryHttpStatusCode(HttpStatusCode.InternalServerError, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_502()
+ {
+ var config = new RetryConfiguration();
+ var result = calculator.ShouldRetryHttpStatusCode(HttpStatusCode.BadGateway, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_503()
+ {
+ var config = new RetryConfiguration();
+ var result = calculator.ShouldRetryHttpStatusCode(HttpStatusCode.ServiceUnavailable, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_504()
+ {
+ var config = new RetryConfiguration();
+ var result = calculator.ShouldRetryHttpStatusCode(HttpStatusCode.GatewayTimeout, config);
+ Assert.IsTrue(result);
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Default_Not_4xx()
+ {
+ var config = new RetryConfiguration();
+
+ Assert.IsFalse(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.BadRequest, config));
+ Assert.IsFalse(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.Unauthorized, config));
+ Assert.IsFalse(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.Forbidden, config));
+ Assert.IsFalse(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.NotFound, config));
+ }
+
+ [TestMethod]
+ public void ShouldRetryHttpStatusCode_Custom_Condition()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryCondition = (statusCode) => statusCode == HttpStatusCode.NotFound || statusCode == HttpStatusCode.TooManyRequests
+ };
+
+ Assert.IsTrue(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.NotFound, config));
+ Assert.IsTrue(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.TooManyRequests, config));
+ Assert.IsFalse(calculator.ShouldRetryHttpStatusCode(HttpStatusCode.InternalServerError, config));
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerIntegrationTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerIntegrationTest.cs
new file mode 100644
index 0000000..142b479
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerIntegrationTest.cs
@@ -0,0 +1,262 @@
+using System;
+using System.Net;
+using System.Net.Sockets;
+using Contentstack.Management.Core;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Contexts;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Contentstack.Management.Core.Unit.Tests.Mokes;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System.Threading.Tasks;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class RetryHandlerIntegrationTest
+ {
+ private ExecutionContext CreateExecutionContext()
+ {
+ return new ExecutionContext(
+ new RequestContext
+ {
+ config = new ContentstackClientOptions(),
+ service = new MockService()
+ },
+ new ResponseContext());
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_NetworkError_Retries_And_Succeeds()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 3,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(2, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsSuccessStatusCode);
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.NetworkRetryCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_HttpError_Retries_And_Succeeds()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 3,
+ RetryDelay = TimeSpan.FromMilliseconds(10),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(10)
+ }
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.InternalServerError);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsSuccessStatusCode);
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_Mixed_Network_And_Http_Errors()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 3,
+ RetryLimit = 3,
+ RetryOnHttpServerError = true,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10),
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
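+ // Queue a socket failure, a 500, and a 429 ahead of the success so both the network and HTTP retry paths are exercised.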
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ mockInnerHandler.AddResponse(HttpStatusCode.InternalServerError);
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddSuccessResponse();
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsSuccessStatusCode);
+ Assert.AreEqual(4, mockInnerHandler.CallCount);
+ Assert.AreEqual(1, context.RequestContext.NetworkRetryCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_Respects_RetryConfiguration()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnError = false
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+
+ try
+ {
+ await handler.InvokeAsync(context);
+ Assert.Fail("Should have thrown exception");
+ }
+ catch (SocketException)
+ {
+ // Expected
+ }
+
+ // Should not retry when RetryOnError is false
+ Assert.AreEqual(1, mockInnerHandler.CallCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_ExponentialBackoff_Delays_Increase()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 2,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(50),
+ NetworkBackoffStrategy = BackoffStrategy.Exponential
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(2, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var startTime = DateTime.UtcNow;
+ await handler.InvokeAsync(context);
+ var totalElapsed = DateTime.UtcNow - startTime;
+
+ // First retry: ~50ms, second retry: ~100ms (exponential)
+ // Total should be at least 150ms + jitter
+ Assert.IsTrue(totalElapsed >= TimeSpan.FromMilliseconds(150));
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_RetryLimit_Stops_Retries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+
+ try
+ {
+ await handler.InvokeAsync(context);
+ Assert.Fail("Should have thrown exception");
+ }
+ catch (ContentstackErrorException)
+ {
+ // Expected
+ }
+
+ // Should stop after 2 retries (3 total calls)
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_With_CustomRetryCondition()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10),
+ RetryCondition = (statusCode) => statusCode == HttpStatusCode.NotFound
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.NotFound);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsSuccessStatusCode);
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task EndToEnd_With_CustomBackoff()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ CustomBackoff = (retryCount, error) => TimeSpan.FromMilliseconds(100 * (retryCount + 1))
+ }
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var startTime = DateTime.UtcNow;
+ await handler.InvokeAsync(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Custom backoff yields increasing delays on each retry; the two retries should take at least 300ms combined
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(300));
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerTest.cs b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerTest.cs
new file mode 100644
index 0000000..3f6baa1
--- /dev/null
+++ b/Contentstack.Management.Core.Unit.Tests/Runtime/Pipeline/RetryHandler/RetryHandlerTest.cs
@@ -0,0 +1,403 @@
+using System;
+using System.Net;
+using System.Net.Sockets;
+using Contentstack.Management.Core;
+using Contentstack.Management.Core.Exceptions;
+using Contentstack.Management.Core.Runtime.Contexts;
+using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;
+using Contentstack.Management.Core.Unit.Tests.Mokes;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using Moq;
+using System.Threading.Tasks;
+
+namespace Contentstack.Management.Core.Unit.Tests.Runtime.Pipeline.RetryHandler
+{
+ [TestClass]
+ public class RetryHandlerTest
+ {
+ private ExecutionContext CreateExecutionContext()
+ {
+ return new ExecutionContext(
+ new RequestContext
+ {
+ config = new ContentstackClientOptions(),
+ service = new MockService()
+ },
+ new ResponseContext());
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_Success_NoRetry()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 3,
+ MaxNetworkRetries = 2
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddSuccessResponse();
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(1, mockInnerHandler.CallCount);
+ Assert.AreEqual(0, context.RequestContext.NetworkRetryCount);
+ Assert.AreEqual(0, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_NetworkError_Retries_UpTo_MaxNetworkRetries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 2,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(2, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(3, mockInnerHandler.CallCount); // 2 failures + 1 success
+ Assert.AreEqual(2, context.RequestContext.NetworkRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_NetworkError_Exceeds_MaxNetworkRetries_Throws()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 2,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+
+ try
+ {
+ await handler.InvokeAsync(context);
+ Assert.Fail("Should have thrown exception");
+ }
+ catch (SocketException)
+ {
+ // Expected
+ }
+
+ Assert.AreEqual(3, mockInnerHandler.CallCount); // 3 failures
+ Assert.AreEqual(2, context.RequestContext.NetworkRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_HttpError_429_Retries_UpTo_RetryLimit()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(3, mockInnerHandler.CallCount); // 2 failures + 1 success
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_HttpError_500_Retries_UpTo_RetryLimit()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnHttpServerError = true,
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.InternalServerError);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var result = await handler.InvokeAsync(context);
+
+ Assert.IsNotNull(result);
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_HttpError_Exceeds_RetryLimit_Throws()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+
+ try
+ {
+ await handler.InvokeAsync(context);
+ Assert.Fail("Should have thrown exception");
+ }
+ catch (ContentstackErrorException ex)
+ {
+ Assert.AreEqual(HttpStatusCode.TooManyRequests, ex.StatusCode);
+ }
+
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_NetworkError_Tracks_NetworkRetryCount()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 3,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(1, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ await handler.InvokeAsync(context);
+
+ Assert.AreEqual(1, context.RequestContext.NetworkRetryCount);
+ Assert.AreEqual(0, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_HttpError_Tracks_HttpRetryCount()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 3,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(1, HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ await handler.InvokeAsync(context);
+
+ Assert.AreEqual(0, context.RequestContext.NetworkRetryCount);
+ Assert.AreEqual(1, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_NetworkError_Then_HttpError_Tracks_Both_Counts()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 3,
+ RetryLimit = 3,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10),
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddException(MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ mockInnerHandler.AddResponse(HttpStatusCode.TooManyRequests);
+ mockInnerHandler.AddSuccessResponse();
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ await handler.InvokeAsync(context);
+
+ Assert.AreEqual(1, context.RequestContext.NetworkRetryCount);
+ Assert.AreEqual(1, context.RequestContext.HttpRetryCount);
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_Applies_NetworkRetryDelay()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 1,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(50),
+ NetworkBackoffStrategy = BackoffStrategy.Fixed
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(1, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var startTime = DateTime.UtcNow;
+ await handler.InvokeAsync(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Should have waited at least 50ms + jitter
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(50));
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_Applies_HttpRetryDelay()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 1,
+ RetryDelay = TimeSpan.FromMilliseconds(50),
+ RetryDelayOptions = new RetryDelayOptions
+ {
+ Base = TimeSpan.FromMilliseconds(50)
+ }
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(1, HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ var startTime = DateTime.UtcNow;
+ await handler.InvokeAsync(context);
+ var elapsed = DateTime.UtcNow - startTime;
+
+ // Should have waited at least 50ms + jitter
+ Assert.IsTrue(elapsed >= TimeSpan.FromMilliseconds(50));
+ }
+
+ [TestMethod]
+ public async Task InvokeAsync_RequestId_Is_Generated()
+ {
+ var config = new RetryConfiguration();
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddSuccessResponse();
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ await handler.InvokeAsync(context);
+
+ Assert.AreNotEqual(Guid.Empty, context.RequestContext.RequestId);
+ }
+
+ [TestMethod]
+ public void InvokeSync_Success_NoRetry()
+ {
+ var config = new RetryConfiguration();
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddSuccessResponse();
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ handler.InvokeSync(context);
+
+ Assert.AreEqual(1, mockInnerHandler.CallCount);
+ }
+
+ [TestMethod]
+ public void InvokeSync_NetworkError_Retries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryOnNetworkFailure = true,
+ RetryOnSocketFailure = true,
+ MaxNetworkRetries = 2,
+ NetworkRetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddFailuresThenSuccess(2, MockNetworkErrorGenerator.CreateSocketException(SocketError.ConnectionReset));
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ handler.InvokeSync(context);
+
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.NetworkRetryCount);
+ }
+
+ [TestMethod]
+ public void InvokeSync_HttpError_Retries()
+ {
+ var config = new RetryConfiguration
+ {
+ RetryLimit = 2,
+ RetryDelay = TimeSpan.FromMilliseconds(10)
+ };
+ var policy = new DefaultRetryPolicy(config);
+ var handler = new RetryHandler(policy);
+ var mockInnerHandler = new MockHttpHandlerWithRetries();
+ mockInnerHandler.AddHttpErrorsThenSuccess(2, HttpStatusCode.TooManyRequests);
+ handler.InnerHandler = mockInnerHandler;
+ handler.LogManager = LogManager.EmptyLogger;
+
+ var context = CreateExecutionContext();
+ handler.InvokeSync(context);
+
+ Assert.AreEqual(3, mockInnerHandler.CallCount);
+ Assert.AreEqual(2, context.RequestContext.HttpRetryCount);
+ }
+ }
+}
+
diff --git a/Contentstack.Management.Core/ContentstackClient.cs b/Contentstack.Management.Core/ContentstackClient.cs
index 478d9ff..a8929b4 100644
--- a/Contentstack.Management.Core/ContentstackClient.cs
+++ b/Contentstack.Management.Core/ContentstackClient.cs
@@ -207,7 +207,19 @@ protected void BuildPipeline()
{
HttpHandler httpClientHandler = new HttpHandler(_httpClient);
- RetryPolicy retryPolicy = contentstackOptions.RetryPolicy ?? new DefaultRetryPolicy(contentstackOptions.RetryLimit, contentstackOptions.RetryDelay);
+ RetryPolicy retryPolicy;
+ if (contentstackOptions.RetryPolicy != null)
+ {
+ // Use custom retry policy if provided
+ retryPolicy = contentstackOptions.RetryPolicy;
+ }
+ else
+ {
+ // Create RetryConfiguration from options and use it with DefaultRetryPolicy
+ var retryConfiguration = RetryConfiguration.FromOptions(contentstackOptions);
+ retryPolicy = new DefaultRetryPolicy(retryConfiguration);
+ }
+
ContentstackPipeline = new ContentstackRuntimePipeline(new List()
{
httpClientHandler,
diff --git a/Contentstack.Management.Core/ContentstackClientOptions.cs b/Contentstack.Management.Core/ContentstackClientOptions.cs
index af46f30..f5946ce 100644
--- a/Contentstack.Management.Core/ContentstackClientOptions.cs
+++ b/Contentstack.Management.Core/ContentstackClientOptions.cs
@@ -88,6 +88,59 @@ public class ContentstackClientOptions
///
public RetryPolicy RetryPolicy { get; set; }
+ ///
+ /// When set to true, the client will retry on network failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnNetworkFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on DNS failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnDnsFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on socket failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnSocketFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on HTTP server errors (5xx).
+ /// The default value is true.
+ ///
+ public bool RetryOnHttpServerError { get; set; } = true;
+
+ ///
+ /// Maximum number of network retry attempts.
+ /// The default value is 3.
+ ///
+ public int MaxNetworkRetries { get; set; } = 3;
+
+ ///
+ /// Base delay for network retries.
+ /// The default value is 100ms.
+ ///
+ public TimeSpan NetworkRetryDelay { get; set; } = TimeSpan.FromMilliseconds(100);
+
+ ///
+ /// Backoff strategy for network retries.
+ /// The default value is Exponential.
+ ///
+ public BackoffStrategy NetworkBackoffStrategy { get; set; } = BackoffStrategy.Exponential;
+
+ ///
+ /// Custom function to determine if a status code should be retried.
+ /// If null, default retry condition is used (429, 500, 502, 503, 504).
+ ///
+ public Func<HttpStatusCode, bool>? RetryCondition { get; set; }
+
+ ///
+ /// Options for retry delay calculation.
+ ///
+ public RetryDelayOptions RetryDelayOptions { get; set; }
+
///
/// Host for the Proxy.
///
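For orientation, a minimal sketch of how a caller might populate the retry options added above. The property names come from this change; the ContentstackClient(options) constructor is assumed from the existing SDK surface and is not part of this diff.

using System;
using System.Net;
using Contentstack.Management.Core;
using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;

var options = new ContentstackClientOptions
{
    // Network-level retries (DNS, socket, timeout failures)
    RetryOnNetworkFailure = true,
    RetryOnDnsFailure = true,
    RetryOnSocketFailure = true,
    MaxNetworkRetries = 3,
    NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
    NetworkBackoffStrategy = BackoffStrategy.Exponential,

    // HTTP-level retries (429 and 5xx by default)
    RetryOnHttpServerError = true,
    RetryLimit = 5,
    RetryDelay = TimeSpan.FromMilliseconds(300),
    RetryCondition = status => status == HttpStatusCode.TooManyRequests
};

var client = new ContentstackClient(options); // assumed constructor, not shown in this diff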
diff --git a/Contentstack.Management.Core/Exceptions/ContentstackErrorException.cs b/Contentstack.Management.Core/Exceptions/ContentstackErrorException.cs
index 8fcda6e..15e7b11 100644
--- a/Contentstack.Management.Core/Exceptions/ContentstackErrorException.cs
+++ b/Contentstack.Management.Core/Exceptions/ContentstackErrorException.cs
@@ -66,6 +66,21 @@ public string ErrorMessage
///
[JsonProperty("errors")]
public Dictionary Errors { get; set; }
+
+ ///
+ /// Number of retry attempts made before this exception was thrown.
+ ///
+ public int RetryAttempts { get; set; }
+
+ ///
+ /// The original exception that caused this error, if this is a network error wrapped in an HTTP exception.
+ ///
+ public Exception OriginalError { get; set; }
+
+ ///
+ /// Indicates whether this error originated from a network failure.
+ ///
+ public bool IsNetworkError { get; set; }
#endregion
public static ContentstackErrorException CreateException(HttpResponseMessage response)
{
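A hedged sketch of how the new diagnostic fields on ContentstackErrorException could be read once retries are exhausted. The properties are added in this change, but the code that populates them is not shown here, so treat this as illustrative only; DoManagementCall is a hypothetical stand-in for any SDK call routed through the retry pipeline.

using System;
using Contentstack.Management.Core.Exceptions;

try
{
    DoManagementCall(); // hypothetical helper standing in for a real management API call
}
catch (ContentstackErrorException ex)
{
    Console.WriteLine($"Request failed with {ex.StatusCode} after {ex.RetryAttempts} retries.");
    if (ex.IsNetworkError && ex.OriginalError != null)
    {
        // The wrapped transport failure, e.g. a SocketException or TimeoutException.
        Console.WriteLine($"Underlying network error: {ex.OriginalError.GetType().Name}");
    }
}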
diff --git a/Contentstack.Management.Core/LICENSE.txt b/Contentstack.Management.Core/LICENSE.txt
index 501f936..4382a0d 100644
--- a/Contentstack.Management.Core/LICENSE.txt
+++ b/Contentstack.Management.Core/LICENSE.txt
@@ -1,6 +1,6 @@
MIT License
-Copyright © 2012-2025 Contentstack. All Rights Reserved
+Copyright © 2012-2026 Contentstack. All Rights Reserved
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/Contentstack.Management.Core/Runtime/Contexts/IRequestContext.cs b/Contentstack.Management.Core/Runtime/Contexts/IRequestContext.cs
index 5cf6e80..eb2827e 100644
--- a/Contentstack.Management.Core/Runtime/Contexts/IRequestContext.cs
+++ b/Contentstack.Management.Core/Runtime/Contexts/IRequestContext.cs
@@ -9,6 +9,21 @@ public interface IRequestContext
IContentstackService service { get; set; }
ContentstackClientOptions config { get; set; }
int Retries { get; set; }
+
+ ///
+ /// Number of network retry attempts made.
+ ///
+ int NetworkRetryCount { get; set; }
+
+ ///
+ /// Number of HTTP retry attempts made.
+ ///
+ int HttpRetryCount { get; set; }
+
+ ///
+ /// Unique identifier for this request, used for correlation in logs.
+ ///
+ Guid RequestId { get; set; }
}
}
diff --git a/Contentstack.Management.Core/Runtime/Contexts/RequestContext.cs b/Contentstack.Management.Core/Runtime/Contexts/RequestContext.cs
index 214927a..9ce0b1a 100644
--- a/Contentstack.Management.Core/Runtime/Contexts/RequestContext.cs
+++ b/Contentstack.Management.Core/Runtime/Contexts/RequestContext.cs
@@ -10,5 +10,20 @@ internal class RequestContext : IRequestContext
public ContentstackClientOptions config { get; set; }
public int Retries { get; set; }
+
+ ///
+ /// Number of network retry attempts made.
+ ///
+ public int NetworkRetryCount { get; set; }
+
+ ///
+ /// Number of HTTP retry attempts made.
+ ///
+ public int HttpRetryCount { get; set; }
+
+ ///
+ /// Unique identifier for this request, used for correlation in logs.
+ ///
+ public Guid RequestId { get; set; } = Guid.NewGuid();
}
}
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/BackoffStrategy.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/BackoffStrategy.cs
new file mode 100644
index 0000000..788d1a0
--- /dev/null
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/BackoffStrategy.cs
@@ -0,0 +1,19 @@
+namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
+{
+ ///
+ /// Defines the backoff strategy for retry delays.
+ ///
+ public enum BackoffStrategy
+ {
+ ///
+ /// Fixed delay with jitter.
+ ///
+ Fixed,
+
+ ///
+ /// Exponential backoff with jitter (delay = baseDelay * 2^(attempt-1) + jitter).
+ ///
+ Exponential
+ }
+}
+
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/DefaultRetryPolicy.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/DefaultRetryPolicy.cs
index 050e2c8..fc436b2 100644
--- a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/DefaultRetryPolicy.cs
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/DefaultRetryPolicy.cs
@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using Contentstack.Management.Core.Exceptions;
using Contentstack.Management.Core.Runtime.Contexts;
namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
@@ -8,6 +10,9 @@ namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
public partial class DefaultRetryPolicy : RetryPolicy
{
protected TimeSpan retryDelay { get; set; }
+ protected RetryConfiguration retryConfiguration;
+ protected NetworkErrorDetector networkErrorDetector;
+ protected RetryDelayCalculator delayCalculator;
protected ICollection statusCodesToRetryOn = new HashSet
{
@@ -24,38 +29,183 @@ internal DefaultRetryPolicy(int retryLimit, TimeSpan delay)
{
RetryLimit = retryLimit;
retryDelay = delay;
+ networkErrorDetector = new NetworkErrorDetector();
+ delayCalculator = new RetryDelayCalculator();
+ }
+
+ internal DefaultRetryPolicy(RetryConfiguration config)
+ {
+ retryConfiguration = config ?? throw new ArgumentNullException(nameof(config));
+ RetryLimit = config.RetryLimit;
+ retryDelay = config.RetryDelay;
+ networkErrorDetector = new NetworkErrorDetector();
+ delayCalculator = new RetryDelayCalculator();
}
protected override bool CanRetry(IExecutionContext executionContext)
{
- return true;
+ if (retryConfiguration != null)
+ {
+ return retryConfiguration.RetryOnError;
+ }
+ return RetryOnError;
}
protected override bool RetryForException(IExecutionContext executionContext, Exception exception)
{
- //if (exception is Exceptions.ContentstackErrorException)
- //{
- // var contentstackExecption = exception as Exceptions.ContentstackErrorException;
+ if (retryConfiguration == null)
+ {
+ // Fallback to old behavior if no configuration provided
+ return false;
+ }
- // if (statusCodesToRetryOn.Contains(contentstackExecption.StatusCode))
- // {
- // return true;
- // }
- //}
-
- return false;
+ var requestContext = executionContext.RequestContext;
+
+ // Check for network errors
+ var networkErrorInfo = networkErrorDetector.IsTransientNetworkError(exception);
+ if (networkErrorInfo != null)
+ {
+ if (networkErrorDetector.ShouldRetryNetworkError(networkErrorInfo, retryConfiguration))
+ {
+ // Check if network retry limit exceeded
+ if (requestContext.NetworkRetryCount >= retryConfiguration.MaxNetworkRetries)
+ {
+ return false;
+ }
+ return true;
+ }
+ }
+
+ // Check for HTTP errors (ContentstackErrorException)
+ if (exception is ContentstackErrorException contentstackException)
+ {
+ // Check if it's a server error (5xx) that should be retried
+ if (contentstackException.StatusCode >= HttpStatusCode.InternalServerError &&
+ contentstackException.StatusCode <= HttpStatusCode.GatewayTimeout)
+ {
+ if (retryConfiguration.RetryOnHttpServerError)
+ {
+ // Check if HTTP retry limit exceeded
+ if (requestContext.HttpRetryCount >= retryConfiguration.RetryLimit)
+ {
+ return false;
+ }
+ return true;
+ }
+ }
+
+ // Check custom retry condition
+ if (delayCalculator.ShouldRetryHttpStatusCode(contentstackException.StatusCode, retryConfiguration))
+ {
+ // Check if HTTP retry limit exceeded
+ if (requestContext.HttpRetryCount >= retryConfiguration.RetryLimit)
+ {
+ return false;
+ }
+ return true;
+ }
+ }
+ return false;
}
protected override bool RetryLimitExceeded(IExecutionContext executionContext)
{
- return executionContext.RequestContext.Retries >= this.RetryLimit;
+ var requestContext = executionContext.RequestContext;
+
+ if (retryConfiguration != null)
+ {
+ // Check both network and HTTP retry limits
+ return requestContext.NetworkRetryCount >= retryConfiguration.MaxNetworkRetries &&
+ requestContext.HttpRetryCount >= retryConfiguration.RetryLimit;
+ }
+ // Fallback to old behavior
+ return requestContext.Retries >= this.RetryLimit;
}
internal override void WaitBeforeRetry(IExecutionContext executionContext)
{
- System.Threading.Tasks.Task.Delay(retryDelay.Milliseconds).Wait();
+ if (retryConfiguration == null)
+ {
+ // Fallback to old behavior
+ System.Threading.Tasks.Task.Delay(retryDelay.Milliseconds).Wait();
+ return;
+ }
+
+ var requestContext = executionContext.RequestContext;
+ TimeSpan delay;
+
+ // Determine delay based on error type
+ // We need to check the last exception, but we don't have it here
+ // So we'll use a heuristic: if network retries > 0, use network delay
+ if (requestContext.NetworkRetryCount > 0)
+ {
+ delay = delayCalculator.CalculateNetworkRetryDelay(
+ requestContext.NetworkRetryCount,
+ retryConfiguration);
+ }
+ else
+ {
+ // HTTP retry - we'll use the last exception if available
+ // For now, use base delay with exponential backoff
+ delay = delayCalculator.CalculateHttpRetryDelay(
+ requestContext.HttpRetryCount,
+ retryConfiguration,
+ null);
+ }
+
+ System.Threading.Tasks.Task.Delay(delay).Wait();
+ }
+
+ ///
+ /// Determines if an HTTP status code should be retried.
+ ///
+ public bool ShouldRetryHttpStatusCode(HttpStatusCode statusCode, IRequestContext requestContext)
+ {
+ if (retryConfiguration == null)
+ {
+ return statusCodesToRetryOn.Contains(statusCode);
+ }
+
+ if (requestContext.HttpRetryCount >= retryConfiguration.RetryLimit)
+ {
+ return false;
+ }
+
+ return delayCalculator.ShouldRetryHttpStatusCode(statusCode, retryConfiguration);
+ }
+
+ ///
+ /// Gets the retry delay for an HTTP error.
+ ///
+ public TimeSpan GetHttpRetryDelay(IRequestContext requestContext, Exception exception, HttpResponseHeaders? responseHeaders = null)
+ {
+ if (retryConfiguration == null)
+ {
+ return retryDelay;
+ }
+
+ return delayCalculator.CalculateHttpRetryDelay(
+ requestContext.HttpRetryCount,
+ retryConfiguration,
+ exception,
+ responseHeaders);
+ }
+
+ ///
+ /// Gets the retry delay for a network error.
+ ///
+ public TimeSpan GetNetworkRetryDelay(IRequestContext requestContext)
+ {
+ if (retryConfiguration == null)
+ {
+ return retryDelay;
+ }
+
+ return delayCalculator.CalculateNetworkRetryDelay(
+ requestContext.NetworkRetryCount,
+ retryConfiguration);
}
}
}
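The unit tests further down construct this policy directly from a RetryConfiguration (the constructor is internal, so this presumably relies on InternalsVisibleTo; external consumers would configure the same behaviour through ContentstackClientOptions). A minimal sketch of that pattern:

using System;
using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;

var config = new RetryConfiguration
{
    MaxNetworkRetries = 2,                               // cap for transient network failures
    RetryLimit = 3,                                      // cap for retryable HTTP responses (429/5xx)
    NetworkRetryDelay = TimeSpan.FromMilliseconds(100),
    RetryDelay = TimeSpan.FromMilliseconds(300)
};

var policy = new DefaultRetryPolicy(config);             // internal constructor, test/internal use
var handler = new RetryHandler(policy);                  // wired into the runtime pipeline with the HttpHandler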
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorDetector.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorDetector.cs
new file mode 100644
index 0000000..16f5bf5
--- /dev/null
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorDetector.cs
@@ -0,0 +1,141 @@
+using System;
+using System.Net;
+using System.Net.Sockets;
+using System.Threading.Tasks;
+using Contentstack.Management.Core.Exceptions;
+
+namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
+{
+ ///
+ /// Service to detect and classify transient network errors.
+ ///
+ public class NetworkErrorDetector
+ {
+ ///
+ /// Determines if an exception represents a transient network error.
+ ///
+ /// The exception to analyze.
+ /// NetworkErrorInfo if it's a transient network error, null otherwise.
+ public NetworkErrorInfo? IsTransientNetworkError(Exception error)
+ {
+ if (error == null)
+ return null;
+
+ // Check for SocketException
+ if (error is SocketException socketException)
+ {
+ return DetectSocketError(socketException);
+ }
+
+ // Check for HttpRequestException with inner SocketException
+ if (error is System.Net.Http.HttpRequestException httpRequestException)
+ {
+ if (httpRequestException.InnerException is SocketException innerSocketException)
+ {
+ return DetectSocketError(innerSocketException);
+ }
+ }
+
+ // Check for TaskCanceledException (timeout)
+ if (error is TaskCanceledException taskCanceledException)
+ {
+ // Only treat as timeout if it's not a user cancellation
+ // TaskCanceledException can occur due to timeout or cancellation
+ // We check if the cancellation token was actually cancelled by the user
+ if (taskCanceledException.CancellationToken.IsCancellationRequested == false)
+ {
+ return new NetworkErrorInfo(NetworkErrorType.Timeout, true, error);
+ }
+ }
+
+ // Check for TimeoutException
+ if (error is TimeoutException)
+ {
+ return new NetworkErrorInfo(NetworkErrorType.Timeout, true, error);
+ }
+
+ // Check for ContentstackErrorException with 5xx status codes
+ if (error is ContentstackErrorException contentstackError)
+ {
+ if (contentstackError.StatusCode >= HttpStatusCode.InternalServerError &&
+ contentstackError.StatusCode <= HttpStatusCode.GatewayTimeout)
+ {
+ return new NetworkErrorInfo(NetworkErrorType.HttpServerError, true, error);
+ }
+ }
+
+ return null;
+ }
+
+ ///
+ /// Detects the type of socket error from a SocketException.
+ ///
+ private NetworkErrorInfo DetectSocketError(SocketException socketException)
+ {
+ bool isTransient = false;
+ NetworkErrorType errorType = NetworkErrorType.SocketError;
+
+ switch (socketException.SocketErrorCode)
+ {
+ // DNS-related errors
+ case SocketError.HostNotFound:
+ case SocketError.TryAgain:
+ errorType = NetworkErrorType.DnsFailure;
+ isTransient = true;
+ break;
+
+ // Transient connection errors
+ case SocketError.ConnectionReset:
+ case SocketError.TimedOut:
+ case SocketError.ConnectionRefused:
+ case SocketError.NetworkUnreachable:
+ case SocketError.HostUnreachable:
+ case SocketError.NoBufferSpaceAvailable:
+ errorType = NetworkErrorType.SocketError;
+ isTransient = true;
+ break;
+
+ // Other socket errors (may or may not be transient)
+ default:
+ errorType = NetworkErrorType.SocketError;
+ // Most socket errors seen here are transient, but a few are not;
+ // unclassified codes are treated as transient so they stay retryable
+ isTransient = true;
+ break;
+ }
+
+ return new NetworkErrorInfo(errorType, isTransient, socketException);
+ }
+
+ ///
+ /// Determines if a network error should be retried based on configuration.
+ ///
+ public bool ShouldRetryNetworkError(NetworkErrorInfo? errorInfo, RetryConfiguration config)
+ {
+ if (errorInfo == null || !errorInfo.IsTransient)
+ return false;
+
+ if (!config.RetryOnNetworkFailure)
+ return false;
+
+ switch (errorInfo.ErrorType)
+ {
+ case NetworkErrorType.DnsFailure:
+ return config.RetryOnDnsFailure;
+
+ case NetworkErrorType.SocketError:
+ return config.RetryOnSocketFailure;
+
+ case NetworkErrorType.Timeout:
+ return config.RetryOnNetworkFailure;
+
+ case NetworkErrorType.HttpServerError:
+ return config.RetryOnHttpServerError;
+
+ default:
+ return config.RetryOnNetworkFailure;
+ }
+ }
+ }
+}
+
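A small standalone sketch of the detector's classification, using only the types introduced above; the exception is built by hand here rather than through the test helpers.

using System.Net.Http;
using System.Net.Sockets;
using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;

var detector = new NetworkErrorDetector();
var error = new HttpRequestException(
    "connection reset by peer",
    new SocketException((int)SocketError.ConnectionReset));

var info = detector.IsTransientNetworkError(error);
// info.ErrorType == NetworkErrorType.SocketError, info.IsTransient == true

var config = new RetryConfiguration { RetryOnSocketFailure = false };
bool shouldRetry = detector.ShouldRetryNetworkError(info, config); // false: socket retries disabled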
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorInfo.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorInfo.cs
new file mode 100644
index 0000000..f5fa2e8
--- /dev/null
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/NetworkErrorInfo.cs
@@ -0,0 +1,65 @@
+namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
+{
+ ///
+ /// Information about a detected network error.
+ ///
+ public class NetworkErrorInfo
+ {
+ ///
+ /// The type of network error detected.
+ ///
+ public NetworkErrorType ErrorType { get; set; }
+
+ ///
+ /// Indicates if this is a transient error that should be retried.
+ ///
+ public bool IsTransient { get; set; }
+
+ ///
+ /// The original exception that caused this network error.
+ ///
+ public System.Exception OriginalException { get; set; }
+
+ ///
+ /// Creates a new NetworkErrorInfo instance.
+ ///
+ public NetworkErrorInfo(NetworkErrorType errorType, bool isTransient, System.Exception originalException)
+ {
+ ErrorType = errorType;
+ IsTransient = isTransient;
+ OriginalException = originalException;
+ }
+ }
+
+ ///
+ /// Types of network errors that can occur.
+ ///
+ public enum NetworkErrorType
+ {
+ ///
+ /// DNS resolution failure.
+ ///
+ DnsFailure,
+
+ ///
+ /// Socket connection error (connection reset, refused, etc.).
+ ///
+ SocketError,
+
+ ///
+ /// Request timeout.
+ ///
+ Timeout,
+
+ ///
+ /// HTTP server error (5xx).
+ ///
+ HttpServerError,
+
+ ///
+ /// Unknown or unclassified error.
+ ///
+ Unknown
+ }
+}
+
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryConfiguration.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryConfiguration.cs
new file mode 100644
index 0000000..3248ad1
--- /dev/null
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryConfiguration.cs
@@ -0,0 +1,128 @@
+using System;
+using System.Net;
+using Contentstack.Management.Core;
+
+namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
+{
+ ///
+ /// Configuration for retry behavior, supporting both network and HTTP error retries.
+ ///
+ public class RetryConfiguration
+ {
+ ///
+ /// When set to true, the client will retry requests.
+ /// When set to false, the client will not retry requests.
+ /// The default value is true.
+ ///
+ public bool RetryOnError { get; set; } = true;
+
+ ///
+ /// The maximum number of HTTP retry attempts the SDK makes for a single
+ /// operation invocation before giving up.
+ /// The default value is 5.
+ ///
+ public int RetryLimit { get; set; } = 5;
+
+ ///
+ /// The base delay between successive HTTP retry attempts.
+ /// The default value is 300ms.
+ ///
+ public TimeSpan RetryDelay { get; set; } = TimeSpan.FromMilliseconds(300);
+
+ ///
+ /// When set to true, the client will retry on network failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnNetworkFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on DNS failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnDnsFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on socket failures.
+ /// The default value is true.
+ ///
+ public bool RetryOnSocketFailure { get; set; } = true;
+
+ ///
+ /// When set to true, the client will retry on HTTP server errors (5xx).
+ /// The default value is true.
+ ///
+ public bool RetryOnHttpServerError { get; set; } = true;
+
+ ///
+ /// Maximum number of network retry attempts.
+ /// The default value is 3.
+ ///
+ public int MaxNetworkRetries { get; set; } = 3;
+
+ ///
+ /// Base delay for network retries.
+ /// The default value is 100ms.
+ ///
+ public TimeSpan NetworkRetryDelay { get; set; } = TimeSpan.FromMilliseconds(100);
+
+ ///
+ /// Backoff strategy for network retries.
+ /// The default value is Exponential.
+ ///
+ public BackoffStrategy NetworkBackoffStrategy { get; set; } = BackoffStrategy.Exponential;
+
+ ///
+ /// Custom function to determine if a status code should be retried.
+ /// If null, default retry condition is used (429, 500, 502, 503, 504).
+ ///
+ public Func<HttpStatusCode, bool>? RetryCondition { get; set; }
+
+ ///
+ /// Options for retry delay calculation.
+ ///
+ public RetryDelayOptions RetryDelayOptions { get; set; } = new RetryDelayOptions();
+
+ ///
+ /// Creates a RetryConfiguration from ContentstackClientOptions.
+ ///
+ public static RetryConfiguration FromOptions(ContentstackClientOptions options)
+ {
+ return new RetryConfiguration
+ {
+ RetryOnError = options.RetryOnError,
+ RetryLimit = options.RetryLimit,
+ RetryDelay = options.RetryDelay,
+ RetryOnNetworkFailure = options.RetryOnNetworkFailure,
+ RetryOnDnsFailure = options.RetryOnDnsFailure,
+ RetryOnSocketFailure = options.RetryOnSocketFailure,
+ RetryOnHttpServerError = options.RetryOnHttpServerError,
+ MaxNetworkRetries = options.MaxNetworkRetries,
+ NetworkRetryDelay = options.NetworkRetryDelay,
+ NetworkBackoffStrategy = options.NetworkBackoffStrategy,
+ RetryCondition = options.RetryCondition,
+ RetryDelayOptions = options.RetryDelayOptions ?? new RetryDelayOptions
+ {
+ Base = options.RetryDelay
+ }
+ };
+ }
+ }
+
+ ///
+ /// Options for retry delay calculation.
+ ///
+ public class RetryDelayOptions
+ {
+ ///
+ /// Base delay for retries.
+ ///
+ public TimeSpan Base { get; set; } = TimeSpan.FromMilliseconds(300);
+
+ ///
+ /// Custom backoff function. Parameters: retryCount, exception.
+ /// Return TimeSpan.Zero or negative TimeSpan to disable retry for that attempt.
+ ///
+ public Func<int, Exception, TimeSpan>? CustomBackoff { get; set; }
+ }
+}
+
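A sketch of a configuration that exercises both extension points defined here: a custom status-code predicate and a custom backoff curve, in the same shape the end-to-end tests use.

using System;
using System.Net;
using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;

var config = new RetryConfiguration
{
    RetryLimit = 2,
    // Replace the default 429/5xx set: retry only on 429 and 503.
    RetryCondition = status => status == HttpStatusCode.TooManyRequests
                            || status == HttpStatusCode.ServiceUnavailable,
    RetryDelayOptions = new RetryDelayOptions
    {
        // Linear backoff instead of the default exponential curve: 200ms, 400ms, ...
        CustomBackoff = (retryCount, error) => TimeSpan.FromMilliseconds(200 * retryCount)
    }
};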
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryDelayCalculator.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryDelayCalculator.cs
new file mode 100644
index 0000000..28e4c74
--- /dev/null
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryDelayCalculator.cs
@@ -0,0 +1,122 @@
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using Contentstack.Management.Core.Exceptions;
+
+namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
+{
+ ///
+ /// Utility for calculating retry delays with various backoff strategies.
+ ///
+ public class RetryDelayCalculator
+ {
+ private static readonly Random _random = new Random();
+ private const int MaxJitterMs = 100;
+
+ ///
+ /// Calculates the delay for a network retry attempt.
+ ///
+ /// The current retry attempt number (1-based).
+ /// The retry configuration.
+ /// The delay to wait before retrying.
+ public TimeSpan CalculateNetworkRetryDelay(int attempt, RetryConfiguration config)
+ {
+ TimeSpan baseDelay = config.NetworkRetryDelay;
+ TimeSpan calculatedDelay;
+
+ switch (config.NetworkBackoffStrategy)
+ {
+ case BackoffStrategy.Exponential:
+ // Exponential: baseDelay * 2^(attempt-1)
+ double exponentialDelay = baseDelay.TotalMilliseconds * Math.Pow(2, attempt - 1);
+ calculatedDelay = TimeSpan.FromMilliseconds(exponentialDelay);
+ break;
+
+ case BackoffStrategy.Fixed:
+ default:
+ calculatedDelay = baseDelay;
+ break;
+ }
+
+ // Add jitter (random 0-100ms)
+ int jitterMs = _random.Next(0, MaxJitterMs);
+ return calculatedDelay.Add(TimeSpan.FromMilliseconds(jitterMs));
+ }
+
+ ///
+ /// Calculates the delay for an HTTP retry attempt.
+ ///
+ /// The current retry count (0-based).
+ /// The retry configuration.
+ /// The exception that triggered the retry, if any.
+ /// The HTTP response headers, if available.
+ /// The delay to wait before retrying. Returns TimeSpan.Zero or negative to disable retry.
+ public TimeSpan CalculateHttpRetryDelay(int retryCount, RetryConfiguration config, Exception? error, HttpResponseHeaders? responseHeaders = null)
+ {
+ // Check for Retry-After header (for 429 Too Many Requests)
+ if (responseHeaders != null && responseHeaders.RetryAfter != null)
+ {
+ var retryAfter = responseHeaders.RetryAfter;
+ if (retryAfter.Delta.HasValue)
+ {
+ return retryAfter.Delta.Value;
+ }
+ if (retryAfter.Date.HasValue)
+ {
+ var delay = retryAfter.Date.Value - DateTimeOffset.UtcNow;
+ if (delay > TimeSpan.Zero)
+ {
+ return delay;
+ }
+ }
+ }
+
+ // Use custom backoff function if provided
+ if (config.RetryDelayOptions.CustomBackoff != null)
+ {
+ // A zero or negative delay signals that this attempt should not be retried;
+ // either way the custom value is returned unchanged.
+ return config.RetryDelayOptions.CustomBackoff(retryCount, error);
+ }
+
+ // Default: use base delay with exponential backoff
+ TimeSpan baseDelay = config.RetryDelayOptions.Base;
+ if (baseDelay == TimeSpan.Zero)
+ {
+ baseDelay = config.RetryDelay;
+ }
+
+ // Exponential backoff: baseDelay * 2^retryCount
+ double exponentialDelay = baseDelay.TotalMilliseconds * Math.Pow(2, retryCount);
+ TimeSpan calculatedDelay = TimeSpan.FromMilliseconds(exponentialDelay);
+
+ // Add jitter (random 0-100ms)
+ int jitterMs = _random.Next(0, MaxJitterMs);
+ return calculatedDelay.Add(TimeSpan.FromMilliseconds(jitterMs));
+ }
+
+ ///
+ /// Determines if an HTTP status code should be retried based on configuration.
+ ///
+ public bool ShouldRetryHttpStatusCode(HttpStatusCode statusCode, RetryConfiguration config)
+ {
+ if (config.RetryCondition != null)
+ {
+ return config.RetryCondition(statusCode);
+ }
+
+ // Default retry condition: 429, 500, 502, 503, 504
+ return statusCode == HttpStatusCode.TooManyRequests ||
+ statusCode == HttpStatusCode.InternalServerError ||
+ statusCode == HttpStatusCode.BadGateway ||
+ statusCode == HttpStatusCode.ServiceUnavailable ||
+ statusCode == HttpStatusCode.GatewayTimeout;
+ }
+ }
+}
+
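A worked example of what the calculator produces with a default RetryConfiguration (NetworkRetryDelay = 100ms, RetryDelayOptions.Base = 300ms); each value additionally carries 0-100ms of random jitter.

using Contentstack.Management.Core.Runtime.Pipeline.RetryHandler;

var calc = new RetryDelayCalculator();
var config = new RetryConfiguration();

// Network retries (exponential): 100ms * 2^(attempt - 1)
var n1 = calc.CalculateNetworkRetryDelay(1, config); // ~100ms + jitter
var n2 = calc.CalculateNetworkRetryDelay(2, config); // ~200ms + jitter

// HTTP retries (exponential): 300ms * 2^retryCount
var h0 = calc.CalculateHttpRetryDelay(0, config, null); // ~300ms + jitter
var h1 = calc.CalculateHttpRetryDelay(1, config, null); // ~600ms + jitter

// When a 429 response carries a Retry-After header, that value wins over the computed backoff.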
diff --git a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryHandler.cs b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryHandler.cs
index 4ce9fcd..3f44e78 100644
--- a/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryHandler.cs
+++ b/Contentstack.Management.Core/Runtime/Pipeline/RetryHandler/RetryHandler.cs
@@ -1,4 +1,5 @@
using System;
+using System.Net.Http;
using System.Threading.Tasks;
using Contentstack.Management.Core.Exceptions;
using Contentstack.Management.Core.Runtime.Contexts;
@@ -8,26 +9,56 @@ namespace Contentstack.Management.Core.Runtime.Pipeline.RetryHandler
public class RetryHandler : PipelineHandler
{
public RetryPolicy RetryPolicy { get; private set; }
+ private readonly NetworkErrorDetector networkErrorDetector;
public RetryHandler(RetryPolicy retryPolicy)
{
this.RetryPolicy = retryPolicy;
+ this.networkErrorDetector = new NetworkErrorDetector();
}
+
public override async Task InvokeAsync(IExecutionContext executionContext, bool addAcceptMediaHeader = false, string apiVersion = null)
{
var requestContext = executionContext.RequestContext;
var responseContext = executionContext.ResponseContext;
bool shouldRetry = false;
+ Exception lastException = null;
+
do
{
try
{
- var response = await base.InvokeAsync(executionContext, addAcceptMediaHeader, apiVersion);
+ var response = await base.InvokeAsync(executionContext, addAcceptMediaHeader, apiVersion);
+
+ // Check if response is an HTTP error that should be retried
+ if (response is ContentstackResponse contentstackResponse &&
+ !contentstackResponse.IsSuccessStatusCode)
+ {
+ var defaultRetryPolicy = RetryPolicy as DefaultRetryPolicy;
+ if (defaultRetryPolicy != null &&
+ defaultRetryPolicy.ShouldRetryHttpStatusCode(contentstackResponse.StatusCode, requestContext))
+ {
+ requestContext.HttpRetryCount++;
+ shouldRetry = true;
+ LogForHttpRetry(requestContext, contentstackResponse.StatusCode);
+
+ // Get delay and wait
+ var delay = defaultRetryPolicy.GetHttpRetryDelay(
+ requestContext,
+ null,
+ contentstackResponse.ResponseBody?.Headers);
+ await Task.Delay(delay);
+ continue;
+ }
+ }
+
return response;
}
catch (Exception exception)
{
+ lastException = exception;
shouldRetry = this.RetryPolicy.Retry(executionContext, exception);
+
if (!shouldRetry)
{
LogForError(requestContext, exception);
@@ -35,14 +66,30 @@ public override async Task InvokeAsync(IExecutionContext executionContext,
}
else
{
- requestContext.Retries++;
- LogForRetry(requestContext, exception);
+ // Classify error and increment appropriate counter
+ var networkErrorInfo = networkErrorDetector.IsTransientNetworkError(exception);
+ if (networkErrorInfo != null)
+ {
+ requestContext.NetworkRetryCount++;
+ LogForNetworkRetry(requestContext, exception, networkErrorInfo);
+ }
+ else if (exception is ContentstackErrorException)
+ {
+ requestContext.HttpRetryCount++;
+ LogForHttpRetry(requestContext, ((ContentstackErrorException)exception).StatusCode);
+ }
+ else
+ {
+ requestContext.Retries++;
+ LogForRetry(requestContext, exception);
+ }
}
}
this.RetryPolicy.WaitBeforeRetry(executionContext);
} while (shouldRetry == true);
+
throw new ContentstackException("No response was return nor exception was thrown");
}
@@ -50,16 +97,44 @@ public override void InvokeSync(IExecutionContext executionContext, bool addAcce
{
var requestContext = executionContext.RequestContext;
bool shouldRetry = false;
+ Exception lastException = null;
+
do
{
try
{
base.InvokeSync(executionContext, addAcceptMediaHeader, apiVersion);
+
+ // Check if response is an HTTP error that should be retried
+ var response = executionContext.ResponseContext.httpResponse;
+ if (response is ContentstackResponse contentstackResponse &&
+ !contentstackResponse.IsSuccessStatusCode)
+ {
+ var defaultRetryPolicy = RetryPolicy as DefaultRetryPolicy;
+ if (defaultRetryPolicy != null &&
+ defaultRetryPolicy.ShouldRetryHttpStatusCode(contentstackResponse.StatusCode, requestContext))
+ {
+ requestContext.HttpRetryCount++;
+ shouldRetry = true;
+ LogForHttpRetry(requestContext, contentstackResponse.StatusCode);
+
+ // Get delay and wait
+ var delay = defaultRetryPolicy.GetHttpRetryDelay(
+ requestContext,
+ null,
+ contentstackResponse.ResponseBody?.Headers);
+ System.Threading.Tasks.Task.Delay(delay).Wait();
+ continue;
+ }
+ }
+
return;
}
catch (Exception exception)
{
+ lastException = exception;
shouldRetry = this.RetryPolicy.Retry(executionContext, exception);
+
if (!shouldRetry)
{
LogForError(requestContext, exception);
@@ -67,10 +142,24 @@ public override void InvokeSync(IExecutionContext executionContext, bool addAcce
}
else
{
- requestContext.Retries++;
- LogForRetry(requestContext, exception);
+ // Classify error and increment appropriate counter
+ var networkErrorInfo = networkErrorDetector.IsTransientNetworkError(exception);
+ if (networkErrorInfo != null)
+ {
+ requestContext.NetworkRetryCount++;
+ LogForNetworkRetry(requestContext, exception, networkErrorInfo);
+ }
+ else if (exception is ContentstackErrorException)
+ {
+ requestContext.HttpRetryCount++;
+ LogForHttpRetry(requestContext, ((ContentstackErrorException)exception).StatusCode);
+ }
+ else
+ {
+ requestContext.Retries++;
+ LogForRetry(requestContext, exception);
+ }
}
-
}
this.RetryPolicy.WaitBeforeRetry(executionContext);
@@ -80,20 +169,52 @@ public override void InvokeSync(IExecutionContext executionContext, bool addAcce
private void LogForError(IRequestContext requestContext, Exception exception)
{
- LogManager.InfoFormat("{0} making request {1}. Attempt {2} of {3}.",
- exception.GetType().Name,
- requestContext.service.ResourcePath,
- requestContext.Retries + 1,
- RetryPolicy.RetryLimit);
+ var totalAttempts = requestContext.NetworkRetryCount + requestContext.HttpRetryCount + requestContext.Retries + 1;
+ LogManager.InfoFormat(
+ "[RequestId: {0}] {1} making request {2}. Final attempt {3} failed. Network retries: {4}, HTTP retries: {5}.",
+ requestContext.RequestId,
+ exception.GetType().Name,
+ requestContext.service.ResourcePath,
+ totalAttempts,
+ requestContext.NetworkRetryCount,
+ requestContext.HttpRetryCount);
}
private void LogForRetry(IRequestContext requestContext, Exception exception)
{
- LogManager.Error(exception, "{0} making request {1}. Attempt {2} of {3}.",
- exception.GetType().Name,
- requestContext.service.ResourcePath,
- requestContext.Retries,
- RetryPolicy.RetryLimit);
+ var totalAttempts = requestContext.NetworkRetryCount + requestContext.HttpRetryCount + requestContext.Retries;
+ LogManager.InfoFormat(
+ "[RequestId: {0}] {1} making request {2}. Retrying (attempt {3}). Network retries: {4}, HTTP retries: {5}.",
+ requestContext.RequestId,
+ exception.GetType().Name,
+ requestContext.service.ResourcePath,
+ totalAttempts,
+ requestContext.NetworkRetryCount,
+ requestContext.HttpRetryCount);
+ }
+
+ private void LogForNetworkRetry(IRequestContext requestContext, Exception exception, NetworkErrorInfo errorInfo)
+ {
+ var totalAttempts = requestContext.NetworkRetryCount + requestContext.HttpRetryCount + requestContext.Retries;
+ LogManager.InfoFormat(
+ "[RequestId: {0}] Network error ({1}) making request {2}. Retrying (attempt {3}, network retry {4}).",
+ requestContext.RequestId,
+ errorInfo.ErrorType,
+ requestContext.service.ResourcePath,
+ totalAttempts,
+ requestContext.NetworkRetryCount);
+ }
+
+ private void LogForHttpRetry(IRequestContext requestContext, System.Net.HttpStatusCode statusCode)
+ {
+ var totalAttempts = requestContext.NetworkRetryCount + requestContext.HttpRetryCount + requestContext.Retries;
+ LogManager.InfoFormat(
+ "[RequestId: {0}] HTTP error ({1}) making request {2}. Retrying (attempt {3}, HTTP retry {4}).",
+ requestContext.RequestId,
+ statusCode,
+ requestContext.service.ResourcePath,
+ totalAttempts,
+ requestContext.HttpRetryCount);
}
}
}
diff --git a/Contentstack.Management.Core/contentstack.management.core.csproj b/Contentstack.Management.Core/contentstack.management.core.csproj
index 0f14e8c..bdba4c5 100644
--- a/Contentstack.Management.Core/contentstack.management.core.csproj
+++ b/Contentstack.Management.Core/contentstack.management.core.csproj
@@ -4,7 +4,7 @@
netstandard2.0;net471;net472;
Contentstack Management
Contentstack
- Copyright © 2012-2025 Contentstack. All Rights Reserved
+ Copyright © 2012-2026 Contentstack. All Rights Reserved
.NET SDK for the Contentstack Content Management API.
Contentstack
contentstack.management.csharp
diff --git a/LICENSE b/LICENSE
index 3851325..4ea4612 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2012-2025 Contentstack
+Copyright (c) 2012-2026 Contentstack
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/Scripts/run-test-case.sh b/Scripts/run-test-case.sh
index 7550df9..a1d47c4 100644
--- a/Scripts/run-test-case.sh
+++ b/Scripts/run-test-case.sh
@@ -4,7 +4,7 @@
# Contentstack
#
# Created by Uttam Ukkoji on 12/04/21.
-# Copyright © 2025 Contentstack. All rights reserved.
+# Copyright © 2026 Contentstack. All rights reserved.
echo "Removing files"
diff --git a/Scripts/run-unit-test-case.sh b/Scripts/run-unit-test-case.sh
index ff14bdc..ba41e6c 100644
--- a/Scripts/run-unit-test-case.sh
+++ b/Scripts/run-unit-test-case.sh
@@ -4,7 +4,7 @@
# Contentstack
#
# Created by Uttam Ukkoji on 30/03/2023.
-# Copyright © 2025 Contentstack. All rights reserved.
+# Copyright © 2026 Contentstack. All rights reserved.
echo "Removing files"
rm -rf "./Contentstack.Management.Core.Unit.Tests/TestResults"