Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 40 additions & 38 deletions js/http_fetcher.js
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,19 @@ class HTTPFetcher extends EventEmitter {
return null;
}

/**
* Returns a shortened version of the URL for log messages.
* @returns {string} Shortened URL
*/
#shortenUrl () {
try {
const urlObj = new URL(this.url);
return `${urlObj.origin}${urlObj.pathname}${urlObj.search.length > 50 ? "?..." : urlObj.search}`;
} catch {
return this.url;
}
}

/**
* Determines the retry delay for a non-ok response
* @param {Response} response - The fetch Response object
Expand All @@ -198,28 +211,35 @@ class HTTPFetcher extends EventEmitter {
errorType = "AUTH_FAILURE";
delay = Math.max(this.reloadInterval * 5, THIRTY_MINUTES);
message = `Authentication failed (${status}). Check your API key. Waiting ${Math.round(delay / 60000)} minutes before retry.`;
Log.error(`${this.logContext}${this.url} - ${message}`);
Log.error(`${this.logContext}${this.#shortenUrl()} - ${message}`);
} else if (status === 429) {
errorType = "RATE_LIMITED";
const retryAfter = response.headers.get("retry-after");
const parsed = retryAfter ? this.#parseRetryAfter(retryAfter) : null;
delay = parsed !== null ? Math.max(parsed, this.reloadInterval) : Math.max(this.reloadInterval * 2, FIFTEEN_MINUTES);
message = `Rate limited (429). Retrying in ${Math.round(delay / 60000)} minutes.`;
Log.warn(`${this.logContext}${this.url} - ${message}`);
Log.warn(`${this.logContext}${this.#shortenUrl()} - ${message}`);
} else if (status >= 500) {
errorType = "SERVER_ERROR";
this.serverErrorCount = Math.min(this.serverErrorCount + 1, this.maxRetries);
delay = this.reloadInterval * Math.pow(2, this.serverErrorCount);
message = `Server error (${status}). Retry #${this.serverErrorCount} in ${Math.round(delay / 60000)} minutes.`;
Log.error(`${this.logContext}${this.url} - ${message}`);
if (this.serverErrorCount >= this.maxRetries) {
delay = this.reloadInterval;
message = `Server error (${status}). Max retries reached, retrying at configured interval (${Math.round(delay / 1000)}s).`;
} else {
delay = HTTPFetcher.calculateBackoffDelay(this.serverErrorCount, {
maxDelay: this.reloadInterval
});
message = `Server error (${status}). Retry #${this.serverErrorCount} in ${Math.round(delay / 1000)}s.`;
}
Log.error(`${this.logContext}${this.#shortenUrl()} - ${message}`);
} else if (status >= 400) {
errorType = "CLIENT_ERROR";
delay = Math.max(this.reloadInterval * 2, FIFTEEN_MINUTES);
message = `Client error (${status}). Retrying in ${Math.round(delay / 60000)} minutes.`;
Log.error(`${this.logContext}${this.url} - ${message}`);
Log.error(`${this.logContext}${this.#shortenUrl()} - ${message}`);
} else {
message = `Unexpected HTTP status ${status}.`;
Log.error(`${this.logContext}${this.url} - ${message}`);
Log.error(`${this.logContext}${this.#shortenUrl()} - ${message}`);
}

return {
Expand Down Expand Up @@ -293,28 +313,22 @@ class HTTPFetcher extends EventEmitter {
const isTimeout = error.name === "AbortError";
const message = isTimeout ? `Request timeout after ${this.timeout}ms` : `Network error: ${error.message}`;

// Apply exponential backoff for network errors
this.networkErrorCount = Math.min(this.networkErrorCount + 1, this.maxRetries);
const backoffDelay = HTTPFetcher.calculateBackoffDelay(this.networkErrorCount, {
maxDelay: this.reloadInterval
});
nextDelay = backoffDelay;

// Truncate URL for cleaner logs
let shortUrl = this.url;
try {
const urlObj = new URL(this.url);
shortUrl = `${urlObj.origin}${urlObj.pathname}${urlObj.search.length > 50 ? "?..." : urlObj.search}`;
} catch {
// If URL parsing fails, use original URL
}
const exhausted = this.networkErrorCount >= this.maxRetries;

// Gradual log-level escalation: WARN for first 2 attempts, ERROR after
const retryMessage = `Retry #${this.networkErrorCount} in ${Math.round(nextDelay / 1000)}s.`;
if (this.networkErrorCount <= 2) {
Log.warn(`${this.logContext}${shortUrl} - ${message} ${retryMessage}`);
if (exhausted) {
nextDelay = this.reloadInterval;
Log.error(`${this.logContext}${this.#shortenUrl()} - ${message} Max retries reached, retrying at configured interval (${Math.round(nextDelay / 1000)}s).`);
} else {
Log.error(`${this.logContext}${shortUrl} - ${message} ${retryMessage}`);
nextDelay = HTTPFetcher.calculateBackoffDelay(this.networkErrorCount, {
maxDelay: this.reloadInterval
});
const retryMsg = `${this.logContext}${this.#shortenUrl()} - ${message} Retry #${this.networkErrorCount} in ${Math.round(nextDelay / 1000)}s.`;
if (this.networkErrorCount <= 2) {
Log.warn(retryMsg);
} else {
Log.error(retryMsg);
}
}

const errorInfo = this.#createErrorInfo(
Expand All @@ -324,18 +338,6 @@ class HTTPFetcher extends EventEmitter {
nextDelay,
error
);

/**
* Error event - fired when fetch fails
* @event HTTPFetcher#error
* @type {object}
* @property {string} message - Error description
* @property {number|null} statusCode - HTTP status or null for network errors
* @property {number} retryDelay - Ms until next retry
* @property {number} retryCount - Number of consecutive server errors
* @property {string} url - The URL that was fetched
* @property {Error|null} originalError - The original error
*/
this.emit("error", errorInfo);
} finally {
clearTimeout(timeoutId);
Expand Down
80 changes: 80 additions & 0 deletions tests/unit/functions/http_fetcher_spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -469,3 +469,83 @@ describe("selfSignedCert dispatcher", () => {
expect(options.dispatcher).toBeUndefined();
});
});

describe("Retry exhaustion fallback", () => {
	it("should fall back to reloadInterval after network retries exhausted", async () => {
		server.use(http.get(TEST_URL, () => HttpResponse.error()));

		fetcher = new HTTPFetcher(TEST_URL, { reloadInterval: 300000, maxRetries: 3 });

		const emitted = [];
		fetcher.on("error", (info) => emitted.push(info));

		// Run one fetch past maxRetries so the exhaustion path is exercised
		for (let attempt = 0; attempt < 4; attempt++) {
			await fetcher.fetch();
		}

		// While under the cap, delays follow exponential backoff (< reloadInterval)
		expect(emitted[0].retryAfter).toBe(15000);
		expect(emitted[1].retryAfter).toBe(30000);
		// Once maxRetries is reached, the delay settles at the configured interval
		expect(emitted[2].retryAfter).toBe(300000);
		expect(emitted[3].retryAfter).toBe(300000);
	});

	it("should fall back to reloadInterval after server error retries exhausted", async () => {
		server.use(http.get(TEST_URL, () => new HttpResponse(null, { status: 503 })));

		fetcher = new HTTPFetcher(TEST_URL, { reloadInterval: 300000, maxRetries: 3 });

		const emitted = [];
		fetcher.on("error", (info) => emitted.push(info));

		for (let attempt = 0; attempt < 4; attempt++) {
			await fetcher.fetch();
		}

		// While under the cap, delays follow exponential backoff (< reloadInterval)
		expect(emitted[0].retryAfter).toBe(15000);
		expect(emitted[1].retryAfter).toBe(30000);
		// Once maxRetries is reached, the delay settles at the configured interval
		expect(emitted[2].retryAfter).toBe(300000);
		expect(emitted[3].retryAfter).toBe(300000);
	});

	it("should reset network error count on success", async () => {
		let attempts = 0;
		server.use(
			http.get(TEST_URL, () => {
				attempts += 1;
				// Fail twice, then recover
				return attempts <= 2 ? HttpResponse.error() : HttpResponse.text("ok");
			})
		);

		fetcher = new HTTPFetcher(TEST_URL, { reloadInterval: 300000, maxRetries: 3 });

		const emitted = [];
		fetcher.on("error", (info) => emitted.push(info));

		// Two consecutive failures accrue backoff
		await fetcher.fetch();
		await fetcher.fetch();
		expect(emitted).toHaveLength(2);
		expect(emitted[0].retryAfter).toBe(15000);
		expect(emitted[1].retryAfter).toBe(30000);

		// A successful fetch clears the consecutive-error counter
		await fetcher.fetch();
		expect(fetcher.networkErrorCount).toBe(0);
	});
});
Loading