Skip to content

Commit

Permalink
retry requests once if a 429 is received, adjust batch size down by one for every 429 response received to a minimum of one
Browse files Browse the repository at this point in the history
  • Loading branch information
Philip Benson authored and Philip Benson committed Feb 28, 2025
1 parent 44f63ea commit 982d9e7
Show file tree
Hide file tree
Showing 2 changed files with 71 additions and 8 deletions.
51 changes: 51 additions & 0 deletions packages/connectors-lib/src/__tests__/http-request-batcher.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -185,5 +185,56 @@ describe('HTTP Request Batcher', () => {
await batcher.fetch()
expect(batcher.responses).toEqual([{ status: 200 }, { status: 200 }])
})

it('retries requests that received a 429 response', async () => {
const batcher = new HTTPRequestBatcher(1)
fetch.mockImplementationOnce(() => ({ status: 429 }))
batcher.addRequest('https://api.example.com')
global.setTimeout.mockImplementationOnce(cb => cb())
await batcher.fetch()
expect(fetch).toHaveBeenCalledTimes(2)
})

it('retries requests with the same options as the original request', async () => {
const batcher = new HTTPRequestBatcher(3)
fetch.mockResolvedValueOnce({ status: 200 }).mockResolvedValueOnce({ status: 429 })
batcher.addRequest('https://api.example.com')
const sampleOptions = { method: 'POST', body: Symbol('body') }
batcher.addRequest('https://alt-api.example.com', sampleOptions)
batcher.addRequest('https://api-three.example.com')
batcher.addRequest('https://api-four.example.com')
global.setTimeout.mockImplementationOnce(cb => cb())
await batcher.fetch()
expect(fetch).toHaveBeenNthCalledWith(5, 'https://alt-api.example.com', sampleOptions)
})

it('adjusts batch size if a 429 response is received', async () => {
const batcher = new HTTPRequestBatcher(3)
fetch.mockImplementationOnce(() => ({ status: 429 }))
batcher.addRequest('https://api.example.com')
batcher.addRequest('https://alt-api.example.com')
batcher.addRequest('https://api-three.example.com')
global.setTimeout.mockImplementationOnce(cb => cb())
await batcher.fetch()
expect(batcher.batchSize).toBe(2)
})

it("doesn't reduce batch size below 1", async () => {
const batcher = new HTTPRequestBatcher(1)
fetch.mockImplementationOnce(() => ({ status: 429 }))
batcher.addRequest('https://api.example.com')
global.setTimeout.mockImplementationOnce(cb => cb())
await batcher.fetch()
expect(batcher.batchSize).toBe(1)
})

it('only retry once if a 429 response is received again', async () => {
const batcher = new HTTPRequestBatcher(1)
fetch.mockResolvedValueOnce({ status: 429 }).mockResolvedValueOnce({ status: 429 })
batcher.addRequest('https://api.example.com')
global.setTimeout.mockImplementation(cb => cb())
await batcher.fetch()
expect(fetch).toHaveBeenCalledTimes(2)
})
})
})
28 changes: 20 additions & 8 deletions packages/connectors-lib/src/http-request-batcher.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,22 +26,34 @@ export default class HTTPRequestBatcher {
this._requests.push({ url, options })
}

_sendBatch (fetchRequests, position) {
_sendBatch (fetchRequests) {
return fetchRequests.length === this._batchSize
}

async fetch () {
const requestQueue = [...this._requests]
const sentRequests = []
const fetchRequests = []
for (let position = 0; position < this._requests.length; position++) {
fetchRequests.push(fetch(this._requests[position].url, this._requests[position].options))
console.log('fetchRequests', fetchRequests)
if (this._sendBatch(fetchRequests, position)) {
this._responses.push(...(await Promise.all(fetchRequests)))
if (position !== this._requests.length - 1) {
while (requestQueue.length) {
const request = requestQueue.shift()
fetchRequests.push(fetch(request.url, request.options))
sentRequests.push({ attempts: 1, ...request })
if (this._sendBatch(fetchRequests)) {
const batchResponses = await Promise.all(fetchRequests)
this._responses.push(...batchResponses)
for (let x = 0; x < batchResponses.length; x++) {
const response = batchResponses[x]
if (response.status === 429 && sentRequests[x].attempts < 2) {
requestQueue.push({ ...sentRequests[x], attempts: sentRequests[x].attempts + 1 })
this._batchSize = Math.max(this._batchSize - 1, 1)
}
}
fetchRequests.length = 0
sentRequests.length = 0
if (requestQueue.length) {
// don't wait if this is the last batch
await new Promise(resolve => setTimeout(() => resolve(), 1000))
}
fetchRequests.length = 0
}
}
}
Expand Down

0 comments on commit 982d9e7

Please sign in to comment.