JAVASCRIPT
Consuming Paginated API Data with `async/await`
Efficiently fetch data from paginated APIs with modern `async/await` syntax, following `next` links to retrieve each page in sequence.
async function fetchAllPaginatedData(initialUrl) {
  let allData = [];
  let currentUrl = initialUrl;

  while (currentUrl) {
    try {
      console.log(`Fetching data from: ${currentUrl}`);
      const response = await fetch(currentUrl, {
        method: 'GET',
        headers: {
          // GET requests carry no body, so Accept (not Content-Type) is the header to send
          'Accept': 'application/json',
          // 'Authorization': 'Bearer YOUR_API_KEY' // Include authentication if needed
        },
      });

      if (!response.ok) {
        throw new Error(`HTTP error! Status: ${response.status}`);
      }

      const data = await response.json();

      // Assuming the API response has 'results' for the page's data and 'next' for the next page URL
      if (data.results && Array.isArray(data.results)) {
        allData = allData.concat(data.results);
      } else {
        // If there is no 'results' key, assume the entire response body is the data
        allData = allData.concat(data);
      }

      currentUrl = data.next; // null/undefined on the last page, which ends the loop

      // Optional: add a small delay to avoid hitting rate limits too aggressively
      // await new Promise(resolve => setTimeout(resolve, 200));
    } catch (error) {
      console.error('Error fetching paginated data:', error);
      // Decide whether to stop or retry based on the error type
      break;
    }
  }

  return allData;
}

// Example usage: fetch all pages of people from SWAPI, which paginates via a 'next' URL
// Replace with your actual API endpoint
const API_ENDPOINT = 'https://swapi.dev/api/people/';

(async () => {
  console.log('Starting to fetch all data...');
  const allPeople = await fetchAllPaginatedData(API_ENDPOINT);
  console.log('All data fetched:', allPeople.length, 'records.');
  console.log('First 5 records:', allPeople.slice(0, 5));
})();
How it works: This JavaScript snippet fetches every page from an API that uses next-link pagination, where each response includes the URL of the following page (the `next` field in SWAPI's responses, for example). An `async` function drives a `while` loop that awaits each `fetch`, appends the page's `results` to an accumulator, and follows `data.next` until it is `null`, so pages are retrieved strictly in sequence. Errors from the network or the API are caught inside the loop and stop the traversal, making the snippet a solid starting point for pulling down large datasets sequentially.
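The `catch` block above logs the error and breaks out of the loop on the first failure. For transient problems (network blips, 429 or 5xx responses), retrying with exponential backoff is often a better choice. Below is a minimal sketch of such a helper; the retry count, delay schedule, and set of retryable statuses are assumptions to tune for your API, not part of the original snippet.

async function fetchWithRetry(url, options = {}, maxRetries = 3) {
  // One initial attempt plus up to maxRetries retries
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      const response = await fetch(url, options);
      // Assumed policy: retry only on rate limiting and server errors;
      // other statuses (e.g. 404) are returned for the caller's !response.ok check
      if (response.status === 429 || response.status >= 500) {
        throw new Error(`Retryable HTTP error! Status: ${response.status}`);
      }
      return response;
    } catch (error) {
      if (attempt === maxRetries) throw error; // Out of retries: propagate
      const delayMs = 500 * 2 ** attempt; // 500ms, 1s, 2s, ...
      console.warn(`Attempt ${attempt + 1} failed, retrying in ${delayMs}ms`);
      await new Promise(resolve => setTimeout(resolve, delayMs));
    }
  }
}

To use it, swap the `fetch(currentUrl, ...)` call inside the loop for `fetchWithRetry(currentUrl, { headers: { 'Accept': 'application/json' } })`; the rest of the loop, including the `!response.ok` check for non-retryable statuses, stays the same.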