
Efficiently Stream Large JSON Responses from External APIs (Node.js)

Process massive JSON data from API responses in a memory-efficient way using Node.js readable streams, avoiding loading entire payloads into memory.

const fetch = require('node-fetch'); // node-fetch exposes response.body as a Node.js Readable; native/browser fetch exposes a web ReadableStream instead
const { Writable } = require('stream');
const { pipeline } = require('stream/promises'); // forwards errors from every stage and awaits completion

// For parsing large JSON streams without loading the entire thing into memory
// You'd typically use a dedicated streaming JSON parser like 'stream-json'
// For demonstration, we'll just show reading the stream.
// In a real scenario, integrate a parser like 'stream-json' to extract objects.

async function streamLargeApiResponse(apiUrl, processChunk) {
  try {
    const response = await fetch(apiUrl);

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`API request failed: ${response.status} - ${errorText}`);
    }

    if (!response.body) {
      throw new Error('Response body is not a readable stream.');
    }

    // Create a Writable stream to process chunks
    const processorStream = new Writable({
      write(chunk, encoding, callback) {
        // In a real application, you would parse `chunk` here
        // using a streaming JSON parser (e.g., stream-json)
        // and then call `processChunk` for each complete JSON object/element.
        processChunk(chunk.toString()); // For demonstration, forward the raw chunk as text to the caller
        callback(); // Call callback when processing is done for this chunk
      },
      final(callback) {
        console.log('Finished streaming and processing data.');
        callback();
      }
    });

    // Pipe the response body (Node.js Readable) into our processor (Writable).
    // stream.pipeline awaits completion and rejects if either stream errors,
    // unlike .pipe(), which would not surface errors emitted by response.body.
    await pipeline(response.body, processorStream);

  } catch (error) {
    console.error('Error streaming API response:', error);
    throw error;
  }
}

// Usage example:
// const LARGE_DATA_API_URL = 'https://api.example.com/v1/large-dataset'; // Replace with an actual large data endpoint

// (async () => {
//   console.log('Starting stream process...');
//   try {
//     await streamLargeApiResponse(LARGE_DATA_API_URL, (chunk) => {
//       // This function is called for each chunk of data
//       // In a real scenario, you'd feed this chunk to a streaming JSON parser
//       // to reconstruct JSON objects without loading the whole file.
//       // For this example, we'll just log a snippet of each chunk.
//       console.log(`Received chunk (length: ${chunk.length}): ${chunk.substring(0, 50)}...`);
//       // If you had 'stream-json', you'd do something like: parser.write(chunk); (see the sketch below)
//     });
//     console.log('Stream process completed.');
//   } catch (error) {
//     console.error('Failed to stream large API response:', error);
//   }
// })();
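
// To make the parser.write(chunk) hint above concrete, here is a minimal sketch of
// driving a 'stream-json' parser from the processChunk callback. It assumes stream-json
// is installed and LARGE_DATA_API_URL is defined as above; the names jsonParser and
// arrayStreamer are illustrative, and backpressure is ignored for brevity.
/*
const { parser } = require('stream-json');
const { streamArray } = require('stream-json/streamers/StreamArray');

const jsonParser = parser();                          // tokenizes the incoming JSON text
const arrayStreamer = jsonParser.pipe(streamArray()); // emits { key, value } per element of a top-level array

arrayStreamer.on('data', ({ key, value }) => {
  console.log(`Parsed item ${key}:`, value);
});

streamLargeApiResponse(LARGE_DATA_API_URL, (chunk) => {
  jsonParser.write(chunk);       // push each raw text chunk into the parser as it arrives
}).then(() => jsonParser.end()); // signal end of input once the HTTP stream finishes
*/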

// Note: For actual JSON parsing from a stream, consider libraries like 'stream-json' (https://github.com/uhop/stream-json)
// Example with stream-json (conceptual, requires installation):
/*
const { parser } = require('stream-json');
const { streamArray } = require('stream-json/streamers/StreamArray');

async function streamAndParseLargeJson(apiUrl) {
  const response = await fetch(apiUrl);
  if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);

  return new Promise((resolve, reject) => {
    const source = response.body;
    const parsed = source.pipe(parser());
    const jsonStream = parsed.pipe(streamArray());

    // .pipe() does not forward errors across stages, so each stage needs its own handler
    // (or use stream.pipeline, which handles this automatically).
    source.on('error', reject);
    parsed.on('error', reject);

    jsonStream.on('data', ({ key, value }) => {
      // Process each individual JSON object as it's parsed
      console.log(`Processing item ${key}:`, value);
    });

    jsonStream.on('end', () => {
      console.log('Finished parsing stream.');
      resolve();
    });

    jsonStream.on('error', reject);
  });
}
*/
How it works: This Node.js snippet handles very large API responses by streaming the data instead of loading the entire payload into memory. It uses `node-fetch`, whose response body is a Node.js readable stream, and pipes that stream into a custom writable stream that processes each chunk as it arrives. While this example only forwards raw chunks, in a production environment you would integrate a dedicated streaming JSON parser (e.g., `stream-json`) to process individual JSON objects as they are parsed, significantly reducing memory consumption for large datasets.
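
If you are on Node.js 18 or later, the built-in global fetch can replace `node-fetch`, but its response body is a web ReadableStream rather than a Node.js readable stream. The sketch below is one way to adapt the same approach: it assumes `stream-json` is installed, the names `streamWithNativeFetch` and `onItem` are illustrative, and it converts the web stream with `Readable.fromWeb()` before wiring everything together with `stream.pipeline`, so an error in any stage rejects the returned promise.

const { Readable } = require('stream');
const { pipeline } = require('stream/promises');
const { parser } = require('stream-json');
const { streamArray } = require('stream-json/streamers/StreamArray');

// Sketch: stream and parse a large top-level JSON array using Node 18+ built-in fetch.
async function streamWithNativeFetch(apiUrl, onItem) {
  const response = await fetch(apiUrl); // global fetch, available in Node 18+
  if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);

  // Convert the web ReadableStream into a Node.js Readable so it can be piped.
  const nodeBody = Readable.fromWeb(response.body);

  await pipeline(
    nodeBody,
    parser(),      // tokenizes the incoming JSON text
    streamArray(), // emits { key, value } for each element of the array
    async (source) => {
      for await (const { key, value } of source) {
        onItem(key, value); // handle one parsed item at a time
      }
    }
  );
}

Usage would mirror the earlier example, e.g. `await streamWithNativeFetch(LARGE_DATA_API_URL, (key, value) => { /* handle item */ })`, with memory usage staying bounded because only one parsed item is held at a time.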
