Appending Data to JSON File in Node.js Without Overwriting

How can I use Node.js to write JSON to a file while ensuring that new data is appended to the existing content instead of overwriting it?

I want to generate a JSON file that stores an array of objects, each containing an ID and its square value. If the file already exists, new elements should be added without removing the previous data. Here’s the code I’ve tried:

const fs = require('fs');

let obj = {
    table: []
};

// Appends five {id, square} entries to obj.table.
function addEntries() {
    for (let i = 0; i < 5; i++) {
        obj.table.push({
            id: i,
            square: i * i
        });
    }
}

// fs.exists is deprecated; fs.access with F_OK is the supported existence check.
fs.access('myjsonfile.json', fs.constants.F_OK, function (accessErr) {
    if (!accessErr) {
        console.log("File exists");

        fs.readFile('myjsonfile.json', function readFileCallback(err, data) {
            if (err) {
                console.log(err);
            } else {
                obj = JSON.parse(data);
                addEntries();

                let json = JSON.stringify(obj);
                // fs.writeFile requires a callback; omitting it throws in modern Node.
                fs.writeFile('myjsonfile.json', json, function (err) {
                    if (err) console.log(err);
                });
            }
        });
    } else {
        console.log("File does not exist");
        addEntries();

        let json = JSON.stringify(obj);
        fs.writeFile('myjsonfile.json', json, function (err) {
            if (err) console.log(err);
        });
    }
});

However, the issue I’m facing is that each time I run the code, the file gets overwritten instead of appending new entries. How can I properly update the JSON file while preserving existing data using Node.js?

Alright, here’s what’s happening… each time you run your script, it’s replacing the entire JSON file instead of appending new data. The right way to do this is simple: read the file, modify the data, then write it back. Try this:

const fs = require('fs');

const filePath = 'myjsonfile.json';

// Load the current contents (falling back to an empty table when the file
// is missing or unreadable), append five id/square pairs, and persist.
fs.readFile(filePath, (readErr, raw) => {
    let store = { table: [] };

    if (!readErr) {
        try {
            store = JSON.parse(raw);
        } catch (parseErr) {
            console.error("Error parsing JSON:", parseErr);
        }
    }

    // Append the new entries to the table.
    let i = 0;
    while (i < 5) {
        store.table.push({ id: i, square: i * i });
        i += 1;
    }

    // Persist the merged data, pretty-printed with 2-space indentation.
    fs.writeFile(filePath, JSON.stringify(store, null, 2), (writeErr) => {
        if (writeErr) console.error("Error writing file:", writeErr);
    });
});

Now, every time you run this, it keeps the existing data and appends new entries instead of wiping everything out. :rocket:

That’s a solid approach, but it silently falls back to an empty object on any read error, which can mask real problems. A variation is to check explicitly for the file’s existence with fs.access first, so the “file missing” and “file exists” cases are handled as distinct branches. Here’s how you can do it:

const fs = require('fs');

const filePath = 'myjsonfile.json';

// Check whether the file already exists, then merge the new entries in.
fs.access(filePath, fs.constants.F_OK, (err) => {
    let obj = { table: [] };

    if (!err) {
        // File exists: load its current contents before appending.
        try {
            obj = JSON.parse(fs.readFileSync(filePath, 'utf8'));
        } catch (e) {
            // Corrupt or unreadable JSON: fall back to a fresh table
            // instead of crashing the whole script.
            console.error("Error parsing JSON:", e);
        }
    }

    // BUG FIX: the original pushed new entries only when the file already
    // existed, so a first run produced an empty table. Append in both cases.
    for (let i = 0; i < 5; i++) {
        obj.table.push({ id: i, square: i * i });
    }

    fs.writeFile(filePath, JSON.stringify(obj, null, 2), (writeErr) => {
        if (writeErr) console.error("Error writing file:", writeErr);
    });
});

This method makes the two cases explicit, so a genuine read failure on an existing file surfaces as an error instead of quietly resetting your data. Note that it still parses and rewrites the whole file on each run — for very large datasets you’d want a streaming JSON parser or a proper database rather than a single JSON file.

Good points! Here’s one more variation: if you don’t need a wrapper object, you can store a plain top-level JSON array instead. Initialize the file with an empty array the first time, then parse it, push the new entries, and write it back:

const fs = require('fs');

const filePath = 'myjsonfile.json';

// If the file doesn't exist yet, create it holding an empty JSON array.
if (!fs.existsSync(filePath)) {
    fs.writeFileSync(filePath, '[]', 'utf8');
}

// Read the array, append the new entries, and write it back.
fs.readFile(filePath, (err, data) => {
    // BUG FIX: the original ignored read errors and would then crash on
    // JSON.parse of undefined data; bail out early instead.
    if (err) {
        console.error("Error reading file:", err);
        return;
    }

    let jsonArray;
    try {
        jsonArray = JSON.parse(data.toString());
    } catch (e) {
        // Don't clobber the file with partial data if it holds invalid JSON.
        console.error("Error parsing JSON:", e);
        return;
    }

    for (let i = 0; i < 5; i++) {
        jsonArray.push({ id: i, square: i * i });
    }

    fs.writeFile(filePath, JSON.stringify(jsonArray, null, 2), (writeErr) => {
        if (writeErr) console.error("Error writing file:", writeErr);
    });
});

This keeps the file as a simple top-level array, which is often all you need. Be aware that JSON has no true append operation — every run still parses and rewrites the entire file. If you genuinely need append-only writes at scale, consider newline-delimited JSON (one object per line via fs.appendFile) or a database instead of a single JSON document.