Skip to content

Create progress/metadata for import/backup actions #576

@mStirner

Description

@mStirner

Currently there is no progress/information about how far the import/export is.
Which may lead for ungeduldige to press the buttons again.

A progress could be calculated, when the number of total files is injected as metadata into the gzip header as extra fields.
A quick & dirty implmeentation with ChatGPT:
router.api.system.backup.js:

const crypto = require("crypto");
const zlib = require("zlib");
const path = require("path");
const fs = require("fs");
const { Writable, pipeline, PassThrough } = require("stream");
const { createInterface } = require("readline");
const { EOL } = require("os");
const { ObjectId } = require("mongodb");

const { client } = require("mongodb");
const tar = require("tar-stream");


const BASE_PATH = path.join(process.cwd(), "./plugins");
const ALGORITHM = "aes-256-cbc";

const GzipHeaderModifier = require("../system/gzip/injector.js");
const GzipExtraFieldExtractor = require("../system/gzip/extractor.js");


module.exports = (router) => {

    router.post("/export", async (req, res) => {


        const pack = tar.pack();
        const pass = new PassThrough();
        const metadata = {
            files: 0,
            includes: req.query.includes
        };

        const subfieldId = Buffer.from("MI"); // z.B. 'MI' für "MyInfo"
        //const metaBuffer = Buffer.from(JSON.stringify(metadata));
        const injector = new GzipHeaderModifier(subfieldId, metadata);

        // NOTE: add error listener here?

        res.setHeader("content-type", "application/tar+gzip");
        res.setHeader("content-disposition", `attachment; filename="OpenHaus-${Date.now()}.tgz"`);

        if (req.query.encrypt == "true") {

            const key = crypto.randomBytes(32);
            const iv = crypto.randomBytes(16);

            res.setHeader("X-ENCRYPTION-KEY", key.toString("hex"));
            res.setHeader("X-ENCRYPTION-IV", iv.toString("hex"));

            const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
            pack.pipe(zlib.createGzip()).pipe(cipher).pipe(pass);

        } else {

            pack.pipe(zlib.createGzip()).pipe(pass);

        }


        if ((req.query?.includes?.includes("database") || (!req.query?.includes && true))) {
            for await (let collection of client.listCollections()) {

                metadata.files += 1;

                // TODO: check/handle binary (serialized buffer objects)
                // > endpoint commands payload
                // > _id's should be mongodb object id's
                let data = (await client.collection(collection.name).find().toArray());
                pack.entry({ name: `database/${collection.name}.json` }, JSON.stringify(data));

            }
        }

        if ((req.query?.includes?.includes("plugins") || (!req.query?.includes && true))) {
            fs.readdirSync(BASE_PATH, {
                recursive: true
            }).filter((entry) => {

                // TODO: ignore .gitkeep file
                return !fs.statSync(path.join(BASE_PATH, entry)).isDirectory();

            }).map((entry) => {

                return [entry, fs.readFileSync(path.join(BASE_PATH, entry), "utf8")];

            }).forEach(([file, content]) => {

                metadata.files += 1;
                pack.entry({ name: `plugins/${file}` }, content);

            });
        }


        if ((req.query?.includes?.includes("env") || (!req.query?.includes && true)) && fs.existsSync(path.join(process.cwd(), ".env"))) {

            // encode .env value as base64, so the are not human readable
            let content = fs.readFileSync(path.join(process.cwd(), ".env"), "utf8").split(EOL).map((line) => {

                if (req.query?.encode !== "true") {
                    return line;
                }

                let [key, value] = line.split("=");

                if (!value) {
                    return line;
                }

                return `${key}=${Buffer.from(value).toString("base64")}`;

            });

            metadata.files += 1;
            pack.entry({ name: `.env` }, content.join(EOL));

        }

        console.log("MEtadata", metadata);
        pack.finalize();

        res.writeHead(200);
        pass.pipe(injector).pipe(res);

    });


    router.post("/import", async (req, res) => {

        // set deafult resotre includes to "all"
        req.query.includes = req?.query?.includes || [
            "database",
            "plugins",
            "env"
        ];

        // NOTE: this also deletes .gitkeep
        if (req.query?.truncate === "true") {
            for (let file of await fs.promises.readdir(BASE_PATH)) {
                await fs.promises.rm(path.join(BASE_PATH, file), {
                    recursive: true,
                    force: true
                });
            }
        }

        const extract = tar.extract();

        // NOTE: switch to `.once`?
        extract.on("error", (err) => {
            console.log("Terrible error", err);
            //process.exit(1);
        });


        // NOTE: switch to `.once`?
        extract.on("finish", () => {
            console.log("tar-stream finished");

            /*
            res.json({
                success: true,
                message: "Restart to apply changes!"
            });
            */

        });


        const extractor = new GzipExtraFieldExtractor("MI");
        const gzip = zlib.createGunzip();

        let metadata = null;
        let counter = 0;


        extractor.once("no-extra-fields", () => {
            console.log("No extra header, pipe regardelss, no progreess calaiction available");
            pipeline(extractor, gzip, extract, (err) => {
                if (err) {

                    res.status(500).json({
                        error: err.message,
                        details: err,
                        success: false
                    });

                } else {

                    res.json({
                        success: true,
                        message: "Restart to apply changes!"
                    });

                }
            });
        });

        extractor.once("metadata", (data) => {

            console.log("Metadata received", JSON.parse(data));

            metadata = JSON.parse(data);
            //extractor.pipe(gzip).pipe(extract);

            pipeline(extractor, gzip, extract, (err) => {
                if (err) {

                    res.status(500).json({
                        error: err.message,
                        details: err,
                        success: false
                    });

                } else {

                    res.json({
                        success: true,
                        message: "Restart to apply changes!"
                    });

                }
            });

        });


        if (req.query.encrypt == "true") {

            const key = Buffer.from(req.headers["x-encryption-key"], "hex");
            const iv = Buffer.from(req.headers["x-encryption-iv"], "hex");
            const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);

            pipeline(req, decipher, extractor, (err) => {
                if (err) {

                    console.error("encrypted", err);

                }
            });

        } else {

            pipeline(req, extractor, (err) => {
                //pipeline(req, gzip, extract, (err) => {
                if (err) {

                    console.error("uncrypted", err);

                }
            });

        }


        extract.on("entry", async (header, stream, next) => {

            counter += 1;

            console.log(`File ${counter}/${metadata?.files || 0}`);

            if (header.name.startsWith("database/") && req.query?.includes?.includes("database")) {

                console.log("restartoe database collection", header.name, header.size);

                let chunks = [];
                let name = header.name.replace("database/", "");

                let writeable = new Writable({
                    write(chunk, enc, cb) {
                        chunks.push(chunk);
                        cb(null);
                    }
                });

                stream.pipe(writeable).on("close", async () => {

                    // TODO: check/handle binary (serialized buffer objects)
                    // > endpoint commands payload
                    // > _id's should be mongodb object id's                    
                    let documents = JSON.parse(Buffer.concat(chunks).toString()).map((item) => {
                        // NOTE: Hotfix for #506
                        item._id = new ObjectId(item._id);
                        return item;
                    });

                    // prevents bulk write error
                    // MongoInvalidArgumentError: Invalid BulkOperation, Batch cannot be empty
                    if (documents.length === 0) {
                        next();
                        return;
                    }

                    //console.log("collection name", path.basename(name, ".json"));

                    let collection = client.collection(path.basename(name, ".json"));

                    if (req.query?.truncate === "true") {
                        await collection.deleteMany({});
                    }

                    collection.insertMany(documents).catch((err) => {
                        if (err?.code === 11000 && req.query?.skipDuplicates === "true") {
                            next();
                        } else {
                            next(err);
                        }
                    }).then(() => {
                        next();
                    });

                });

            } else if (header.name.startsWith("plugins/") && req.query?.includes?.includes("plugins")) {

                console.log("restroe plugin file", header.name, header.size);

                let name = header.name.replace("plugins/", "");

                fs.mkdirSync(path.dirname(path.join(BASE_PATH, name)), {
                    recursive: true
                });

                stream.pipe(fs.createWriteStream(path.join(BASE_PATH, name))).once("error", (err) => {
                    next(err);
                }).once("close", () => {
                    next();
                });

            } else if (header.name === ".env" && req.query?.includes?.includes("env")) {

                let envPath = path.join(process.cwd(), ".env");
                let fd = null;

                try {
                    if (req.query?.truncate === "true") {
                        fs.truncateSync(envPath, 0);
                    }
                } catch (err) {
                    // ignore
                } finally {
                    fd = fs.openSync(envPath, "w");
                }

                let rl = createInterface({
                    input: stream
                });

                rl.once("error", (err) => {
                    fs.closeSync(fd);
                    next(err);
                });

                rl.once("close", () => {
                    fs.closeSync(fd);
                    next();
                });

                rl.on("line", (line) => {

                    let [key, value] = line.split("=");

                    if (!key || !value || req.query?.decode !== "true") {
                        return fs.writeSync(fd, line + EOL);
                    }

                    line = `${key}=${Buffer.from(value, "base64").toString()}`;
                    fs.writeSync(fd, line + EOL);

                });

            } else {

                //console.log("unknown file prefix/name", header);
                next();

            }
        });




    });

};

system/gzip/injector.js

const { Writable, pipeline, PassThrough, Transform } = require("stream");

module.exports = class GzipHeaderModifier extends Transform {
    constructor(subfieldIdBuffer, metadata) {
        super();
        this.subfieldIdBuffer = subfieldIdBuffer; // 2 Byte Buffer
        //this.metadataBuffer = metadataBuffer;
        this.metadata = metadata;
        this.headerModified = false;
        this.headerBuffer = Buffer.alloc(0);
    }

    _transform(chunk, encoding, callback) {
        if (!this.headerModified) {
            this.headerBuffer = Buffer.concat([this.headerBuffer, chunk]);

            if (this.headerBuffer.length >= 10) {
                if (this.headerBuffer[0] !== 0x1f || this.headerBuffer[1] !== 0x8b) {
                    return callback(new Error("Kein gültiger Gzip-Stream"));
                }

                const FLG = this.headerBuffer[3];
                this.headerBuffer[3] = FLG | 0x04;

                let metadataBuffer = Buffer.from(JSON.stringify(this.metadata));

                // Subfield: 2 Byte ID + 2 Byte Länge + Daten
                const subfieldLength = metadataBuffer.length;
                const subfield = Buffer.concat([
                    this.subfieldIdBuffer,                                 // 2 Byte ID
                    Buffer.from([subfieldLength & 0xff, subfieldLength >> 8]), // 2 Byte Länge (LE)
                    metadataBuffer                                      // N Byte Daten
                ]);

                // Extra-Field Länge (nur dieser Subfield, hier)
                const xlen = subfield.length;
                const extraField = Buffer.concat([
                    Buffer.from([xlen & 0xff, xlen >> 8]), // 2 Byte XLEN (LE)
                    subfield
                ]);

                const newHeader = Buffer.concat([
                    this.headerBuffer.slice(0, 10),
                    extraField,
                    this.headerBuffer.slice(10)
                ]);

                this.push(newHeader);
                this.headerModified = true;
                callback();
            } else {
                callback();
            }
        } else {
            this.push(chunk);
            callback();
        }
    }
}

system/gzip/extractor.js

const { Transform } = require("stream");

module.exports = class GzipExtraFieldExtractor extends Transform {
    constructor(targetSubfieldId) {
        super();
        this.targetSubfieldId = targetSubfieldId; // z.B. 'MI'
        this.headerBuffer = Buffer.alloc(0);
        this.headerParsed = false;
        this.metadata = null;
    }

    _transform(chunk, encoding, callback) {
        if (!this.headerParsed) {
            this.headerBuffer = Buffer.concat([this.headerBuffer, chunk]);

            // Mindestens bis zum FLG-Byte vorhanden?
            if (this.headerBuffer.length >= 10) {
                if (this.headerBuffer[0] !== 0x1f || this.headerBuffer[1] !== 0x8b) {
                    return callback(new Error("Kein gültiger Gzip-Stream"));
                }

                const FLG = this.headerBuffer[3];
                const hasExtra = (FLG & 0x04) !== 0;
                let offset = 10;

                if (hasExtra) {
                    if (this.headerBuffer.length >= offset + 2) {
                        const xlen = this.headerBuffer.readUInt16LE(offset);
                        offset += 2;

                        if (this.headerBuffer.length >= offset + xlen) {
                            const extraFieldData = this.headerBuffer.slice(offset, offset + xlen);

                            // Extra-Field parsen
                            let subOffset = 0;
                            while (subOffset + 4 <= extraFieldData.length) {
                                const subfieldId = extraFieldData.slice(subOffset, subOffset + 2).toString("ascii");
                                const subfieldLen = extraFieldData.readUInt16LE(subOffset + 2);

                                if (subOffset + 4 + subfieldLen > extraFieldData.length) {
                                    break; // Ungültige Länge
                                }

                                if (subfieldId === this.targetSubfieldId) {
                                    this.metadata = extraFieldData.slice(subOffset + 4, subOffset + 4 + subfieldLen);
                                    this.emit("metadata", this.metadata);
                                    break;
                                }

                                subOffset += 4 + subfieldLen;
                            }

                            // Alles da, Header als gelesen markieren
                            this.headerParsed = true;
                            this.push(this.headerBuffer);
                            this.headerBuffer = null;
                            return callback();
                        }
                    }
                } else {
                    // Kein ExtraField — Event auslösen
                    this.emit("no-extra-fields");
                    this.headerParsed = true;
                    this.push(this.headerBuffer);
                    this.headerBuffer = null;
                    return callback();
                }
            }

            // Noch nicht genug Header-Daten gesammelt
            callback();
        } else {
            // Restlichen Stream direkt durchreichen
            this.push(chunk);
            callback();
        }
    }
}

Extractor/injector could also be used for plugins.

Metadata

Metadata

Assignees

No one assigned

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions