I'm currently building an application that takes in a user upload and then I want to encrypt it then save it to mongodb using the grid-fs library. I'm using multer to receive the file and them I'm trying to encrypt it then save it to the mongodb using the gridfs-stream library. Can someone take a look at this and help me out? I'm not sure how to work with streams in nodejs.
import { createCipher } from "crypto";
import { AES_PASSWORD } from "../config";
import stream from "stream";
import GridFs from "../models/GridFs";
const cipher = createCipher("aes-256-cbc", AES_PASSWORD);
const maxFileSize = 1024 * 1024 * 10; // 10MB
const fileChunkSize = 2 * 255 * 1024; // 510kb
const uploadController = (req,res,next) => {
// User information
const user_id = req.user._id;
// File meta data from the file infromation
const {
mimetype,
buffer,
size
} = req.file;
const content_type = mimetype;
// File information provided by Resumable.js
const {
resumableChunkNumber,
resumableChunkSize,
resumableCurrentChunkSize,
resumableTotalSize,
resumableType,
resumableIdentifier,
resumableFilename,
resumableRelativePath,
resumableTotalChunks
} = req.body;
/*
* Developer's note:
* Somehow the data being posted automatically by resumable.js
* is of type string. Thus we have to parse the string here
* to prevent errors when doing any comparisons
*/
const data = buffer;
const chunk_n = parseInt(resumableChunkNumber) - 1;
const chunk_total = parseInt(resumableTotalChunks);
const chunk_size = parseInt(resumableChunkSize);
const file_length = parseInt(resumableTotalSize);
const uid = resumableIdentifier;
const file_name = resumableFilename;
// Verify Chunk size as configured
if(chunk_size != fileChunkSize){
return res.status(400).json({
error: "chunk_size is not equal to configured chunk size as the backend"
});
}
// Ensure file size is not larger than max allowable at the moment
if( file_length && file_length > maxFileSize) {
return res.status(400).json({
error: "file length is greater than configured maxFileSize"
});
}
// Basic logic in ensuring the max number of chunks is correct when
// there is overlap
let expectedNumberOfChunks = file_length / chunk_size;
if(file_length % chunk_size){
expectedNumberOfChunks += 1;
}
if(chunk_n > expectedNumberOfChunks){
return res.status(400).json({
error: "chunk_n is greater than the number of expected chunks"
});
}
const writeStream = GridFs.createWriteStream({
content_type,
chunk_size,
chunk_n,
uid,
file_name,
file_length,
user_id
});
// All is good Start encrytion
const bufferStream = new stream.PassThrough();
try{
bufferStream.write(buffer);
bufferStream.pipe(cipher).pipe(writeStream).on("close", () => {
buffer.end();
return res.status(200).send("ok");
});
} catch(error){
console.log(error);
return res.status(500).json({ error });
}
};
export default uploadController;
Nothing is happening and the request just times out. I'm lost for words at the moment.
via Nate
No comments:
Post a Comment