Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Allow large Parse File uploads #9286

Open
wants to merge 21 commits into
base: alpha
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
94 changes: 94 additions & 0 deletions spec/FilesRouter.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
const fs = require('fs');
const path = require('path');

describe('FilesRouter', () => {
  describe('File Uploads', () => {
    // V8 cannot materialize a string larger than this many bytes, so any
    // upload at or above this size must avoid the base64-string code path.
    const V8_STRING_LIMIT_BYTES = 536_870_912;

    beforeAll(async () => {
      await reconfigureServer({
        maxUploadSize: '1GB',
        port: 8384,
      });
    });

    afterAll(async () => {
      // Re-init the server with the default config for subsequent suites
      // (per review: no need to close the server manually).
      await reconfigureServer();
    });

    /**
     * Uploads a local file to the server via the REST API.
     *
     * We go through fetch instead of constructing a Parse.File directly
     * because a Parse.File over 512MB would be read through the Web
     * FileReader API, which fails in this test environment.
     *
     * @param {string} fileName - the name to upload the file under
     * @param {string} filePath - the path to the file locally
     * @returns {Promise<Response>} the fetch response
     */
    const postFile = async (fileName, filePath) => {
      const url = `${Parse.serverURL}/files/${fileName}`;
      const headers = {
        'X-Parse-Application-Id': Parse.applicationId,
        'X-Parse-Master-Key': Parse.masterKey,
        // NOTE: Content-Type is intentionally NOT set here. fetch derives
        // the correct `multipart/form-data; boundary=...` header from the
        // FormData body; a hand-written header would lack the boundary.
      };

      // The Web FormData API accepts Blob/File values, not Node streams,
      // so wrap the file contents in a Blob.
      const formData = new FormData();
      formData.append('file', new Blob([fs.readFileSync(filePath)]), fileName);

      return fetch(url, {
        method: 'POST',
        headers,
        body: formData,
      });
    };

    /**
     * Creates a throw-away file of `sizeBytes` zero bytes, uploads it and
     * asserts the upload succeeded. The temporary file is always removed,
     * even when an expectation fails.
     *
     * @param {number} sizeBytes - size of the generated file in bytes
     */
    const expectUploadOk = async sizeBytes => {
      const filePath = path.join(__dirname, 'file.txt');
      fs.writeFileSync(filePath, Buffer.alloc(sizeBytes));
      try {
        const response = await postFile('file.txt', filePath);
        expect(response.ok).toBe(true);
      } finally {
        fs.unlinkSync(filePath);
      }
    };

    // Jasmine rejects async functions that also declare a `done` callback,
    // so these tests simply await the helper.
    it('should allow Parse.File uploads under 512MB', async () => {
      await expectUploadOk(1024 * 1024);
    });

    it('should allow Parse.File uploads exactly 512MB', async () => {
      await expectUploadOk(V8_STRING_LIMIT_BYTES);
    });

    it('should allow Parse.File uploads over 512MB', async () => {
      await expectUploadOk(V8_STRING_LIMIT_BYTES + 50 * 1024 * 1024);
    });
  });
});
2 changes: 1 addition & 1 deletion src/Adapters/Files/FilesAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ export class FilesAdapter {
/** Responsible for storing the file in order to be retrieved later by its filename
*
* @param {string} filename - the filename to save
* @param {*} data - the buffer of data from the file
 * @param {*} data - the representation of the file's data, as a Buffer or a Blob
mtrezza marked this conversation as resolved.
Show resolved Hide resolved
* @param {string} contentType - the supposed contentType
* @discussion the contentType can be undefined if the controller was not able to determine it
* @param {object} options - (Optional) options to be passed to file adapter (S3 File Adapter Only)
Expand Down
55 changes: 44 additions & 11 deletions src/Adapters/Files/GridFSBucketAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -68,24 +68,57 @@ export class GridFSBucketAdapter extends FilesAdapter {
const stream = await bucket.openUploadStream(filename, {
metadata: options.metadata,
});
if (this._encryptionKey !== null) {

// when working with a Blob, it could be over the max size of a buffer, so we need to stream it
if (typeof Blob !== 'undefined' && data instanceof Blob) {
const reader = data.stream().getReader();
const iv = crypto.randomBytes(16);
const cipher = this._encryptionKey !== null ? crypto.createCipheriv(this._algorithm, this._encryptionKey, iv) : null;

const processChunk = async ({ done, value }) => {
if (done) {
if (cipher) {
const finalChunk = Buffer.concat([cipher.final(), iv, cipher.getAuthTag()]);
await stream.write(finalChunk);
}
stream.end();
return;
}

if (cipher) {
value = cipher.update(value);
}

await stream.write(value);
reader.read().then(processChunk);
};
try {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv);
const encryptedResult = Buffer.concat([
cipher.update(data),
cipher.final(),
iv,
cipher.getAuthTag(),
]);
await stream.write(encryptedResult);
reader.read().then(processChunk);
} catch (err) {
return new Promise((resolve, reject) => {
return reject(err);
});
}
} else {
await stream.write(data);
if (this._encryptionKey !== null) {
try {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(this._algorithm, this._encryptionKey, iv);
const encryptedResult = Buffer.concat([
cipher.update(data),
cipher.final(),
iv,
cipher.getAuthTag(),
]);
await stream.write(encryptedResult);
} catch (err) {
return new Promise((resolve, reject) => {
return reject(err);
});
}
} else {
await stream.write(data);
}
}
stream.end();
return new Promise((resolve, reject) => {
Expand Down
34 changes: 29 additions & 5 deletions src/Routers/FilesRouter.js
Original file line number Diff line number Diff line change
Expand Up @@ -172,8 +172,22 @@ export class FilesRouter {
}
}

const base64 = req.body.toString('base64');
const file = new Parse.File(filename, { base64 }, contentType);
// If the request body is a buffer and its size is greater than the V8 string size limit
// we need to use a Blob to avoid the V8 string size limit
const MAX_V8_STRING_SIZE_BYTES = 536_870_912;

let file;

if (
typeof Blob !== 'undefined' &&
Buffer.isBuffer(req.body) &&
req.body?.length >= MAX_V8_STRING_SIZE_BYTES
) {
file = new Parse.File(filename, new Blob([req.body]), contentType);
} else {
file = new Parse.File(filename, { base64: req.body.toString('base64') }, contentType);
}

const { metadata = {}, tags = {} } = req.fileData || {};
try {
// Scan request data for denied keywords
Expand Down Expand Up @@ -213,8 +227,18 @@ export class FilesRouter {
// if the ParseFile returned is type uri, download the file before saving it
await addFileDataIfNeeded(fileObject.file);
// update fileSize
const bufferData = Buffer.from(fileObject.file._data, 'base64');
fileObject.fileSize = Buffer.byteLength(bufferData);
let fileData;
// if the file is a blob, get the size from the blob
if (typeof Blob !== 'undefined' && fileObject.file._source?.file instanceof Blob) {
// get the size of the blob
fileObject.fileSize = fileObject.file._source.file.size;
// set the file data
fileData = fileObject.file._source?.file;
} else {
const bufferData = Buffer.from(fileObject.file._data, 'base64');
fileObject.fileSize = Buffer.byteLength(bufferData);
fileData = bufferData;
}
// prepare file options
const fileOptions = {
metadata: fileObject.file._metadata,
Expand All @@ -228,7 +252,7 @@ export class FilesRouter {
const createFileResult = await filesController.createFile(
config,
fileObject.file._name,
bufferData,
fileData,
fileObject.file._source.type,
fileOptions
);
Expand Down
Loading