Bug 1902020 - Fix a math error in calculating binary blob content length when it divides evenly by chunk size. r=backup-reviewers,fchasen
Differential Revision: https://phabricator.services.mozilla.com/D213481
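To make the error concrete, here is a minimal sketch of the Content-Length accounting before and after the fix. It assumes standard base64 growth (4 output bytes per 3 input bytes, padded per chunk) and omits the newline and encryption handling done by the worker's #computeChunkBase64Bytes; the chunkBase64Bytes helper and the fixed flag are illustrative, not part of the patch.

    // chunkBase64Bytes stands in for the worker's #computeChunkBase64Bytes,
    // which additionally takes an encryption flag.
    function chunkBase64Bytes(byteCount) {
      // Base64 emits 4 output bytes for every 3 input bytes, padded per chunk.
      return 4 * Math.ceil(byteCount / 3);
    }

    function totalBase64Bytes(totalBytesToRead, chunkSize, fixed) {
      let totalChunks = Math.ceil(totalBytesToRead / chunkSize);
      let fullSizeChunks = totalChunks - 1;
      let total = fullSizeChunks * chunkBase64Bytes(chunkSize);
      let leftoverChunkBytes = totalBytesToRead % chunkSize;
      if (leftoverChunkBytes) {
        total += chunkBase64Bytes(leftoverChunkBytes);
      } else if (fixed) {
        // The fix: an even division means the last chunk is full-sized,
        // not zero-sized, so it still contributes a full chunk of base64.
        total += chunkBase64Bytes(chunkSize);
      }
      return total;
    }

    // 3000 bytes in 500-byte chunks divides evenly into 6 chunks:
    totalBase64Bytes(3000, 500, false); // 3340: old math counts only 5 chunks
    totalBase64Bytes(3000, 500, true); // 4008: fixed math counts all 6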
@@ -100,6 +100,8 @@ class ArchiveWorker {
    *   The path on the file system where the compressed backup file is located.
    * @param {EncryptionArgs} [params.encryptionArgs=undefined]
    *   Optional EncryptionArgs, which will be used to encrypt this archive.
+   * @param {number} params.chunkSize
+   *   The size of the chunks to break the byte stream into for encoding.
    * @returns {Promise<undefined>}
    */
   async constructArchive({
@@ -108,6 +110,7 @@ class ArchiveWorker {
     backupMetadata,
     compressedBackupSnapshotPath,
     encryptionArgs,
+    chunkSize,
   }) {
     let encryptor = null;
     if (encryptionArgs) {
@@ -177,30 +180,30 @@ ${JSON.stringify(jsonBlock)}

     // To calculate the Content-Length of the base64 block, we start by
     // computing how many newlines we'll be adding...
-    let totalNewlines = Math.ceil(
-      totalBytesToRead / ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE
-    );
+    let totalNewlines = Math.ceil(totalBytesToRead / chunkSize);

-    // Next, we determine how many full-sized chunks of
-    // ARCHIVE_CHUNK_MAX_BYTES_SIZE we'll be using, and multiply that by the
-    // number of base64 bytes that such a chunk will require.
+    // Next, we determine how many full-sized chunks of chunkSize we'll be
+    // using, and multiply that by the number of base64 bytes that such a chunk
+    // will require.
     let fullSizeChunks = totalNewlines - 1;
     let fullSizeChunkBase64Bytes = this.#computeChunkBase64Bytes(
-      ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE,
+      chunkSize,
       !!encryptor
     );
     let totalBase64Bytes = fullSizeChunks * fullSizeChunkBase64Bytes;

-    // Finally, if there are any leftover bytes that are less than
-    // ARCHIVE_CHUNK_MAX_BYTES_SIZE, determine how many bytes those will
-    // require, and add it to our total.
-    let leftoverChunkBytes =
-      totalBytesToRead % ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE;
+    // Finally, if there are any leftover bytes that are less than chunkSize,
+    // determine how many bytes those will require, and add it to our total.
+    let leftoverChunkBytes = totalBytesToRead % chunkSize;
     if (leftoverChunkBytes) {
       totalBase64Bytes += this.#computeChunkBase64Bytes(
         leftoverChunkBytes,
         !!encryptor
       );
+    } else {
+      // We divided perfectly by chunkSize, so add another
+      // fullSizeChunkBase64Bytes to the total.
+      totalBase64Bytes += fullSizeChunkBase64Bytes;
     }

     await IOUtils.writeUTF8(
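As a quick sanity check on the per-chunk figure above (an illustrative snippet, not part of the patch; it assumes each chunk is base64-encoded independently with padding, as the code in this hunk does):

    // A 500-byte chunk (the size the new test below overrides to) pads out to
    // 4 * Math.ceil(500 / 3) = 668 base64 bytes, even though 500 is not a
    // multiple of 3. Six such chunks cover a 3000-byte blob exactly.
    const chunk = new Uint8Array(500);
    const encoded = btoa(String.fromCharCode(...chunk));
    console.assert(encoded.length === 4 * Math.ceil(500 / 3)); // 668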
@@ -220,10 +223,7 @@ Content-Length: ${totalBase64Bytes}
     // encryption will be done.
     let currentIndex = 0;
     while (currentIndex < totalBytesToRead) {
-      let bytesToRead = Math.min(
-        ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE,
-        totalBytesToRead - currentIndex
-      );
+      let bytesToRead = Math.min(chunkSize, totalBytesToRead - currentIndex);
       if (bytesToRead <= 0) {
         throw new Error(
           "Failed to calculate the right number of bytes to read."
@@ -236,8 +236,7 @@ Content-Length: ${totalBase64Bytes}
       let bytesToWrite;

       if (encryptor) {
-        let isLastChunk =
-          bytesToRead < ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE;
+        let isLastChunk = bytesToRead < chunkSize;
         bytesToWrite = await encryptor.encrypt(buffer, isLastChunk);
       } else {
         bytesToWrite = buffer;
@@ -1023,19 +1023,27 @@ export class BackupService extends EventTarget {
   * @param {object} backupMetadata
   *   The metadata for the backup, which is also stored in the backup manifest
   *   of the compressed backup snapshot.
+  * @param {object} options
+  *   Options to pass to the worker, mainly for testing.
+  * @param {object} [options.chunkSize=ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE]
+  *   The chunk size to break the bytes into.
   */
  async createArchive(
    archivePath,
    templateURI,
    compressedBackupSnapshotPath,
    encState,
-    backupMetadata
+    backupMetadata,
+    options = {}
  ) {
    let worker = new lazy.BasePromiseWorker(
      "resource:///modules/backup/Archive.worker.mjs",
      { type: "module" }
    );

+    let chunkSize =
+      options.chunkSize || lazy.ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE;
+
    try {
      let encryptionArgs = encState
        ? {
@@ -1054,6 +1062,7 @@ export class BackupService extends EventTarget {
         backupMetadata,
         compressedBackupSnapshotPath,
         encryptionArgs,
+        chunkSize,
       },
     ]);
   } finally {
@@ -164,3 +164,67 @@ add_task(async function test_createArchive_encrypted() {
   await IOUtils.remove(FAKE_ARCHIVE_PATH);
   await IOUtils.remove(EXTRACTION_PATH);
 });
+
+/**
+ * Tests that an archive can be created where the bytes of the archive are
+ * a multiple of 6, but the individual chunks of those bytes are not a multiple
+ * of 6 (which will necessitate base64 padding).
+ */
+add_task(async function test_createArchive_multiple_of_six_test() {
+  let bs = new BackupService();
+
+  const FAKE_ARCHIVE_PATH = PathUtils.join(
+    testProfilePath,
+    "fake-unencrypted-archive.html"
+  );
+  const FAKE_COMPRESSED_FILE = PathUtils.join(
+    testProfilePath,
+    "fake-compressed-staging.zip"
+  );
+
+  // Instead of generating a gigantic chunk of data to test this particular
+  // case, we'll override the default chunk size. We'll choose a chunk size of
+  // 500 bytes, which doesn't divide evenly by 6 - but we'll encode a set of
+  // 6 * 500 bytes, which will naturally divide evenly by 6.
+  const NOT_MULTIPLE_OF_SIX_OVERRIDE_CHUNK_SIZE = 500;
+  const MULTIPLE_OF_SIX_SIZE_IN_BYTES = 6 * 500;
+  let multipleOfSixBytes = new Uint8Array(MULTIPLE_OF_SIX_SIZE_IN_BYTES);
+
+  // seededRandomNumberGenerator is defined in head.js, but eslint doesn't seem
+  // happy about it. Maybe that's because it's a generator function.
+  // eslint-disable-next-line no-undef
+  let gen = seededRandomNumberGenerator();
+  for (let i = 0; i < MULTIPLE_OF_SIX_SIZE_IN_BYTES; ++i) {
+    multipleOfSixBytes.set(gen.next().value, i);
+  }
+
+  await IOUtils.write(FAKE_COMPRESSED_FILE, multipleOfSixBytes);
+
+  await bs.createArchive(
+    FAKE_ARCHIVE_PATH,
+    archiveTemplateURI,
+    FAKE_COMPRESSED_FILE,
+    null /* no ArchiveEncryptionState */,
+    FAKE_METADATA,
+    {
+      chunkSize: NOT_MULTIPLE_OF_SIX_OVERRIDE_CHUNK_SIZE,
+    }
+  );
+
+  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");
+  await bs.extractCompressedSnapshotFromArchive(
+    FAKE_ARCHIVE_PATH,
+    EXTRACTION_PATH
+  );
+
+  let writtenBytes = await IOUtils.read(EXTRACTION_PATH);
+  assertUint8ArraysSimilarity(
+    writtenBytes,
+    multipleOfSixBytes,
+    true /* expectSimilar */
+  );
+
+  await IOUtils.remove(FAKE_COMPRESSED_FILE);
+  await IOUtils.remove(FAKE_ARCHIVE_PATH);
+  await IOUtils.remove(EXTRACTION_PATH);
+});