Bug 1903127 - Move the single-file archive into the configured destination folder. r=backup-reviewers,fluent-reviewers,sthompson,flod,kpatenio
Differential Revision: https://phabricator.services.mozilla.com/D214451
@@ -52,6 +52,7 @@ ChromeUtils.defineESModuleGetters(lazy, {
ArchiveUtils: "resource:///modules/backup/ArchiveUtils.sys.mjs",
BasePromiseWorker: "resource://gre/modules/PromiseWorker.sys.mjs",
ClientID: "resource://gre/modules/ClientID.sys.mjs",
DownloadPaths: "resource://gre/modules/DownloadPaths.sys.mjs",
FileUtils: "resource://gre/modules/FileUtils.sys.mjs",
JsonSchema: "resource://gre/modules/JsonSchema.sys.mjs",
NetUtil: "resource://gre/modules/NetUtil.sys.mjs",
@@ -516,6 +517,13 @@ export class BackupService extends EventTarget {
*/
static #backupFolderName = null;

/**
* The name of the backup archive file. Should be localized.
*
* @see BACKUP_FILE_NAME
*/
static #backupFileName = null;

/**
* Set to true if a backup is currently in progress. Causes stateUpdate()
* to be called.
@@ -619,12 +627,28 @@ export class BackupService extends EventTarget {
*/
static get BACKUP_DIR_NAME() {
if (!BackupService.#backupFolderName) {
- BackupService.#backupFolderName =
- lazy.gFluentStrings.formatValueSync("backup-folder-name");
+ BackupService.#backupFolderName = lazy.DownloadPaths.sanitize(
+ lazy.gFluentStrings.formatValueSync("backup-folder-name")
+ );
}
return BackupService.#backupFolderName;
}

+ /**
+ * The localized name for the user's backup archive file. This will have
+ * `.html` appended to it before writing the archive file.
+ *
+ * @returns {string} The localized backup file name
+ */
+ static get BACKUP_FILE_NAME() {
+ if (!BackupService.#backupFileName) {
+ BackupService.#backupFileName = lazy.DownloadPaths.sanitize(
+ lazy.gFluentStrings.formatValueSync("backup-file-name")
+ );
+ }
+ return BackupService.#backupFileName;
+ }
/**
* The name of the folder within the profile folder where this service reads
* and writes state to.
@@ -854,6 +878,81 @@ export class BackupService extends EventTarget {
return Object.freeze(structuredClone(this.#_state));
}

/**
* Attempts to find the right folder to write the single-file archive to, and
* if it does not exist, to create it.
*
* If the configured destination's parent folder does not exist and cannot
* be recreated, we will fall back to `DEFAULT_PARENT_DIR_PATH`. If
* `DEFAULT_PARENT_DIR_PATH` happens to not exist or cannot be created, we will
* fall back to the home directory. If _that_ folder does not exist and cannot
* be recreated, this method will reject.
*
* @param {string} configuredDestFolderPath
* The currently configured destination folder for the archive.
* @returns {Promise<string, Error>}
*/
async resolveArchiveDestFolderPath(configuredDestFolderPath) {
lazy.logConsole.log(
"Resolving configured archive destination folder: ",
configuredDestFolderPath
);

// Try to create the configured folder ancestry. If that fails, we fall
// through to the fallback paths below, as if the folder had never been
// set.
try {
await IOUtils.makeDirectory(configuredDestFolderPath, {
createAncestors: true,
ignoreExisting: true,
});
return configuredDestFolderPath;
} catch (e) {
lazy.logConsole.warn("Could not create configured destination path: ", e);
}

lazy.logConsole.warn(
"The destination directory was invalid. Attempting to fall back to " +
"default parent folder: ",
BackupService.DEFAULT_PARENT_DIR_PATH
);
let fallbackFolderPath = PathUtils.join(
BackupService.DEFAULT_PARENT_DIR_PATH,
BackupService.BACKUP_DIR_NAME
);
try {
await IOUtils.makeDirectory(fallbackFolderPath, {
createAncestors: true,
ignoreExisting: true,
});
return fallbackFolderPath;
} catch (e) {
lazy.logConsole.warn("Could not create fallback destination path: ", e);
}

let homeDirPath = PathUtils.join(
Services.dirsvc.get("Home", Ci.nsIFile).path,
BackupService.BACKUP_DIR_NAME
);
lazy.logConsole.warn(
"The destination directory was invalid. Attempting to fall back to " +
"Home folder: ",
homeDirPath
);
try {
await IOUtils.makeDirectory(homeDirPath, {
createAncestors: true,
ignoreExisting: true,
});
return homeDirPath;
} catch (e) {
lazy.logConsole.warn("Could not create Home destination path: ", e);
throw new Error(
"Could not resolve to a writable destination folder path."
);
}
}
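// A rough illustration of the fallback order above (assumed example paths,
// not taken from this patch): with a configured folder on a drive that is no
// longer attached, the call usually resolves to the default parent folder,
// and only then to the home directory.
//
//   // configured: "D:\\Backups\\Restore Firefox" (drive unplugged)
//   // fallback 1: PathUtils.join(BackupService.DEFAULT_PARENT_DIR_PATH,
//   //             BackupService.BACKUP_DIR_NAME)
//   // fallback 2: PathUtils.join(homeDir, BackupService.BACKUP_DIR_NAME)
//   let destFolder = await backupService.resolveArchiveDestFolderPath(
//     configuredPath
//   );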

/**
* @typedef {object} CreateBackupResult
* @property {string} stagingPath
@@ -887,6 +986,14 @@ export class BackupService extends EventTarget {

try {
lazy.logConsole.debug(`Creating backup for profile at ${profilePath}`);

let archiveDestFolderPath = await this.resolveArchiveDestFolderPath(
lazy.backupDirPref
);
lazy.logConsole.debug(
`Destination for archive: ${archiveDestFolderPath}`
);

let manifest = await this.#createBackupManifest();

// First, check to see if a `backups` directory already exists in the
@@ -1023,18 +1130,24 @@ export class BackupService extends EventTarget {
);

// Now create the single-file archive. For now, we'll stash this in the
- // backups folder while we test this. It'll eventually get moved to the
- // user's configured backup path once that part is built out.
- let archivePath = PathUtils.join(backupDirPath, "archive.html");
- lazy.logConsole.log("Exporting single-file archive to ", archivePath);
+ // backups folder while it gets written. Once that's done, we'll attempt
+ // to move it to the user's configured backup path.
+ let archiveTmpPath = PathUtils.join(backupDirPath, "archive.html");
+ lazy.logConsole.log("Exporting single-file archive to ", archiveTmpPath);
await this.createArchive(
- archivePath,
+ archiveTmpPath,
BackupService.ARCHIVE_TEMPLATE,
compressedStagingPath,
this.#encState,
manifest.meta
);

+ let archivePath = await this.finalizeSingleFileArchive(
+ archiveTmpPath,
+ archiveDestFolderPath,
+ manifest.meta
+ );

let nowSeconds = Math.floor(Date.now() / 1000);
Services.prefs.setIntPref(LAST_BACKUP_TIMESTAMP_PREF_NAME, nowSeconds);
this.#_state.lastBackupDate = nowSeconds;
@@ -1049,6 +1162,88 @@ export class BackupService extends EventTarget {
}
}

/**
* Generates a string from a Date in the form of:
*
* YYYYMMDD-HHMM
*
* @param {Date} date
* The date to convert into the archive date suffix.
* @returns {string}
*/
generateArchiveDateSuffix(date) {
let year = date.getFullYear().toString();

// In all cases, months or days with single digits are expected to start
// with a 0.

// Note that getMonth() is 0-indexed for some reason, so we increment by 1.
let month = `${date.getMonth() + 1}`.padStart(2, "0");

let day = `${date.getDate()}`.padStart(2, "0");
let hours = `${date.getHours()}`.padStart(2, "0");
let minutes = `${date.getMinutes()}`.padStart(2, "0");

return `${year}${month}${day}-${hours}${minutes}`;
}
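// Worked example for the format above: June 7th, 2024 at 09:05 local time
// (note the 0-indexed month argument to the Date constructor) yields:
//
//   generateArchiveDateSuffix(new Date(2024, 5, 7, 9, 5)); // "20240607-0905"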

/**
* Moves the single-file archive into its configured location with a filename
* that is sanitized and contains a timestamp. This also removes any existing
* single-file archives in that same folder after the move completes.
*
* @param {string} sourcePath
* The file system location of the single-file archive prior to the move.
* @param {string} destFolder
* The folder that the single-file archive is configured to be eventually
* written to.
* @param {object} metadata
* The metadata for the backup. See the BackupManifest schema for details.
* @returns {Promise<string>}
* Resolves with the path that the single-file archive was moved to.
*/
async finalizeSingleFileArchive(sourcePath, destFolder, metadata) {
let archiveDateSuffix = this.generateArchiveDateSuffix(
new Date(metadata.date)
);

let existingChildren = await IOUtils.getChildren(destFolder);

const FILENAME_PREFIX = `${BackupService.BACKUP_FILE_NAME}_${metadata.profileName}`;
const FILENAME = `${FILENAME_PREFIX}_${archiveDateSuffix}.html`;
let destPath = PathUtils.join(destFolder, FILENAME);
lazy.logConsole.log("Moving single-file archive to ", destPath);
await IOUtils.move(sourcePath, destPath);

for (let childFilePath of existingChildren) {
let childFileName = PathUtils.filename(childFilePath);
// We check both the prefix and the suffix, because the prefix encodes
// the profile name in it. If there are other profiles from the same
// application performing backups, we don't want to accidentally remove
// those.
if (
childFileName.startsWith(FILENAME_PREFIX) &&
childFileName.endsWith(".html")
) {
if (childFileName == FILENAME) {
// Since filenames don't include seconds, this might occur if a
// backup was created seconds after the last one during the same
// minute. That tends not to happen in practice, but might occur
// during testing, in which case, we'll skip clearing this file.
lazy.logConsole.warn(
"Collided with a pre-existing archive name, so not clearing: ",
FILENAME
);
continue;
}
lazy.logConsole.debug("Getting rid of ", childFilePath);
await IOUtils.remove(childFilePath);
}
}

return destPath;
}
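// An illustration with assumed values: for a profile named "default-release"
// and a metadata date in June 2024, the archive lands at something like
//
//   <destFolder>/<BACKUP_FILE_NAME>_default-release_20240607-0905.html
//
// and any older <BACKUP_FILE_NAME>_default-release_*.html files already in
// destFolder are removed, while archives written by other profiles are left
// in place.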

/**
* Constructs the staging folder for the backup in the passed in backup
* folder. If a pre-existing staging folder exists, it will be cleared out.

@@ -49,20 +49,9 @@ class BackupTest(MarionetteTestCase):
self.marionette.start_session()
self.marionette.set_context("chrome")

- self.marionette.execute_script(
- """
- const DefaultBackupResources = ChromeUtils.importESModule("resource:///modules/backup/BackupResources.sys.mjs");
- let resourceKeys = [];
- for (const resourceName in DefaultBackupResources) {
- let resource = DefaultBackupResources[resourceName];
- resourceKeys.push(resource.key);
- }
- return resourceKeys;
- """
- )

+ archiveDestPath = os.path.join(tempfile.gettempdir(), "backup-dest")
recoveryCode = "This is a test password"
- originalArchivePath = self.marionette.execute_async_script(
+ archivePath = self.marionette.execute_async_script(
"""
const { OSKeyStore } = ChromeUtils.importESModule(
"resource://gre/modules/OSKeyStore.sys.mjs"
@@ -73,7 +62,9 @@ class BackupTest(MarionetteTestCase):
throw new Error("Could not get initialized BackupService.");
}

- let [recoveryCode, outerResolve] = arguments;
+ let [archiveDestPath, recoveryCode, outerResolve] = arguments;
+ bs.setParentDirPath(archiveDestPath);

(async () => {
// This is some hackery to make it so that OSKeyStore doesn't kick
// off an OS authentication dialog in our test, and also to make
@@ -94,21 +85,9 @@ class BackupTest(MarionetteTestCase):
return archivePath;
})().then(outerResolve);
""",
- script_args=[recoveryCode],
+ script_args=[archiveDestPath, recoveryCode],
)

- # When we switch over to the recovered profile, the Marionette framework
- # will blow away the profile directory of the one that we created the
- # backup on, which ruins our ability to do postRecovery work, since
- # that relies on the prior profile sticking around. We work around this
- # by moving the backup archive we got back to the OS temporary
- # directory, and telling the recovery method to use that instead of the
- # one from the profile directory.
- archivePath = os.path.join(tempfile.gettempdir(), "archive.html")
- # Delete the destination folder if it exists already
- shutil.rmtree(archivePath, ignore_errors=True)
- shutil.move(originalArchivePath, archivePath)

recoveryPath = os.path.join(tempfile.gettempdir(), "recovery")
shutil.rmtree(recoveryPath, ignore_errors=True)

@@ -42,6 +42,9 @@ const FAKE_METADATA = {

do_get_profile();

// Configure any backup files to get written into a temporary folder.
Services.prefs.setStringPref("browser.backup.location", PathUtils.tempDir);
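// (Presumably "browser.backup.location" is the pref surfaced to BackupService
// as lazy.backupDirPref in createBackup above; pointing it at
// PathUtils.tempDir keeps the archives these tests finalize out of any real
// user folders.)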

/**
* Some fake backup resource classes to test with.
*/

@@ -129,13 +129,12 @@ async function testCreateBackupHelper(sandbox, taskFn) {
let stagingPath = PathUtils.join(backupsFolderPath, "staging");

// For now, we expect a single backup only to be saved. There should also be
- // a single compressed file for the staging folder, and a single HTML file
- // export.
+ // a single compressed file for the staging folder.
let backupsChildren = await IOUtils.getChildren(backupsFolderPath);
Assert.equal(
backupsChildren.length,
- 3,
- "There should only be 3 items in the backups folder"
+ 2,
+ "There should only be 2 items in the backups folder"
);

// The folder and the compressed file should have the same filename, but
@@ -254,6 +253,21 @@ async function testCreateBackupHelper(sandbox, taskFn) {
1
);

let archiveDateSuffix = bs.generateArchiveDateSuffix(
new Date(manifest.meta.date)
);

// We also expect the HTML file to have been written to the folder pointed
// at by browser.backup.location, within the backupDirPath folder.
const EXPECTED_ARCHIVE_PATH = PathUtils.join(
bs.state.backupDirPath,
`${BackupService.BACKUP_FILE_NAME}_${manifest.meta.profileName}_${archiveDateSuffix}.html`
);
Assert.ok(
await IOUtils.exists(EXPECTED_ARCHIVE_PATH),
"Single-file backup archive was written."
);

taskFn(manifest);

// After createBackup is more fleshed out, we're going to want to make sure
@@ -261,6 +275,7 @@ async function testCreateBackupHelper(sandbox, taskFn) {
// ManifestEntry objects, and that the staging folder was successfully
// renamed with the current date.
await IOUtils.remove(fakeProfilePath, { recursive: true });
await IOUtils.remove(EXPECTED_ARCHIVE_PATH);
}

/**

@@ -0,0 +1,116 @@
/* Any copyright is dedicated to the Public Domain.
https://creativecommons.org/publicdomain/zero/1.0/ */

"use strict";

let gTestSourcePath;
let gTestDestPath;
let gMatchingRegex;

add_setup(async () => {
gMatchingRegex = new RegExp(
`^${BackupService.BACKUP_FILE_NAME}_[a-z0-9-]+_[0-9_-]+.html$`
);
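// For instance, assuming the en-US archive name "FirefoxBackup" (the literal
// the tasks below also use for their pre-existing files) and a profile name
// like "default-release", a moved file called
// "FirefoxBackup_default-release_20240607-0905.html" would match this pattern.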
gTestSourcePath = await IOUtils.createUniqueDirectory(
PathUtils.tempDir,
"testFinalizeSingleFileArchiveSource"
);
gTestDestPath = await IOUtils.createUniqueDirectory(
PathUtils.tempDir,
"testFinalizeSingleFileArchiveDest"
);

registerCleanupFunction(async () => {
await IOUtils.remove(gTestSourcePath, { recursive: true });
await IOUtils.remove(gTestDestPath, { recursive: true });
});
});

/**
* Utility function that writes a pretend archive file into gTestSourcePath,
* and then calls finalizeSingleFileArchive for it, passing
* gTestDestPath as the destination, and some metadata for encoding in the
* filename.
*
* Once the async testFn function resolves, the gTestSourcePath and
* gTestDestPath are cleared.
*
* @param {object} metadata
* The metadata to encode in the filename. See the BackupManifest schema for
* details.
* @param {Function} testFn
* An async testing function to run after calling finalizeSingleFileArchive.
*/
async function testFinalizeSingleFileArchive(metadata, testFn) {
let bs = new BackupService();
const TEST_FILE_PATH = PathUtils.join(gTestSourcePath, "test.txt");
await IOUtils.writeUTF8(TEST_FILE_PATH, "test");
let movedFilePath = await bs.finalizeSingleFileArchive(
TEST_FILE_PATH,
gTestDestPath,
metadata
);
let movedFile = PathUtils.filename(movedFilePath);
try {
await testFn(movedFile);
} finally {
// Clear out any files in the source and destination paths between tests.
let filePathsToClear = [
...(await IOUtils.getChildren(gTestSourcePath)),
...(await IOUtils.getChildren(gTestDestPath)),
];
for (let filePath of filePathsToClear) {
await IOUtils.remove(filePath);
}
}
}

/**
* Tests that a single file archive will get the expected filename when moved
* to the destination directory.
*/
add_task(async function test_filename() {
await testFinalizeSingleFileArchive(FAKE_METADATA, async movedFile => {
Assert.ok(movedFile.match(gMatchingRegex));
});
});

/**
* Tests that a single file archive will remove older backup files in the
* same directory.
*/
add_task(async function test_remove_old_files() {
const OLDER_BACKUP = PathUtils.join(
gTestDestPath,
`FirefoxBackup_${FAKE_METADATA.profileName}_20200101-0000.html`
);
await IOUtils.writeUTF8(OLDER_BACKUP, "test");

await testFinalizeSingleFileArchive(FAKE_METADATA, async movedFile => {
Assert.ok(movedFile.match(gMatchingRegex));
Assert.ok(
!(await IOUtils.exists(OLDER_BACKUP)),
"Older backup was deleted."
);
});
});

/**
* Tests that a single file archive will not remove older backup files for
* other profiles.
*/
add_task(async function test_remove_old_files_other_profile() {
const OLDER_BACKUP = PathUtils.join(
gTestDestPath,
`FirefoxBackup_SomeOtherProfile_20200101-0000.html`
);
await IOUtils.writeUTF8(OLDER_BACKUP, "test");

await testFinalizeSingleFileArchive(FAKE_METADATA, async movedFile => {
Assert.ok(movedFile.match(gMatchingRegex));
Assert.ok(
await IOUtils.exists(OLDER_BACKUP),
"Older backup from another profile was not deleted."
);
});
});
@@ -0,0 +1,286 @@
/* Any copyright is dedicated to the Public Domain.
https://creativecommons.org/publicdomain/zero/1.0/ */

"use strict";

const HOME_KEY = "Home";
let gTestRoot;
let gFakeHomePath;
let gFakeHomeFile;

add_setup(async () => {
gTestRoot = await IOUtils.createUniqueDirectory(
PathUtils.tempDir,
"testResolveArchiveDestFolderPath"
);
gFakeHomePath = PathUtils.join(gTestRoot, "FakeHome");
await IOUtils.makeDirectory(gFakeHomePath);

gFakeHomeFile = await IOUtils.getFile(gFakeHomePath);

let dirsvc = Services.dirsvc.QueryInterface(Ci.nsIProperties);
let originalFile;
try {
originalFile = dirsvc.get(HOME_KEY, Ci.nsIFile);
dirsvc.undefine(HOME_KEY);
} catch (e) {
// dirsvc.get will throw if nothing provides for the key, and dirsvc.undefine
// will throw if it's not a persistent entry. In either case, we don't want
// to set the original file in cleanup.
originalFile = undefined;
}

dirsvc.set(HOME_KEY, gFakeHomeFile);
registerCleanupFunction(() => {
dirsvc.undefine(HOME_KEY);
if (originalFile) {
dirsvc.set(HOME_KEY, originalFile);
}
});
});

/**
* Tests that we create the destination folder if the parent folder exists
* and the destination folder does not.
*/
add_task(async function test_create_folder() {
const PARENT_FOLDER = PathUtils.join(gTestRoot, "TestFolder");
await IOUtils.makeDirectory(PARENT_FOLDER);
let bs = new BackupService();

const DESTINATION_PATH = PathUtils.join(
PARENT_FOLDER,
BackupService.BACKUP_DIR_NAME
);
let path = await bs.resolveArchiveDestFolderPath(DESTINATION_PATH);

Assert.equal(path, DESTINATION_PATH, "Got back the expected folder path.");
Assert.ok(await IOUtils.exists(path), "The destination folder was created.");
Assert.equal(
(await IOUtils.getChildren(path)).length,
0,
"Destination folder should be empty."
);
await IOUtils.remove(PARENT_FOLDER, { recursive: true });
});

/**
* Tests that we will recreate the configured destination folder if the parent
* folder does not exist. This recreates the entire configured folder
* hierarchy.
*/
add_task(async function test_create_parent_folder_hierarchy() {
const MISSING_PARENT_FOLDER = PathUtils.join(gTestRoot, "DoesNotExistYet");
Assert.ok(
!(await IOUtils.exists(MISSING_PARENT_FOLDER)),
"Folder should not exist yet."
);
let bs = new BackupService();

const CONFIGURED_DESTINATION_PATH = PathUtils.join(
MISSING_PARENT_FOLDER,
BackupService.BACKUP_DIR_NAME
);
let path = await bs.resolveArchiveDestFolderPath(CONFIGURED_DESTINATION_PATH);
Assert.equal(
path,
CONFIGURED_DESTINATION_PATH,
"Got back the expected folder path."
);
Assert.ok(await IOUtils.exists(path), "The destination folder was created.");

await IOUtils.remove(MISSING_PARENT_FOLDER, { recursive: true });
});

/**
* Tests that we return the destination folder if the parent folder exists
* along with the destination folder.
*/
add_task(async function test_find_folder() {
const PARENT_FOLDER = PathUtils.join(gTestRoot, "TestFolder");
const DESTINATION_PATH = PathUtils.join(
PARENT_FOLDER,
BackupService.BACKUP_DIR_NAME
);
await IOUtils.makeDirectory(DESTINATION_PATH, { createAncestors: true });

let bs = new BackupService();
let path = await bs.resolveArchiveDestFolderPath(DESTINATION_PATH);

Assert.equal(path, DESTINATION_PATH, "Got back the expected folder path.");
Assert.ok(await IOUtils.exists(path), "The destination folder exists.");
Assert.equal(
(await IOUtils.getChildren(path)).length,
0,
"Destination folder should be empty."
);
await IOUtils.remove(PARENT_FOLDER, { recursive: true });
});

/**
* Tests that we fall back to the DEFAULT_PARENT_DIR_PATH folder if the
* configured path cannot be written to. This might happen if, for example, the
* configured destination is a removable drive that has been removed.
*/
add_task(async function test_fallback_to_default() {
const UNWRITABLE_PARENT = PathUtils.join(gTestRoot, "UnwritableParent");
await IOUtils.makeDirectory(UNWRITABLE_PARENT);
// Make the folder read-only across the board. 0o444 is the chmod numeric code
// for that.
await IOUtils.setPermissions(UNWRITABLE_PARENT, 0o444);

const CONFIGURED_FOLDER = PathUtils.join(
UNWRITABLE_PARENT,
"ImpossibleChild"
);
Assert.ok(
!(await IOUtils.exists(CONFIGURED_FOLDER)),
"Configured folder should not exist."
);

const DEFAULT_FOLDER = PathUtils.join(gTestRoot, "FakeDocuments");
await IOUtils.makeDirectory(DEFAULT_FOLDER);

let bs = new BackupService();
// Stub out the DEFAULT_PARENT_DIR_PATH into a folder path we control in this
// test, so that we don't pollute this machine's actual Documents folder.
let sandbox = sinon.createSandbox();
sandbox
.stub(BackupService, "DEFAULT_PARENT_DIR_PATH")
.get(() => DEFAULT_FOLDER);

const CONFIGURED_DESTINATION_PATH = PathUtils.join(
CONFIGURED_FOLDER,
BackupService.BACKUP_DIR_NAME
);
const EXPECTED_DESTINATION_PATH = PathUtils.join(
DEFAULT_FOLDER,
BackupService.BACKUP_DIR_NAME
);
let path = await bs.resolveArchiveDestFolderPath(CONFIGURED_DESTINATION_PATH);
Assert.equal(
path,
EXPECTED_DESTINATION_PATH,
"Got back the expected folder path."
);
Assert.ok(await IOUtils.exists(path), "The destination folder was created.");

await IOUtils.remove(DEFAULT_FOLDER, { recursive: true });
await IOUtils.remove(UNWRITABLE_PARENT, { recursive: true });
sandbox.restore();
});

/**
* Tests that we fall back to the Home folder if the configured path AND the
* DEFAULT_PARENT_DIR_PATH cannot be written to.
*/
add_task(async function test_fallback_to_home() {
const UNWRITABLE_PARENT = PathUtils.join(gTestRoot, "UnwritableParent");
await IOUtils.makeDirectory(UNWRITABLE_PARENT);
// Make the folder read-only across the board. 0o444 is the chmod numeric code
// for that.
await IOUtils.setPermissions(UNWRITABLE_PARENT, 0o444);

const CONFIGURED_FOLDER = PathUtils.join(
UNWRITABLE_PARENT,
"ImpossibleChild"
);
Assert.ok(
!(await IOUtils.exists(CONFIGURED_FOLDER)),
"Configured folder should not exist."
);

const DEFAULT_FOLDER = PathUtils.join(gTestRoot, "FakeDocuments");
await IOUtils.makeDirectory(DEFAULT_FOLDER);
await IOUtils.setPermissions(DEFAULT_FOLDER, 0o444);

let bs = new BackupService();
// Stub out the DEFAULT_PARENT_DIR_PATH into a folder path we control in this
// test, so that we don't pollute this machine's actual Documents folder.
let sandbox = sinon.createSandbox();
sandbox
.stub(BackupService, "DEFAULT_PARENT_DIR_PATH")
.get(() => DEFAULT_FOLDER);

const CONFIGURED_DESTINATION_PATH = PathUtils.join(
CONFIGURED_FOLDER,
BackupService.BACKUP_DIR_NAME
);
const EXPECTED_DESTINATION_PATH = PathUtils.join(
gFakeHomePath,
BackupService.BACKUP_DIR_NAME
);
let path = await bs.resolveArchiveDestFolderPath(CONFIGURED_DESTINATION_PATH);
Assert.equal(
path,
EXPECTED_DESTINATION_PATH,
"Got back the expected folder path."
);
Assert.ok(await IOUtils.exists(path), "The destination folder was created.");

await IOUtils.remove(EXPECTED_DESTINATION_PATH, { recursive: true });
await IOUtils.remove(DEFAULT_FOLDER, { recursive: true });
await IOUtils.remove(UNWRITABLE_PARENT, { recursive: true });
sandbox.restore();
});

/**
* Tests that if we fall back to the $HOME folder and somehow that doesn't
* exist either, then we reject.
*/
add_task(async function test_fallback_to_home_fail() {
const UNWRITABLE_PARENT = PathUtils.join(gTestRoot, "UnwritableParent");
await IOUtils.makeDirectory(UNWRITABLE_PARENT);
// Make the folder read-only across the board. 0o444 is the chmod numeric code
// for that.
await IOUtils.setPermissions(UNWRITABLE_PARENT, 0o444);

const CONFIGURED_FOLDER = PathUtils.join(
UNWRITABLE_PARENT,
"ImpossibleChild"
);
Assert.ok(
!(await IOUtils.exists(CONFIGURED_FOLDER)),
"Configured folder should not exist."
);

const DEFAULT_FOLDER = PathUtils.join(gTestRoot, "FakeDocuments");
await IOUtils.makeDirectory(DEFAULT_FOLDER);
await IOUtils.setPermissions(DEFAULT_FOLDER, 0o444);

const UNWRITABLE_HOME_FOLDER = PathUtils.join(gTestRoot, "UnwritableHome");
await IOUtils.makeDirectory(UNWRITABLE_HOME_FOLDER);
await IOUtils.setPermissions(UNWRITABLE_HOME_FOLDER, 0o444);

let unwritableHomeFolderFile = await IOUtils.getFile(UNWRITABLE_HOME_FOLDER);
let dirsvc = Services.dirsvc.QueryInterface(Ci.nsIProperties);
dirsvc.undefine(HOME_KEY);
dirsvc.set(HOME_KEY, unwritableHomeFolderFile);

// Stub out the DEFAULT_PARENT_DIR_PATH into a folder path we control in this
// test, so that we don't pollute this machine's actual Documents folder.
let sandbox = sinon.createSandbox();
sandbox
.stub(BackupService, "DEFAULT_PARENT_DIR_PATH")
.get(() => DEFAULT_FOLDER);

let bs = new BackupService();

const CONFIGURED_DESTINATION_PATH = PathUtils.join(
CONFIGURED_FOLDER,
BackupService.BACKUP_DIR_NAME
);

await Assert.rejects(
bs.resolveArchiveDestFolderPath(CONFIGURED_DESTINATION_PATH),
/Could not resolve/
);

sandbox.restore();
await IOUtils.remove(UNWRITABLE_HOME_FOLDER, { recursive: true });
await IOUtils.remove(DEFAULT_FOLDER, { recursive: true });
await IOUtils.remove(UNWRITABLE_PARENT, { recursive: true });

dirsvc.undefine(HOME_KEY);
dirsvc.set(HOME_KEY, gFakeHomeFile);
});
@@ -29,10 +29,14 @@ skip-if = ["apple_silicon && automation"] # bug 1729538
["test_BackupService_enable_disable_encryption.js"]
skip-if = ["apple_silicon && automation"] # bug 1729538

["test_BackupService_finalizeSingleFileArchive.js"]

["test_BackupService_recoverFromSnapshotFolder.js"]

["test_BackupService_renderTemplate.js"]

["test_BackupService_resolveArchiveDestFolderPath.js"]

["test_BackupService_scheduler.js"]

["test_BackupService_schema_versions.js"]