Bug 1939658 - Replace new URL usage with URL.parse/URL.canParse where possible. r=robwu,Standard8,Gijs,mtigley,pdahiya,settings-reviewers,credential-management-reviewers,devtools-reviewers,tabbrowser-reviewers,places-reviewers,omc-reviewers,migration-reviewers,firefox-desktop-core-reviewers ,home-newtab-reviewers,webcompat-reviewers,urlbar-reviewers,twisniewski,mossop,dao,nchevobbe,webdriver-reviewers,whimboo,issammani,mconley,nbarrett,beth
Differential Revision: https://phabricator.services.mozilla.com/D233071
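A minimal sketch of the pattern this patch applies (illustrative only; `input` is a placeholder string, not code from the patch):

// Before: rely on the URL constructor throwing on invalid input.
let url;
try {
  url = new URL(input);
} catch (e) {
  url = null;
}

// After: URL.parse() returns null instead of throwing, and
// URL.canParse() returns a boolean when only validity matters.
let parsed = URL.parse(input); // URL object, or null if invalid
if (URL.canParse(input)) {
  // input is a valid absolute URL
}
// Gecko chrome-only helpers also used at some call sites:
// URL.parse(input)?.URI yields an nsIURI, and URL.fromURI() wraps an existing nsIURI.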
@@ -259,8 +259,10 @@ export class ContextMenuChild extends JSWindowActorChild {
let ctx = canvas.getContext("2d");
ctx.drawImage(target, 0, 0);
let dataURL = canvas.toDataURL();
let url = new URL(target.ownerDocument.location.href).pathname;
let imageName = url.substr(url.lastIndexOf("/") + 1);
let url = target.ownerDocument.location;
let imageName = url.pathname.substr(
url.pathname.lastIndexOf("/") + 1
);
return Promise.resolve({ failed: false, dataURL, imageName });
} catch (e) {
console.error(e);
@@ -309,7 +311,7 @@ export class ContextMenuChild extends JSWindowActorChild {
if (href) {
// Handle SVG links:
if (typeof href == "object" && href.animVal) {
return this._makeURLAbsolute(this.context.link.baseURI, href.animVal);
return new URL(href.animVal, this.context.link.baseURI).href;
}

return href;
@@ -325,7 +327,7 @@ export class ContextMenuChild extends JSWindowActorChild {
throw new Error("Empty href");
}

return this._makeURLAbsolute(this.context.link.baseURI, href);
return new URL(href, this.context.link.baseURI).href;
}

_getLinkURI() {
@@ -436,10 +438,6 @@ export class ContextMenuChild extends JSWindowActorChild {
return urls[0];
}

_makeURLAbsolute(aBase, aUrl) {
return Services.io.newURI(aUrl, null, Services.io.newURI(aBase)).spec;
}

_isProprietaryDRM() {
return (
this.context.target.isEncrypted &&
@@ -998,10 +996,10 @@ export class ContextMenuChild extends JSWindowActorChild {
const descURL = context.target.getAttribute("longdesc");

if (descURL) {
context.imageDescURL = this._makeURLAbsolute(
context.target.ownerDocument.body.baseURI,
descURL
);
context.imageDescURL = new URL(
descURL,
context.target.ownerDocument.body.baseURI
).href;
}
} else if (
this.contentWindow.HTMLCanvasElement.isInstance(context.target)
@@ -1088,10 +1086,7 @@ export class ContextMenuChild extends JSWindowActorChild {

if (computedURL) {
context.hasBGImage = true;
context.bgImageURL = this._makeURLAbsolute(
bodyElt.baseURI,
computedURL
);
context.bgImageURL = new URL(computedURL, bodyElt.baseURI).href;
}
}
}
@@ -1177,7 +1172,7 @@ export class ContextMenuChild extends JSWindowActorChild {

if (bgImgUrl) {
context.hasBGImage = true;
context.bgImageURL = this._makeURLAbsolute(elem.baseURI, bgImgUrl);
context.bgImageURL = new URL(bgImgUrl, elem.baseURI).href;
}
}
}
@@ -189,18 +189,14 @@ export class PageInfoChild extends JSWindowActorChild {
!elem.hasAttribute("alt")
);
} else if (content.SVGImageElement.isInstance(elem)) {
try {
// Note: makeURLAbsolute will throw if either the baseURI is not a valid URI
// or the URI formed from the baseURI and the URL is not a valid URI.
if (elem.href.baseVal) {
let href = Services.io.newURI(
elem.href.baseVal,
null,
Services.io.newURI(elem.baseURI)
).spec;
// Note: makeURLAbsolute will throw if either the baseURI is not a valid URI
// or the URI formed from the baseURI and the URL is not a valid URI.
if (elem.href.baseVal) {
let href = URL.parse(elem.href.baseVal, elem.baseURI)?.href;
if (href) {
addMedia(href, "img", "", elem, false);
}
} catch (e) {}
}
} else if (content.HTMLVideoElement.isInstance(elem)) {
addMedia(elem.currentSrc, "video", "", elem, false);
} else if (content.HTMLAudioElement.isInstance(elem)) {

@@ -1227,10 +1227,8 @@ class DomainExtractor {

let href = element.getAttribute("href");

let url;
try {
url = new URL(href, origin);
} catch (ex) {
let url = URL.parse(href, origin);
if (!url) {
continue;
}

@@ -1242,9 +1240,8 @@ class DomainExtractor {
if (queryParam) {
let paramValue = url.searchParams.get(queryParam);
if (queryParamValueIsHref) {
try {
paramValue = new URL(paramValue).hostname;
} catch (e) {
paramValue = URL.parse(paramValue)?.hostname;
if (!paramValue) {
continue;
}
paramValue = this.#processDomain(paramValue, providerName);
@@ -1339,9 +1336,8 @@ class DomainExtractor {
textContent = "https://" + textContent;
}

try {
domain = new URL(textContent).hostname;
} catch (e) {
domain = URL.parse(textContent)?.hostname;
if (!domain) {
domain = fixup(textContent);
}
} else {

@@ -203,13 +203,11 @@ var gBrowserInit = {
// Hack to ensure that the various initial pages favicon is loaded
// instantaneously, to avoid flickering and improve perceived performance.
this._callWithURIToLoad(uriToLoad => {
let url;
try {
url = Services.io.newURI(uriToLoad);
} catch (e) {
let url = URL.parse(uriToLoad);
if (!url) {
return;
}
let nonQuery = url.prePath + url.filePath;
let nonQuery = url.URI.prePath + url.pathname;
if (nonQuery in gPageIcons) {
gBrowser.setIcon(gBrowser.selectedTab, gPageIcons[nonQuery]);
}
@@ -4597,7 +4597,7 @@ function setToolbarVisibility(
break;
case "newtab":
default: {
let currentURI = gBrowser?.currentURI;
let currentURI;
if (!gBrowserInit.domContentLoaded) {
let uriToLoad = gBrowserInit.uriToLoadPromise;
if (uriToLoad) {
@@ -4605,10 +4605,13 @@ function setToolbarVisibility(
// We only care about the first tab being loaded
uriToLoad = uriToLoad[0];
}
try {
currentURI = Services.io.newURI(uriToLoad);
} catch (ex) {}
currentURI = URL.parse(uriToLoad)?.URI;
if (!currentURI) {
currentURI = gBrowser?.currentURI;
}
}
} else {
currentURI = gBrowser.currentURI;
}
isVisible = BookmarkingUI.isOnNewTabPage(currentURI);
break;

@@ -2625,12 +2625,9 @@ export class nsContextMenu {
return "";
}

try {
// If the underlying link text is a URL, we should not offer to translate.
new URL(linkText);
if (URL.canParse(linkText)) {
// The underlying link text is a URL, we should not offer to translate.
return "";
} catch {
// A URL could not be parsed from the unerlying link text.
}

// Since the underlying link text is not a URL, we should offer to translate it.

@@ -44,7 +44,7 @@ ChromeUtils.defineLazyGetter(lazy, "gWindowsAlertsService", () => {
});

// One-time startup homepage override configurations
const ONCE_DOMAINS = ["mozilla.org", "firefox.com"];
const ONCE_DOMAINS = new Set(["mozilla.org", "firefox.com"]);
const ONCE_PREF = "browser.startup.homepage_override.once";

// Index of Private Browsing icon in firefox.exe
@@ -1023,20 +1023,18 @@ nsBrowserContentHandler.prototype = {
overridePage = url
.split("|")
.map(val => {
try {
return new URL(val);
} catch (ex) {
let parsed = URL.parse(val);
if (!parsed) {
// Invalid URL, so filter out below
console.error("Invalid once url:", ex);
return null;
console.error(`Invalid once url: ${val}`);
}
return parsed;
})
.filter(
parsed =>
parsed &&
parsed.protocol == "https:" &&
parsed?.protocol == "https:" &&
// Only accept exact hostname or subdomain; without port
ONCE_DOMAINS.includes(
ONCE_DOMAINS.has(
Services.eTLD.getBaseDomainFromHost(parsed.host)
)
)

@@ -101,11 +101,8 @@ export class LoginBreachAlert extends MozLitElement {
}

get displayHostname() {
try {
return new URL(this.hostname).hostname;
} catch (err) {
return this.hostname;
}
let url = URL.parse(this.hostname);
return url?.hostname ?? this.hostname;
}

render() {
@@ -190,7 +190,7 @@ async function insertBookmark(bookmark) {
let parentGuid = await getParentGuid(bookmark.Placement, bookmark.Folder);

await lazy.PlacesUtils.bookmarks.insert({
url: Services.io.newURI(bookmark.URL.href),
url: bookmark.URL.URI,
title: bookmark.Title,
guid: lazy.PlacesUtils.generateGuidWithPrefix(
BookmarksPolicies.BOOKMARK_GUID_PREFIX

@@ -89,12 +89,10 @@ export var ProxyPolicies = {
}

function setProxyHostAndPort(type, address) {
let url;
try {
// Prepend https just so we can use the URL parser
// instead of parsing manually.
url = new URL(`https://${address}`);
} catch (e) {
// Prepend https just so we can use the URL parser
// instead of parsing manually.
let url = URL.parse(`https://${address}`);
if (!url) {
lazy.log.error(`Invalid address for ${type} proxy: ${address}`);
return;
}

@@ -107,22 +107,20 @@ export let WebsiteFilter = {

shouldLoad(contentLocation, loadInfo) {
let contentType = loadInfo.externalContentPolicyType;
let url = contentLocation.spec;
let url = contentLocation.spec.toLowerCase();
if (contentLocation.scheme == "view-source") {
url = contentLocation.pathQueryRef;
} else if (url.toLowerCase().startsWith("about:reader")) {
url = decodeURIComponent(
url.toLowerCase().substr("about:reader?url=".length)
);
url = contentLocation.pathQueryRef.toLowerCase();
} else if (url.startsWith("about:reader?url=")) {
url = decodeURIComponent(url.substr(17));
}
if (
contentType == Ci.nsIContentPolicy.TYPE_DOCUMENT ||
contentType == Ci.nsIContentPolicy.TYPE_SUBDOCUMENT
) {
if (this._blockPatterns.matches(url.toLowerCase())) {
if (this._blockPatterns.matches(url)) {
if (
!this._exceptionsPatterns ||
!this._exceptionsPatterns.matches(url.toLowerCase())
!this._exceptionsPatterns.matches(url)
) {
return Ci.nsIContentPolicy.REJECT_POLICY;
}
@@ -145,13 +143,11 @@ export let WebsiteFilter = {
}
let location = channel.getResponseHeader("location");
// location might not be a fully qualified URL
let url;
try {
url = new URL(location);
} catch (e) {
url = new URL(location, channel.URI.spec);
let url = URL.parse(location);
if (!url) {
url = URL.parse(location, channel.URI.spec);
}
if (this._blockPatterns.matches(url.href.toLowerCase())) {
if (url && this._blockPatterns.matches(url.href.toLowerCase())) {
if (
!this._exceptionsPatterns ||
!this._exceptionsPatterns.matches(url.href.toLowerCase())

@@ -72,12 +72,9 @@ Bookmarks.prototype = {
};
folderMap.set(id, bmToInsert);
} else {
try {
new URL(url);
} catch (ex) {
if (!URL.canParse(url)) {
console.error(
`Ignoring ${url} when importing from 360se because of exception:`,
ex
`Ignoring ${url} when importing from 360se because it is not a valid URL.`
);
continue;
}

@@ -451,6 +451,7 @@ export class ChromeProfileMigrator extends MigratorBase {

let logins = [];
let fallbackCreationDate = new Date();
const kValidSchemes = new Set(["https", "http", "ftp"]);
for (let row of rows) {
try {
let origin_url = lazy.NetUtil.newURI(
@@ -458,7 +459,6 @@ export class ChromeProfileMigrator extends MigratorBase {
);
// Ignore entries for non-http(s)/ftp URLs because we likely can't
// use them anyway.
const kValidSchemes = new Set(["https", "http", "ftp"]);
if (!kValidSchemes.has(origin_url.scheme)) {
continue;
}
@@ -9,6 +9,8 @@ import { MSMigrationUtils } from "resource:///modules/MSMigrationUtils.sys.mjs";
const EDGE_COOKIE_PATH_OPTIONS = ["", "#!001\\", "#!002\\"];
const EDGE_COOKIES_SUFFIX = "MicrosoftEdge\\Cookies";

const ALLOWED_PROTOCOLS = new Set(["http:", "https:", "ftp:"]);

const lazy = {};
ChromeUtils.defineESModuleGetters(lazy, {
ESEDBReader: "resource:///modules/ESEDBReader.sys.mjs",
@@ -136,14 +138,8 @@ EdgeTypedURLMigrator.prototype = {
continue;
}

let url;
try {
url = new URL(urlString);
if (!["http:", "https:", "ftp:"].includes(url.protocol)) {
continue;
}
} catch (ex) {
console.error(ex);
let url = URL.parse(urlString);
if (!url || !ALLOWED_PROTOCOLS.has(url.protocol)) {
continue;
}

@@ -223,31 +219,27 @@ EdgeTypedURLDBMigrator.prototype = {
Date.now() - MigrationUtils.HISTORY_MAX_AGE_IN_MILLISECONDS
);
for (let typedUrlInfo of typedUrls) {
try {
let date = typedUrlInfo.AccessDateTimeUTC;
if (!date) {
date = kDateCutOff;
} else if (date < kDateCutOff) {
continue;
}

let url = new URL(typedUrlInfo.URL);
if (!["http:", "https:", "ftp:"].includes(url.protocol)) {
continue;
}

pageInfos.push({
url,
visits: [
{
transition: lazy.PlacesUtils.history.TRANSITIONS.TYPED,
date,
},
],
});
} catch (ex) {
console.error(ex);
let date = typedUrlInfo.AccessDateTimeUTC;
if (!date) {
date = kDateCutOff;
} else if (date < kDateCutOff) {
continue;
}

let url = URL.parse(typedUrlInfo.URL);
if (!url || !ALLOWED_PROTOCOLS.has(url.protocol)) {
continue;
}

pageInfos.push({
url,
visits: [
{
transition: lazy.PlacesUtils.history.TRANSITIONS.TYPED,
date,
},
],
});
}
await MigrationUtils.insertVisitsWrapper(pageInfos);
},
@@ -319,9 +311,7 @@ EdgeReadingListMigrator.prototype = {
for (let item of readingListItems) {
let dateAdded = item.AddedDate || new Date();
// Avoid including broken URLs:
try {
new URL(item.URL);
} catch (ex) {
if (!URL.canParse(item.URL)) {
continue;
}
bookmarks.push({ url: item.URL, title: item.Title, dateAdded });
@@ -426,11 +416,9 @@ EdgeBookmarksMigrator.prototype = {
let bmToInsert;
// Ignore invalid URLs:
if (!bookmark.IsFolder) {
try {
new URL(bookmark.URL);
} catch (ex) {
if (!URL.canParse(bookmark.URL)) {
console.error(
`Ignoring ${bookmark.URL} when importing from Edge because of exception: ${ex}`
`Ignoring ${bookmark.URL} when importing from Edge because it is not a valid URL.`
);
continue;
}

@@ -638,13 +638,11 @@ WindowsVaultFormPasswords.prototype = {
}
let url =
item.contents.pResourceElement.contents.itemValue.readString();
let realURL;
try {
realURL = Services.io.newURI(url);
} catch (ex) {
/* leave realURL as null */
}
if (!realURL || !["http", "https", "ftp"].includes(realURL.scheme)) {
let realURL = URL.parse(url);
if (
!realURL ||
!["http:", "https:", "ftp:"].includes(realURL.protocol)
) {
// Ignore items for non-URLs or URLs that aren't HTTP(S)/FTP
continue;
}
@@ -690,7 +688,7 @@ WindowsVaultFormPasswords.prototype = {
logins.push({
username,
password,
origin: realURL.prePath,
origin: realURL.URI.prePath,
timeCreated: creation,
});
@@ -1042,13 +1042,9 @@ class MigrationUtils {
let url = pageInfo.url;
if (url instanceof Ci.nsIURI) {
url = pageInfo.url.spec;
} else if (typeof url != "string") {
pageInfo.url.href;
}

try {
new URL(url);
} catch (ex) {
if (!URL.canParse(url)) {
// This won't save and we won't need to 'undo' it, so ignore this URL.
continue;
}

@@ -106,7 +106,7 @@ Bookmarks.prototype = {
if (rows) {
// Convert the rows from our SQLite database into a map from bookmark url to uuid
for (let row of rows) {
let uniqueURL = Services.io.newURI(row.getResultByName("url")).spec;
let uniqueURL = new URL(row.getResultByName("url")).href;

// Normalize the URL by removing any trailing slashes. We'll make sure to do
// the same when doing look-ups during a migration.
@@ -292,12 +292,9 @@ Bookmarks.prototype = {
} else if (type == "WebBookmarkTypeLeaf" && entry.has("URLString")) {
// Check we understand this URL before adding it:
let url = entry.get("URLString");
try {
new URL(url);
} catch (ex) {
if (!URL.canParse(url)) {
console.error(
`Ignoring ${url} when importing from Safari because of exception:`,
ex
`Ignoring ${url} when importing from Safari because it is not a valid URL.`
);
continue;
}

@@ -33,7 +33,7 @@ XPCOMUtils.defineLazyPreferenceGetter(
300
);

const ALLOWED_SCHEMES = ["http", "https", "data", "blob"];
const ALLOWED_PROTOCOLS = new Set(["http:", "https:", "data:", "blob:"]);

/**
* Shifts the first element out of the set.
@@ -358,8 +358,7 @@ export const PageDataService = new (class PageDataService extends EventEmitter {
* The url of the page.
*/
async pageLoaded(actor, url) {
let uri = Services.io.newURI(url);
if (!ALLOWED_SCHEMES.includes(uri.scheme)) {
if (!ALLOWED_PROTOCOLS.has(new URL(url).protocol)) {
return;
}

@@ -140,15 +140,11 @@ function parseMicrodataProp(propElement) {
return "";
}

try {
let url = new URL(
urlElement.getAttribute(attr),
urlElement.ownerDocument.documentURI
);
return url.toString();
} catch (e) {
return "";
}
let url = URL.parse(
urlElement.getAttribute(attr),
urlElement.ownerDocument.documentURI
);
return url ? url.toString() : "";
};

switch (propElement.localName) {

@@ -179,15 +179,10 @@ class _InteractionsBlocklist {

// First, find the URL's base host: the hostname without any subdomains or a
// public suffix.
let url;
try {
url = new URL(urlToCheck);
if (!url) {
throw new Error();
}
} catch (ex) {
let url = URL.parse(urlToCheck);
if (!url) {
lazy.logConsole.warn(
`Invalid URL passed to InteractionsBlocklist.isUrlBlocklisted: ${url}`
`Invalid URL passed to InteractionsBlocklist.isUrlBlocklisted: ${urlToCheck}`
);
return false;
}

@@ -679,10 +679,8 @@ var gPrivacyPane = {

async updateDoHStatus() {
let trrURI = Services.dns.currentTrrURI;
let hostname = "";
try {
hostname = new URL(trrURI).hostname;
} catch (e) {
let hostname = URL.parse(trrURI)?.hostname;
if (!hostname) {
hostname = await document.l10n.formatValue("preferences-doh-bad-url");
}
@@ -799,9 +799,8 @@ class TelemetryHandler {
* tracking, since they're inside a WeakMap.
*/
_findBrowserItemForURL(url) {
try {
url = new URL(url);
} catch (ex) {
url = URL.parse(url);
if (!url) {
return null;
}

@@ -811,13 +810,12 @@ class TelemetryHandler {
if (currentBestMatch === Infinity) {
break;
}
try {
// Make sure to cache the parsed URL object, since there's no reason to
// do it twice.
trackingURL =
candidateItem._trackingURL ||
(candidateItem._trackingURL = new URL(trackingURL));
} catch (ex) {
// Make sure to cache the parsed URL object, since there's no reason to
// do it twice.
trackingURL =
candidateItem._trackingURL ||
(candidateItem._trackingURL = URL.parse(trackingURL));
if (!trackingURL) {
continue;
}
let score = this.compareUrls(url, trackingURL);
@@ -1448,19 +1446,18 @@ class ContentHandler {
info.nonAdsLinkQueryParamNames.length &&
info.nonAdsLinkRegexps.some(r => r.test(url))
) {
let newParsedUrl;
for (let key of info.nonAdsLinkQueryParamNames) {
let paramValue = parsedUrl.searchParams.get(key);
if (paramValue) {
try {
newParsedUrl = /^https?:\/\//.test(paramValue)
? new URL(paramValue)
: new URL(paramValue, parsedUrl.origin);
let newParsedUrl = /^https?:\/\//.test(paramValue)
? URL.parse(paramValue)
: URL.parse(paramValue, parsedUrl.origin);
if (newParsedUrl) {
parsedUrl = newParsedUrl;
break;
} catch (e) {}
}
}
}
parsedUrl = newParsedUrl ?? parsedUrl;
}

// Determine the component type of the link.

@@ -25,13 +25,11 @@ ChromeUtils.registerWindowActor("Screenshot", {
});

function loadContentWindow(browser, url) {
let uri;
try {
uri = Services.io.newURI(url);
} catch (e) {
let msg = `Invalid URL passed to loadContentWindow(): ${url}`;
console.error(msg);
return Promise.reject(new Error(msg));
let uri = URL.parse(url)?.URI;
if (!uri) {
let err = new Error(`Invalid URL passed to loadContentWindow(): ${url}`);
console.error(err);
return Promise.reject(err);
}

const principal = Services.scriptSecurityManager.getSystemPrincipal();

@@ -255,14 +255,12 @@ export class ShoppingContainer extends MozLitElement {
}

getHostnameFromProductUrl() {
let hostname;
try {
hostname = new URL(this.productUrl)?.hostname;
let hostname = URL.parse(this.productUrl)?.hostname;
if (hostname) {
return hostname;
} catch (e) {
console.warn(`Unknown product url ${this.productUrl}.`);
return null;
}
console.warn(`Unknown product url ${this.productUrl}.`);
return null;
}

analysisDetailsTemplate() {

@@ -98,19 +98,19 @@ export default class TabHoverPreviewPanel {
}

getPrettyURI(uri) {
try {
let url = new URL(uri);
if (url.protocol == "about:" && url.pathname == "reader") {
url = new URL(url.searchParams.get("url"));
}

if (url.protocol === "about:") {
return url.href;
}
return `${url.hostname}`.replace(/^w{3}\./, "");
} catch {
let url = URL.parse(uri);
if (!url) {
return uri;
}

if (url.protocol == "about:" && url.pathname == "reader") {
url = URL.parse(url.searchParams.get("url"));
}

if (url?.protocol === "about:") {
return url.href;
}
return url ? url.hostname.replace(/^w{3}\./, "") : uri;
}

_hasValidWireframeState(tab) {
@@ -10,10 +10,10 @@ const TIPPYTOP_JSON_PATH =
* Get a domain from a url optionally stripping subdomains.
*/
export function getDomain(url, strip = "www.") {
let domain = "";
try {
domain = new URL(url).hostname;
} catch (ex) {}
let domain = URL.parse(url)?.hostname;
if (!domain) {
return "";
}
if (strip === "*") {
try {
domain = Services.eTLD.getBaseDomainFromHost(domain);

@@ -168,10 +168,9 @@ export class MerinoClient {
if (!endpointString) {
return [];
}
let url;
try {
url = new URL(endpointString);
} catch (error) {
let url = URL.parse(endpointString);
if (!url) {
let error = new Error(`${endpointString} is not a valid URL`);
this.logger.error("Error creating endpoint URL", error);
return [];
}

@@ -854,12 +854,7 @@ export class UrlbarInput {
result: selectedResult || this._resultForCurrentValue || null,
});

let isValidUrl = false;
try {
new URL(url);
isValidUrl = true;
} catch (ex) {}
if (isValidUrl) {
if (URL.canParse(url)) {
// Annotate if the untrimmed value contained a scheme, to later potentially
// be upgraded by schemeless HTTPS-First.
openParams.schemelessInput = this.#getSchemelessInput(
@@ -2808,9 +2803,8 @@ export class UrlbarInput {
return result.payload.url;
}

try {
uri = Services.io.newURI(this._untrimmedValue);
} catch (ex) {
uri = URL.parse(this._untrimmedValue)?.URI;
if (!uri) {
return selectedVal;
}
}
@@ -2846,10 +2840,11 @@ export class UrlbarInput {
// Unless decodeURLsOnCopy is set. Do not encode data: URIs.
if (!lazy.UrlbarPrefs.get("decodeURLsOnCopy") && !uri.schemeIs("data")) {
try {
new URL(selectedVal);
// Use encodeURI instead of URL.href because we don't want
// trailing slash.
selectedVal = encodeURI(selectedVal);
if (URL.canParse(selectedVal)) {
// Use encodeURI instead of URL.href because we don't want
// trailing slash.
selectedVal = encodeURI(selectedVal);
}
} catch (ex) {
// URL is invalid. Return original selected value.
}
@@ -3375,10 +3370,9 @@ export class UrlbarInput {
return null;
}
let strippedURI = null;
let uri = null;

// Error check occurs during isClipboardURIValid
uri = Services.io.newURI(copyString);
let uri = Services.io.newURI(copyString);
try {
strippedURI = lazy.QueryStringStripper.stripForCopyOrShare(uri);
} catch (e) {
@@ -3402,14 +3396,8 @@ export class UrlbarInput {
if (!copyString) {
return false;
}
// throws if the selected string is not a valid URI
try {
Services.io.newURI(copyString);
} catch (e) {
return false;
}

return true;
return URL.canParse(copyString);
}

/**
@@ -4781,7 +4769,7 @@ function getDroppableData(event) {
}
// The URL bar automatically handles inputs with newline characters,
// so we can get away with treating text/x-moz-url flavours as text/plain.
if (links.length && links[0].url) {
if (links[0]?.url) {
event.preventDefault();
let href = links[0].url;
if (lazy.UrlbarUtils.stripUnsafeProtocolOnPaste(href) != href) {
@@ -4791,13 +4779,13 @@ function getDroppableData(event) {
return null;
}

try {
// If this throws, checkLoadURStrWithPrincipal would also throw,
// as that's what it does with things that don't pass the IO
// service's newURI constructor without fixup. It's conceivable we
// may want to relax this check in the future (so e.g. www.foo.com
// gets fixed up), but not right now.
let url = new URL(href);
// If this fails, checkLoadURIStrWithPrincipal would also fail,
// as that's what it does with things that don't pass the IO
// service's newURI constructor without fixup. It's conceivable we
// may want to relax this check in the future (so e.g. www.foo.com
// gets fixed up), but not right now.
let url = URL.parse(href);
if (url) {
// If we succeed, try to pass security checks. If this works, return the
// URL object. If the *security checks* fail, return null.
try {
@@ -4812,9 +4800,8 @@ function getDroppableData(event) {
} catch (ex) {
return null;
}
} catch (ex) {
// We couldn't make a URL out of this. Continue on, and return text below.
}
// We couldn't make a URL out of this. Continue on, and return text below.
}
// Handle as text.
return event.dataTransfer.getData("text/plain");

@@ -985,10 +985,7 @@ class MuxerUnifiedComplete extends UrlbarMuxer {
);
if (param) {
let [key, value] = param.split("=");
let searchParams;
try {
({ searchParams } = new URL(result.payload.url));
} catch (error) {}
let searchParams = URL.parse(result.payload.url)?.searchParams;
if (
(value === undefined && searchParams?.has(key)) ||
(value !== undefined && searchParams?.getAll(key).includes(value))

@@ -89,15 +89,16 @@ class ProviderClipboard extends UrlbarProvider {
}

#validUrl(clipboardVal) {
try {
let givenUrl;
givenUrl = new URL(clipboardVal);
if (givenUrl.protocol == "http:" || givenUrl.protocol == "https:") {
return givenUrl.href;
}
} catch (ex) {
let givenUrl = URL.parse(clipboardVal);
if (!givenUrl) {
// Not a valid URI.
return null;
}

if (givenUrl.protocol == "http:" || givenUrl.protocol == "https:") {
return givenUrl.href;
}

return null;
}

@@ -86,9 +86,7 @@ class ProviderHeuristicFallback extends UrlbarProvider {
// to visit or search for it, we provide an alternative searchengine
// match if the string looks like an alphanumeric origin or an e-mail.
let str = queryContext.searchString;
try {
new URL(str);
} catch (ex) {
if (!URL.canParse(str)) {
if (
lazy.UrlbarPrefs.get("keyword.enabled") &&
(lazy.UrlbarTokenizer.looksLikeOrigin(str, {

@@ -491,11 +491,8 @@ async function isDefaultEngineHomepage(urlStr) {
return false;
}

// The URL object throws if the string isn't a valid URL.
let url;
try {
url = new URL(urlStr);
} catch (e) {
let url = URL.parse(urlStr);
if (!url) {
return false;
}

@@ -311,10 +311,7 @@ class ProviderTopSites extends UrlbarProvider {

if (!engine && site.url) {
// Look up the engine by its domain.
let host;
try {
host = new URL(site.url).hostname;
} catch (err) {}
let host = URL.parse(site.url)?.hostname;
if (host) {
engine = (
await lazy.UrlbarSearchUtils.enginesForDomainPrefix(host)

@@ -382,10 +382,8 @@ class _UrlbarSearchTermsPersistence {
* first matched query parameter to be persisted.
*/
isDefaultPage(currentURI, provider) {
let searchParams;
try {
searchParams = new URL(currentURI.spec).searchParams;
} catch (ex) {
let { searchParams } = URL.fromURI(currentURI);
if (!searchParams.size) {
return false;
}
if (provider.includeParams) {

@@ -798,12 +798,11 @@ export var UrlbarUtils = {
* if there is no ref and undefined if url is not well-formed.
*/
extractRefFromUrl(url) {
try {
let nsUri = Services.io.newURI(url);
return { base: nsUri.specIgnoringRef, ref: nsUri.ref };
} catch {
return { base: url };
let uri = URL.parse(url)?.URI;
if (uri) {
return { base: uri.specIgnoringRef, ref: uri.ref };
}
return { base: url };
},

/**
@@ -1200,10 +1199,10 @@ export var UrlbarUtils = {
if (!lazy.UrlbarTokenizer.REGEXP_PREFIX.test(candidate)) {
candidate = "http://" + candidate;
}
try {
url = new URL(url);
candidate = new URL(candidate);
} catch (e) {

url = URL.parse(url);
candidate = URL.parse(candidate);
if (!url || !candidate) {
return false;
}
@@ -13,6 +13,16 @@ this.shot = (function () {
Object.prototype.toString.call(process) === "[object process]";
const URL = (isNode && require("url").URL) || window.URL;

if (typeof URL.parse !== "function") {
URL.parse = function (url, base) {
try {
return new URL(url, base);
} catch (e) {
return null;
}
};
}

/** Throws an error if the condition isn't true. Any extra arguments after the condition
are used as console.error() arguments. */
function assert(condition, ...args) {
@@ -25,17 +35,14 @@ this.shot = (function () {

/** True if `url` is a valid URL */
function isUrl(url) {
try {
const parsed = new URL(url);

const parsed = URL.parse(url);
if (parsed) {
if (parsed.protocol === "view-source:") {
return isUrl(url.substr("view-source:".length));
return isUrl(url.substr(12));
}

return true;
} catch (e) {
return false;
}
return false;
}

function isValidClipImageUrl(url) {
@@ -59,7 +66,7 @@ this.shot = (function () {

function assertOrigin(url) {
assertUrl(url);
if (url.search(/^https?:/i) !== -1) {
if (/^https?:/i.test(url)) {
let newUrl = new URL(url);
if (newUrl.pathname != "/") {
throw new Error("Bad origin, might include path");
@@ -71,16 +78,15 @@ this.shot = (function () {
if (!url) {
return null;
}
if (url.search(/^https?:/i) === -1) {
if (!/^https?:/i.test(url)) {
// Non-HTTP URLs don't have an origin
return null;
}
try {
let tryUrl = new URL(url);
let tryUrl = URL.parse(url);
if (tryUrl) {
return tryUrl.origin;
} catch {
return null;
}
return null;
}

/** Check if the given object has all of the required attributes, and no extra

@@ -164,11 +164,9 @@ export let LaterRun = {
let rv = [];
for (let [, pageData] of pageDataStore) {
if (pageData.url) {
let uri = null;
try {
let urlString = Services.urlFormatter.formatURL(pageData.url.trim());
uri = Services.io.newURI(urlString);
} catch (ex) {
let urlString = Services.urlFormatter.formatURL(pageData.url.trim());
let uri = URL.parse(urlString)?.URI;
if (!uri) {
console.error(
"Invalid LaterRun page URL ",
pageData.url,

@@ -504,9 +504,7 @@ export const URILoadingHelper = {
if (where == "current") {
targetBrowser = params.targetBrowser || w.gBrowser.selectedBrowser;
loadInBackground = false;
try {
uriObj = Services.io.newURI(url);
} catch (e) {}
uriObj = URL.parse(url)?.URI;

// In certain tabs, we restrict what if anything may replace the loaded
// page. If a load request bounces off for the currently selected tab,

@@ -306,14 +306,7 @@ var Builder = class {
*/
_clearHistory(uriSpecsToRemove) {
let URIsToRemove = uriSpecsToRemove
.map(spec => {
try {
// in case we get a bad uri
return Services.io.newURI(spec);
} catch (e) {
return null;
}
})
.map(spec => URL.parse(spec)?.URI)
.filter(uri => !!uri);

if (URIsToRemove.length) {
@@ -3462,11 +3462,11 @@ Toolbox.prototype = {
* @return {String} pathname
*/
getExtensionPathName(url) {
if (!URL.canParse(url)) {
const parsedURL = URL.parse(url);
if (!parsedURL) {
// Return the url if unable to resolve the pathname.
return url;
}
const parsedURL = new URL(url);
// Only moz-extension URL should be shortened into the URL pathname.
if (parsedURL.protocol !== "moz-extension:") {
return url;

@@ -288,10 +288,12 @@ function TargetMixin(parentClass) {
if (this._title) {
return this._title;
}
return URL.canParse(this._url)
? new URL(this._url).pathname
: // If document URL can't be parsed, fallback to the raw URL.
this._url;
const parsedURL = URL.parse(this._url);
if (parsedURL) {
return parsedURL.pathname;
}
// If document URL can't be parsed, fallback to the raw URL.
return this._url;
}

if (this.isContentProcess) {

@@ -190,14 +190,10 @@ function getFileName(baseNameWithQuery) {
* @return {URL} The URL object
*/
function getUrl(url) {
try {
if (url instanceof URL) {
return url;
}
return new URL(url);
} catch (err) {
return null;
if (URL.isInstance(url)) {
return url;
}
return URL.parse(url);
}

/**
@@ -209,7 +205,7 @@ function getUrl(url) {
*/
function getUrlProperty(input, property) {
const url = getUrl(input);
return url?.[property] ? url[property] : "";
return url?.[property] ?? "";
}

/**
@@ -705,7 +701,7 @@ function removeXSSIString(payloadUnclean) {
const xssiRegexMatch = payloadUnclean.match(xssiRegex);

// Remove XSSI string if there was one found
if (xssiRegexMatch?.length > 0) {
if (xssiRegexMatch?.length) {
const xssiLen = xssiRegexMatch[0].length;
try {
// substring the payload by the length of the XSSI match to remove it
@@ -743,7 +739,7 @@ function getRequestHeadersRawText(
requestHeaders,
urlDetails
) {
const url = new URL(urlDetails.url);
const url = getUrl(urlDetails.url);
const path = url ? `${url.pathname}${url.search}` : "<unknown>";
const preHeaderText = `${method} ${path} ${httpVersion}`;
return writeHeaderText(requestHeaders.headers, preHeaderText).trim();

@@ -448,15 +448,10 @@ define(function (require, exports, module) {
* Whether the token is a URL.
*/
function isURL(token) {
try {
if (!validProtocols.test(token)) {
return false;
}
new URL(token);
return true;
} catch (e) {
if (!validProtocols.test(token)) {
return false;
}
return URL.canParse(token);
}

/**

@@ -2027,8 +2027,8 @@ class OutputParser {
*/
#appendURL(match, url, options) {
if (options.urlClass) {
// Sanitize the URL. Note that if we modify the URL, we just
// leave the termination characters. This isn't strictly
// Sanitize the URL. Note that if we modify the URL, we just
// leave the termination characters. This isn't strictly
// "as-authored", but it makes a bit more sense.
match = this.#sanitizeURL(match);
const urlParts = URL_REGEX.exec(match);
@@ -2043,21 +2043,14 @@ class OutputParser {

this.#appendTextNode(leader);

let href = url;
if (options.baseURI) {
try {
href = new URL(url, options.baseURI).href;
} catch (e) {
// Ignore.
}
}

this.#appendNode(
"a",
{
target: "_blank",
class: options.urlClass,
href,
href: options.baseURI
? (URL.parse(url, options.baseURI)?.href ?? url)
: url,
},
body
);
@@ -40,18 +40,19 @@ const windowsDrive = /^([a-zA-Z]:)/;

function resolveSourceURL(sourceURL, targetActor) {
if (sourceURL) {
try {
let baseURL;
if (targetActor.window) {
baseURL = targetActor.window.location?.href;
}
// For worker, we don't have easy access to location,
// so pull extra information directly from the target actor.
if (targetActor.workerUrl) {
baseURL = targetActor.workerUrl;
}
return new URL(sourceURL, baseURL || undefined).href;
} catch (err) {}
let baseURL;
if (targetActor.window) {
baseURL = targetActor.window.location?.href;
}
// For worker, we don't have easy access to location,
// so pull extra information directly from the target actor.
if (targetActor.workerUrl) {
baseURL = targetActor.workerUrl;
}
const parsedURL = URL.parse(sourceURL, baseURL);
if (parsedURL) {
return parsedURL.href;
}
}

return null;

@@ -13,7 +13,7 @@ function getSourcemapBaseURL(url, global) {
} else if (global?.location?.href) {
// If there is no URL for the source, the map comment is relative to the
// page being viewed, so we use the document href.
sourceMapBaseURL = global?.location?.href;
sourceMapBaseURL = global.location.href;
} else {
// If there is no valid base, the sourcemap URL will need to be an absolute
// URL of some kind.
@@ -30,12 +30,11 @@ function getSourcemapBaseURL(url, global) {
// If the base URL is a blob, we want to resolve relative to the origin
// that created the blob URL, if there is one.
if (sourceMapBaseURL.startsWith("blob:")) {
try {
const parsedBaseURL = new URL(sourceMapBaseURL);
const parsedBaseURL = URL.parse(sourceMapBaseURL);
if (parsedBaseURL) {
return parsedBaseURL.origin === "null" ? null : parsedBaseURL.origin;
} catch (err) {
return null;
}
return null;
}

return sourceMapBaseURL;

@@ -205,17 +205,16 @@ class SourcesManager extends EventEmitter {
return false;
}

try {
const url = new URL(uri);
const url = URL.parse(uri);
if (url) {
const pathname = url.pathname;
return MINIFIED_SOURCE_REGEXP.test(
pathname.slice(pathname.lastIndexOf("/") + 1)
);
} catch (e) {
// Not a valid URL so don't try to parse out the filename, just test the
// whole thing with the minified source regexp.
return MINIFIED_SOURCE_REGEXP.test(uri);
}
// Not a valid URL so don't try to parse out the filename, just test the
// whole thing with the minified source regexp.
return MINIFIED_SOURCE_REGEXP.test(uri);
}

/**

@@ -865,12 +865,8 @@ exports.WatcherActor = class WatcherActor extends Actor {
* @param {String} newTargetUrl
*/
async updateDomainSessionDataForServiceWorkers(newTargetUrl) {
let host = "";
// Accessing `host` can throw on some URLs with no valid host like about:home.
// In such scenario, reset the host to an empty string.
try {
host = new URL(newTargetUrl).host;
} catch (e) {}
// If the url could not be parsed the host defaults to an empty string.
const host = URL.parse(newTargetUrl)?.host ?? "";

ParentProcessWatcherRegistry.addOrSetSessionDataEntry(
this,

@@ -289,13 +289,12 @@ class LegacyServiceWorkersWatcher extends LegacyWorkersWatcher {
// For local tabs, we match ServiceWorkerRegistrations and the target
// if they share the same hostname for their "url" properties.
const targetDomain = this.#currentTargetURL.hostname;
try {
const registrationDomain = new URL(registration.url).hostname;
const registrationDomain = URL.parse(registration.url)?.hostname;
if (registrationDomain) {
return registrationDomain === targetDomain;
} catch (e) {
// XXX: Some registrations have an empty URL.
return false;
}
// XXX: Some registrations have an empty URL.
return false;
}
}
@@ -166,23 +166,19 @@ exports.shortSource = function (sheet) {
: dataUrl[1];
} else {
// We try, in turn, the filename, filePath, query string, whole thing
let url = {};
try {
url = new URL(sheet.href);
} catch (ex) {
// Some UA-provided stylesheets are not valid URLs.
}

if (url.pathname) {
const index = url.pathname.lastIndexOf("/");
if (index !== -1 && index < url.pathname.length) {
name = url.pathname.slice(index + 1);
} else {
name = url.pathname;
const url = URL.parse(sheet.href);
if (url) {
if (url.pathname) {
const index = url.pathname.lastIndexOf("/");
if (index !== -1 && index < url.pathname.length) {
name = url.pathname.slice(index + 1);
} else {
name = url.pathname;
}
} else if (url.query) {
name = url.query;
}
} else if (url.query) {
name = url.query;
}
} // else some UA-provided stylesheets are not valid URLs.
}

try {

@@ -9,11 +9,8 @@
* The initial path must be an full URI with a protocol (i.e. http://).
*/
exports.joinURI = (initialPath, ...paths) => {
let url;

try {
url = new URL(initialPath);
} catch (e) {
let url = URL.parse(initialPath);
if (!url) {
return null;
}

@@ -259,7 +259,7 @@ interface WebExtensionPolicy {
static WebExtensionPolicy? getByHostname(ByteString hostname);

/**
* Returns the currently-active policy for the extension extension URI, or
* Returns the currently-active policy for the extension URI, or
* null if the URI is not an extension URI, or no policy is currently active
* for it.
*/

@@ -209,16 +209,14 @@ export var ManifestProcessor = {
expectedType: "string",
trim: false,
};
let scopeURL;
const startURL = new URL(processedManifest.start_url);
const defaultScope = new URL(".", startURL).href;
const value = extractor.extractValue(spec);
if (value === undefined || value === "") {
return defaultScope;
}
try {
scopeURL = new URL(value, manifestURL);
} catch (e) {
let scopeURL = URL.parse(value, manifestURL);
if (!scopeURL) {
const warn = domBundle.GetStringFromName("ManifestScopeURLInvalid");
errors.push({ warn });
return defaultScope;
@@ -259,10 +257,8 @@ export var ManifestProcessor = {
if (value === undefined || value === "") {
return defaultStartURL;
}
let potentialResult;
try {
potentialResult = new URL(value, manifestURL);
} catch (e) {
let potentialResult = URL.parse(value, manifestURL);
if (!potentialResult) {
const warn = domBundle.GetStringFromName("ManifestStartURLInvalid");
errors.push({ warn });
return defaultStartURL;
@@ -328,10 +324,8 @@ export var ManifestProcessor = {
return startURL.href;
}

let appId;
try {
appId = new URL(extractedValue, startURL.origin);
} catch {
let appId = URL.parse(extractedValue, startURL.origin);
if (!appId) {
const warn = domBundle.GetStringFromName("ManifestIdIsInvalid");
errors.push({ warn });
return startURL.href;

@@ -273,13 +273,11 @@ export class Network extends Domain {
}

// Retrieve host. Check domain first because it has precedence.
let hostname = cookie.domain || "";
let cookieURL;
let hostname = cookie.domain ?? "";
let schemeType = Ci.nsICookie.SCHEME_UNSET;
if (!hostname.length) {
try {
cookieURL = new URL(cookie.url);
} catch (e) {
if (!hostname) {
let cookieURL = URL.parse(cookie.url);
if (!cookieURL) {
return { success: false };
}
@@ -49,6 +49,7 @@ ChromeUtils.defineESModuleGetters(lazy, {
TabManager: "chrome://remote/content/shared/TabManager.sys.mjs",
TimedPromise: "chrome://remote/content/marionette/sync.sys.mjs",
Timeouts: "chrome://remote/content/shared/webdriver/Capabilities.sys.mjs",
truncate: "chrome://remote/content/shared/Format.sys.mjs",
unregisterCommandsActor:
"chrome://remote/content/marionette/actors/MarionetteCommandsParent.sys.mjs",
waitForInitialNavigationCompleted:
@@ -1047,11 +1048,13 @@ GeckoDriver.prototype.navigateTo = async function (cmd) {
);
await this._handleUserPrompts();

let validURL;
try {
validURL = new URL(cmd.parameters.url);
} catch (e) {
throw new lazy.error.InvalidArgumentError(`Malformed URL: ${e.message}`);
let { url } = cmd.parameters;

let validURL = URL.parse(url);
if (!validURL) {
throw new lazy.error.InvalidArgumentError(
lazy.truncate`Expected "url" to be a valid URL, got ${url}`
);
}

// Switch to the top-level browsing context before navigating

@@ -12,6 +12,7 @@ ChromeUtils.defineESModuleGetters(lazy, {
error: "chrome://remote/content/shared/webdriver/Errors.sys.mjs",
pprint: "chrome://remote/content/shared/Format.sys.mjs",
RemoteAgent: "chrome://remote/content/components/RemoteAgent.sys.mjs",
truncate: "chrome://remote/content/shared/Format.sys.mjs",
UserPromptHandler:
"chrome://remote/content/shared/webdriver/UserPromptHandler.sys.mjs",
});
@@ -277,31 +278,21 @@ export class Proxy {
throw new lazy.error.InvalidArgumentError(`${host} contains a scheme`);
}

// To parse the host a scheme has to be added temporarily.
// If the returned value for the port is an empty string it
// could mean no port or the default port for this scheme was
// specified. In such a case parse again with a different
// scheme to ensure we filter out the default port.
let url;
try {
// To parse the host a scheme has to be added temporarily.
// If the returned value for the port is an empty string it
// could mean no port or the default port for this scheme was
// specified. In such a case parse again with a different
// scheme to ensure we filter out the default port.
url = new URL("http://" + host);
if (url.port == "") {
url = new URL("https://" + host);
for (let _url of [`http://${host}`, `https://${host}`]) {
url = URL.parse(_url);
if (!url) {
throw new lazy.error.InvalidArgumentError(
lazy.truncate`Expected "url" to be a valid URL, got ${_url}`
);
}
} catch (e) {
throw new lazy.error.InvalidArgumentError(e.message);
}

let hostname = stripBracketsFromIpv6Hostname(url.hostname);

// If the port hasn't been set, use the default port of
// the selected scheme (except for socks which doesn't have one).
let port = parseInt(url.port);
if (!Number.isInteger(port)) {
if (scheme === "socks") {
port = null;
} else {
port = Services.io.getDefaultPort(scheme);
if (url.port != "") {
break;
}
}

@@ -317,6 +308,19 @@ export class Proxy {
);
}

let hostname = stripBracketsFromIpv6Hostname(url.hostname);

// If the port hasn't been set, use the default port of
// the selected scheme (except for socks which doesn't have one).
let port = parseInt(url.port);
if (!Number.isInteger(port)) {
if (scheme === "socks") {
port = null;
} else {
port = Services.io.getDefaultPort(scheme);
}
}

return [hostname, port];
}
@@ -829,7 +829,7 @@ class StorageModule extends RootBiDiModule {
const hostname = url.hostname;

const principal = Services.scriptSecurityManager.createContentPrincipal(
Services.io.newURI(url),
url.URI,
{}
);
const isSecureProtocol = principal.isOriginPotentiallyTrustworthy;

@@ -32,20 +32,17 @@ import { RESTRequest } from "resource://services-common/rest.sys.mjs";
* @param {String} options.serverURL
* The URL of the profile server to query.
* Example: https://profile.accounts.firefox.com/v1
* @param {String} options.token
* The bearer token to access the profile server
* @constructor
*/
export var FxAccountsProfileClient = function (options) {
if (!options || !options.serverURL) {
if (!options?.serverURL) {
throw new Error("Missing 'serverURL' configuration option");
}

this.fxai = options.fxai || fxAccounts._internal;

try {
this.serverURL = new URL(options.serverURL);
} catch (e) {
this.serverURL = URL.parse(options.serverURL);
if (!this.serverURL) {
throw new Error("Invalid 'serverURL'");
}
log.debug("FxAccountsProfileClient: Initialized");
@@ -53,7 +50,7 @@ export var FxAccountsProfileClient = function (options) {

FxAccountsProfileClient.prototype = {
/**
* {nsIURI}
* {URL}
* The server to fetch profile information from.
*/
serverURL: null,

@@ -6,7 +6,7 @@
// sync script tags that could appear between desired meta tags
const TIMEOUT_DELAY = 1000;

const ACCEPTED_PROTOCOLS = ["http:", "https:"];
const ACCEPTED_PROTOCOLS = new Set(["http:", "https:"]);

// Possible description tags, listed in order from least favourable to most favourable
const DESCRIPTION_RULES = [
@@ -51,7 +51,7 @@ function shouldExtractMetadata(aRules, aTag, aEntry) {
* @returns {Boolean} true if the preview URL is safe and can be stored, false otherwise
*/
function checkLoadURIStr(aURL) {
if (!ACCEPTED_PROTOCOLS.includes(aURL.protocol)) {
if (!ACCEPTED_PROTOCOLS.has(aURL.protocol)) {
return false;
}
try {
@@ -140,13 +140,11 @@ export class ContentMetaChild extends JSWindowActorChild {
entry.description.currMaxScore = DESCRIPTION_RULES.indexOf(tag);
} else if (shouldExtractMetadata(PREVIEW_IMAGE_RULES, tag, entry.image)) {
// Extract the preview image
let value;
try {
value = new URL(content, url);
} catch (e) {
let value = URL.parse(content, url);
if (!value) {
return;
}
if (value && checkLoadURIStr(value)) {
if (checkLoadURIStr(value)) {
entry.image.value = value.href;
entry.image.currMaxScore = PREVIEW_IMAGE_RULES.indexOf(tag);
}

@@ -427,10 +427,8 @@ EnterprisePoliciesManager.prototype = {
},

isExemptExecutableExtension(url, extension) {
let urlObject;
try {
urlObject = new URL(url);
} catch (e) {
let urlObject = URL.parse(url);
if (!urlObject) {
return false;
}
let { hostname } = urlObject;
@@ -1101,21 +1101,13 @@ class InjectionContext extends Context {
|
||||
*
|
||||
* Each method either returns a normalized version of the original
|
||||
* value, or throws an error if the value is not valid for the given
|
||||
* format.
|
||||
* format. The original input is always a string.
|
||||
*/
|
||||
const FORMATS = {
|
||||
hostname(string) {
|
||||
// TODO bug 1797376: Despite the name, this format is NOT a "hostname",
|
||||
// but hostname + port and may fail with IPv6. Use canonicalDomain instead.
|
||||
let valid = true;
|
||||
|
||||
try {
|
||||
valid = new URL(`http://${string}`).host === string;
|
||||
} catch (e) {
|
||||
valid = false;
|
||||
}
|
||||
|
||||
if (!valid) {
|
||||
if (URL.parse(`http://${string}`)?.host !== string) {
|
||||
throw new Error(`Invalid hostname ${string}`);
|
||||
}
|
||||
|
||||
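The hostname format above validates by embedding the candidate in a dummy http:// URL and checking that parsing leaves it unchanged; URL.parse() plus optional chaining collapses the old try/catch and flag variable. A hedged sketch of that round-trip check (as the TODO notes, .host keeps the port and can misbehave for IPv6):

```js
function isValidHost(candidate) {
  // Inputs that parsing would normalize (spaces, uppercase, etc.) fail the comparison.
  return URL.parse(`http://${candidate}`)?.host === candidate;
}

console.log(isValidHost("example.com"));      // true
console.log(isValidHost("example.com:8080")); // true (.host keeps the port)
console.log(isValidHost("exa mple.com"));     // false (URL.parse() returns null)
```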
@@ -1123,15 +1115,7 @@ const FORMATS = {
},

canonicalDomain(string) {
let valid;

try {
valid = new URL(`http://${string}`).hostname === string;
} catch (e) {
valid = false;
}

if (!valid) {
if (URL.parse(`http://${string}`)?.hostname !== string) {
// Require the input to be a canonical domain.
// Rejects obvious non-domains such as URLs,
// but also catches non-IDN (punycode) domains.
@@ -1151,10 +1135,8 @@ const FORMATS = {
},

origin(string, context) {
let url;
try {
url = new URL(string);
} catch (e) {
let url = URL.parse(string);
if (!url) {
throw new Error(`Invalid origin: ${string}`);
}
if (!/^https?:/.test(url.protocol)) {
@@ -1178,9 +1160,7 @@ const FORMATS = {
if (!context.url) {
// If there's no context URL, return relative URLs unresolved, and
// skip security checks for them.
try {
new URL(string);
} catch (e) {
if (!URL.canParse(string)) {
return string;
}
}
@@ -1199,12 +1179,8 @@ const FORMATS = {
},

unresolvedRelativeUrl(string) {
if (!string.startsWith("//")) {
try {
new URL(string);
} catch (e) {
return string;
}
if (!string.startsWith("//") && !URL.canParse(string)) {
return string;
}

throw new SyntaxError(

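Where the old code only needed to know whether new URL() would succeed, URL.canParse() gives that answer directly as a boolean, as in the relativeUrl and unresolvedRelativeUrl formats above. A small sketch:

```js
// A string is treated as a relative URL exactly when it cannot parse on its own.
function isAbsoluteURL(string) {
  return URL.canParse(string);
}

console.log(isAbsoluteURL("https://example.com/path"));         // true
console.log(isAbsoluteURL("/relative/path"));                   // false
console.log(isAbsoluteURL("//example.com/protocol-relative"));  // false (no scheme, no base)
```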
@@ -45,31 +45,29 @@ this.identity = class extends ExtensionAPI {
},
launchWebAuthFlow: function (details) {
// Validate the url and retreive redirect_uri if it was provided.
let url, redirectURI;
let baseRedirectURL = this.getRedirectURL();

// Allow using loopback address for native OAuth flows as some
// providers do not accept the URL provided by getRedirectURL.
// providers do not accept the URL provided by getRedirectURL.
// For more context, see bug 1635344.
let loopbackURL = `http://127.0.0.1/mozoauth2/${computeHash(
extension.id
)}`;
try {
url = new URL(details.url);
} catch (e) {
let url = URL.parse(details.url);
if (!url) {
return Promise.reject({ message: "details.url is invalid" });
}
try {
redirectURI = new URL(
url.searchParams.get("redirect_uri") || baseRedirectURL
);
let redirectURI = URL.parse(
url.searchParams.get("redirect_uri") || baseRedirectURL
);
if (redirectURI) {
if (
!redirectURI.href.startsWith(baseRedirectURL) &&
!redirectURI.href.startsWith(loopbackURL)
) {
return Promise.reject({ message: "redirect_uri not allowed" });
}
} catch (e) {
} else {
return Promise.reject({ message: "redirect_uri is invalid" });
}


@@ -20,9 +20,12 @@ const isIPv4 = host => {
}
return false;
};

const isIPv6 = host => host.includes(":");

const addBracketIfIPv6 = host =>
isIPv6(host) && !host.startsWith("[") ? `[${host}]` : host;

const dropBracketIfIPv6 = host =>
isIPv6(host) && host.startsWith("[") && host.endsWith("]")
? host.slice(1, -1)
@@ -47,13 +50,13 @@ function fromExtPartitionKey(extPartitionKey, cookieUrl) {
try {
// This is subtle! We define the ancestor bit in our code in a different
// way than the extension API, but they are isomorphic.
// If we have cookieUrl (which is guaranteed to be the case in get, set,
// and remove) this will return the topLevelSite parsed partition key,
// and include the foreign ancestor bit iff the details.url is
// same-site and a truthy value was passed in the hasCrossSiteAncestor
// property. If we don't have cookieUrl, we handle the difference in
// ancestor bit definition by returning a OA pattern that matches both
// values and filtering them later on in matches.
// If we have cookieUrl (which is guaranteed to be the case in get, set,
// and remove) this will return the topLevelSite parsed partition key,
// and include the foreign ancestor bit iff the details.url is
// same-site and a truthy value was passed in the hasCrossSiteAncestor
// property. If we don't have cookieUrl, we handle the difference in
// ancestor bit definition by returning a OA pattern that matches both
// values and filtering them later on in matches.
if (cookieUrl == null) {
let topLevelSiteURI = Services.io.newURI(topLevelSite);
let topLevelSiteFilter = Services.eTLD.getSite(topLevelSiteURI);
@@ -399,11 +402,11 @@ const query = function* (detailsIn, props, context, allowPattern) {
let host;
let url;
if ("url" in details) {
try {
url = new URL(details.url);
url = URL.parse(details.url);
if (url) {
host = dropBracketIfIPv6(url.hostname);
} catch (ex) {
// This often happens for about: URLs
} else {
// The url could not be parsed successfully
return;
}
} else if ("domain" in details) {

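The cookies hunks above pair URL.parse() with small IPv6 bracket helpers, because url.hostname serializes IPv6 literals with surrounding brackets. A runnable sketch of that combination; hostForCookieQuery is an illustrative name, not the extension's actual matching code:

```js
const isIPv6 = host => host.includes(":");
const dropBracketIfIPv6 = host =>
  isIPv6(host) && host.startsWith("[") && host.endsWith("]")
    ? host.slice(1, -1)
    : host;

function hostForCookieQuery(detailsUrl) {
  const url = URL.parse(detailsUrl);
  if (!url) {
    return null; // details.url could not be parsed
  }
  // url.hostname keeps the [] around IPv6 literals, so strip them for matching.
  return dropBracketIfIPv6(url.hostname);
}

console.log(hostForCookieQuery("http://[2001:db8::1]:8080/path")); // "2001:db8::1"
console.log(hostForCookieQuery("https://example.com/"));           // "example.com"
```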
@@ -161,7 +161,7 @@ this.proxy = class extends ExtensionAPIPersistent {
extensionApi: self,
}).api(),

// Leaving as non-persistent. By itself it's not useful since proxy-error
// Leaving as non-persistent. By itself it's not useful since proxy-error
// is emitted from the proxy filter.
onError: new EventManager({
context,
@@ -283,15 +283,21 @@ this.proxy = class extends ExtensionAPIPersistent {
for (let prop of ["http", "ssl", "socks"]) {
let host = value[prop];
if (host) {
try {
// Fixup in case a full url is passed.
if (host.includes("://")) {
value[prop] = new URL(host).host;
let valid = true;
// Fixup in case a full url is passed.
if (host.includes("://")) {
host = URL.parse(host)?.host;
if (host) {
value[prop] = host;
} else {
// Validate the host value.
new URL(`http://${host}`);
valid = false;
}
} catch (e) {
} else {
// Validate the host value.
valid = URL.canParse(`http://${host}`);
}

if (!valid) {
throw new ExtensionError(
`${value[prop]} is not a valid value for ${prop}.`
);
@@ -300,9 +306,7 @@ this.proxy = class extends ExtensionAPIPersistent {
}

if (value.proxyType === "autoConfig" || value.autoConfigUrl) {
try {
new URL(value.autoConfigUrl);
} catch (e) {
if (!URL.canParse(value.autoConfigUrl)) {
throw new ExtensionError(
`${value.autoConfigUrl} is not a valid value for autoConfigUrl.`
);

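The proxy settings hunk above uses both halves of the new API: URL.parse(host)?.host to extract the host when a full URL was passed, and URL.canParse() as a pure validity check otherwise. A standalone sketch; validateProxyHost is a hypothetical helper name:

```js
function validateProxyHost(host) {
  // Fixup in case a full url is passed: keep only its host part.
  if (host.includes("://")) {
    const fixed = URL.parse(host)?.host;
    if (!fixed) {
      throw new Error(`${host} is not a valid value`);
    }
    return fixed;
  }
  // Otherwise just check that the bare host would form a valid URL.
  if (!URL.canParse(`http://${host}`)) {
    throw new Error(`${host} is not a valid value`);
  }
  return host;
}

console.log(validateProxyHost("http://proxy.example.com:3128")); // "proxy.example.com:3128"
console.log(validateProxyHost("proxy.example.com"));             // "proxy.example.com"
```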
@@ -358,10 +358,8 @@ this.runtime = class extends ExtensionAPIPersistent {
return Promise.resolve();
}

let uri;
try {
uri = new URL(url);
} catch (e) {
let uri = URL.parse(url);
if (!uri) {
return Promise.reject({
message: `Invalid URL: ${JSON.stringify(url)}`,
});

@@ -559,16 +559,12 @@ export class URLChecker {
* @returns {string} - Normalized URL.
*/
normalizeLocalhost(url) {
try {
const parsedURL = new URL(url);
if (parsedURL.hostname === "localhost") {
// Normalize to only scheme and localhost without port or user info
return `${parsedURL.protocol}//localhost/`;
}
return url;
} catch (error) {
return url;
const parsedURL = URL.parse(url);
if (parsedURL?.hostname === "localhost") {
// Normalize to only scheme and localhost without port or user info
return `${parsedURL.protocol}//localhost/`;
}
return url;
}

/**

@@ -24,7 +24,7 @@ function sendPageEvent(action, data) {
}

function readOptinParams() {
let searchParams = new URLSearchParams(new URL(location).search);
let { searchParams } = new URL(location);
return {
slug: searchParams.get("optin_slug"),
branch: searchParams.get("optin_branch"),

@@ -93,11 +93,7 @@ export var Heartbeat = class {
}

if (options.learnMoreUrl) {
try {
options.learnMoreUrl = new URL(options.learnMoreUrl);
} catch (e) {
options.learnMoreUrl = null;
}
options.learnMoreUrl = URL.parse(options.learnMoreUrl);
}

this.chromeWindow = chromeWindow;

@@ -532,13 +532,13 @@ export class LoginManagerParent extends JSWindowActorParent {
async #getRecipesForHost(origin) {
let recipes;
if (origin) {
try {
const formHost = new URL(origin).host;
let recipeManager = await LoginManagerParent.recipeParentPromise;
recipes = recipeManager.getRecipesForHost(formHost);
} catch (ex) {
const formHost = URL.parse(origin)?.host;
if (!formHost) {
// Some schemes e.g. chrome aren't supported by URL
return [];
}
let recipeManager = await LoginManagerParent.recipeParentPromise;
recipes = recipeManager.getRecipesForHost(formHost);
}

return recipes ?? [];

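The #getRecipesForHost change above folds both failure modes (unparsable origins and origins whose scheme carries no host) into a single falsy check on URL.parse(origin)?.host. A sketch of the same shape, with an illustrative recipe map standing in for the recipe manager:

```js
const RECIPES_BY_HOST = new Map([["example.com", [{ id: "demo-recipe" }]]]);

function getRecipesForHostExample(origin) {
  const formHost = URL.parse(origin)?.host;
  if (!formHost) {
    // Unparsable strings and host-less origins both end up here.
    return [];
  }
  return RECIPES_BY_HOST.get(formHost) ?? [];
}

console.log(getRecipesForHostExample("https://example.com")); // [ { id: "demo-recipe" } ]
console.log(getRecipesForHostExample("not-a-valid-origin"));  // []
```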
@@ -1867,8 +1867,10 @@ export var Bookmarks = Object.freeze({
}

if (query.url) {
if (typeof query.url === "string" || URL.isInstance(query.url)) {
if (typeof query.url === "string") {
query.url = new URL(query.url).href;
} else if (URL.isInstance(query.url)) {
query.url = query.url.href;
} else if (query.url instanceof Ci.nsIURI) {
query.url = query.url.spec;
} else {

@@ -1141,10 +1141,8 @@ var fetchAnnotatedPages = async function (db, annotations) {
);

for (let row of rows) {
let uri;
try {
uri = new URL(row.getResultByName("url"));
} catch (ex) {
let uri = URL.parse(row.getResultByName("url"));
if (!uri) {
console.error("Invalid URL read from database in fetchAnnotatedPages");
continue;
}

@@ -122,7 +122,9 @@ function serializeNode(aNode) {

if (PlacesUtils.nodeIsURI(aNode)) {
// Check for url validity.
new URL(aNode.uri);
if (!URL.canParse(aNode.uri)) {
throw new Error(aNode.uri + " is not a valid URL");
}
data.type = PlacesUtils.TYPE_X_MOZ_PLACE;
data.uri = aNode.uri;
if (aNode.tags) {
@@ -501,7 +503,7 @@ export var PlacesUtils = {
},

/**
* Converts a string or n URL object to an nsIURI.
* Converts a string or an URL object to an nsIURI.
*
* @param url (URL) or (String)
* the URL to convert.
@@ -1142,33 +1144,31 @@ export var PlacesUtils = {
if (!uriString) {
continue;
}
let titleString = "";
if (parts.length > i + 1) {
titleString = parts[i + 1];
} else {
// for drag and drop of files, try to use the leafName as title
try {
titleString = Services.io
.newURI(uriString)
.QueryInterface(Ci.nsIURL).fileName;
} catch (ex) {}
}

let uri = null;
try {
let uri = Services.io.newURI(uriString);
if (uri.scheme != "place") {
validNodes.push({
uri: uriString,
title: titleString ? titleString : uriString,
type: this.TYPE_X_MOZ_URL,
});
}
uri = Services.io.newURI(uriString);
} catch (e) {
console.error(e);
invalidNodes.push({
uri: uriString,
});
}
if (!uri || uri.scheme == "place") {
continue;
}
let titleString = "";
if (parts.length > i + 1) {
titleString = parts[i + 1];
} else if (uri instanceof Ci.nsIURL) {
// for drag and drop of files, use the fileName as title
titleString = uri.fileName;
}

validNodes.push({
uri: uriString,
title: titleString || uriString,
type: this.TYPE_X_MOZ_URL,
});
}
break;
}
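The Bookmarks hunk above now distinguishes strings, URL objects, and nsIURI objects explicitly. A hedged sketch of that normalization; outside Gecko chrome code the URL.isInstance() and Ci.nsIURI branches simply never match, so the sketch falls back to instanceof:

```js
function normalizeQueryURL(url) {
  if (typeof url === "string") {
    return new URL(url).href; // still throws on a malformed string, as in the hunk
  }
  // URL.isInstance() is Gecko's cross-compartment-safe spelling of `instanceof URL`.
  if (URL.isInstance ? URL.isInstance(url) : url instanceof URL) {
    return url.href;
  }
  if (typeof Ci !== "undefined" && url instanceof Ci.nsIURI) {
    return url.spec; // an nsIURI already carries its serialized form
  }
  throw new Error("Unsupported url value");
}

console.log(normalizeQueryURL("https://example.com/page"));          // "https://example.com/page"
console.log(normalizeQueryURL(new URL("https://example.com/page"))); // same
```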
@@ -1661,10 +1661,9 @@ export var PlacesUtils = {
switch (type) {
case PlacesUtils.bookmarks.TYPE_BOOKMARK: {
item.type = PlacesUtils.TYPE_X_MOZ_PLACE;
// If this throws due to an invalid url, the item will be skipped.
try {
item.uri = new URL(aRow.getResultByName("url")).href;
} catch (ex) {
// If this fails due to an invalid url, the item will be skipped.
item.uri = URL.parse(aRow.getResultByName("url"))?.href;
if (!item.uri) {
let error = new Error("Invalid bookmark URL");
error.becauseInvalidURL = true;
throw error;
@@ -2941,14 +2940,15 @@ function promiseKeywordsCache() {
let brokenKeywords = [];
for (let row of rows) {
let keyword = row.getResultByName("keyword");
try {
let url = URL.parse(row.getResultByName("url"));
if (url) {
let entry = {
keyword,
url: new URL(row.getResultByName("url")),
url,
postData: row.getResultByName("post_data") || null,
};
cache.set(keyword, entry);
} catch (ex) {
} else {
// The url is invalid, don't load the keyword and remove it, or it
// would break the whole keywords API.
brokenKeywords.push(keyword);

@@ -363,15 +363,15 @@ export class ReportBrokenSiteChild extends JSWindowActorChild {
});

// If the user enters a URL unrelated to the current tab,
// don't bother sending a screnshot or logs/etc
// don't bother sending a screenshot or logs/etc
let sendRecordedPageSpecificDetails = false;
try {
const givenUri = new URL(reportUrl);
const recordedUri = new URL(url);
const givenUri = URL.parse(reportUrl);
const recordedUri = URL.parse(url);
if (givenUri && recordedUri) {
sendRecordedPageSpecificDetails =
givenUri.origin == recordedUri.origin &&
givenUri.pathname == recordedUri.pathname;
} catch (_) {}
}

if (sendRecordedPageSpecificDetails) {
payload.screenshot = screenshot;

@@ -1264,7 +1264,7 @@ export class SearchEngine {
// path of the URL from search config is not percent encoded. Thus, we
// convert both strings into URL objects to ensure consistent comparisons.
let url1 = new URL(url.template);
let url2 = new URL(uri.spec);
let url2 = URL.fromURI(uri);
if (url1.origin != url2.origin || url1.pathname != url2.pathname) {
return "";
}

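URL.fromURI(), used in the SearchEngine hunk above, is a ChromeOnly Gecko helper that wraps an existing nsIURI as a URL object without re-serializing it through uri.spec. A sketch of the comparison under that assumption; templateMatchesURI is an illustrative name:

```js
// Gecko chrome code only: URL.fromURI() is not part of the web-exposed URL API.
function templateMatchesURI(template, uri) {
  const url1 = new URL(template);  // template from search config, assumed well-formed
  const url2 = URL.fromURI(uri);   // roughly new URL(uri.spec) without the round trip
  return url1.origin == url2.origin && url1.pathname == url2.pathname;
}
```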
@@ -488,9 +488,8 @@ export class JsonSchemaValidator {
break;
}

try {
parsedParam = new URL(param);

parsedParam = URL.parse(param);
if (parsedParam) {
if (parsedParam.protocol == "file:") {
// Treat the entire file URL as an origin.
// Note this is stricter than the current Firefox policy,
@@ -509,7 +508,7 @@ export class JsonSchemaValidator {
valid = true;
}
}
} catch (ex) {
} else {
lazy.log.error(`Ignoring parameter "${param}" - not a valid origin.`);
valid = false;
}
@@ -526,10 +525,10 @@ export class JsonSchemaValidator {
break;
}

try {
parsedParam = new URL(param);
parsedParam = URL.parse(param);
if (parsedParam) {
valid = true;
} catch (ex) {
} else {
if (!param.startsWith("http")) {
lazy.log.error(
`Ignoring parameter "${param}" - scheme (http or https) must be specified.`

@@ -292,7 +292,7 @@ class ParseError extends Error {
function parseURL() {
let options = new URL(document.location.href).searchParams;

if (!options) {
if (!options.size) {
return;
}


@@ -1276,12 +1276,12 @@ var ActivityStreamProvider = {
if (options.hideWithSearchParam) {
let [key, value] = options.hideWithSearchParam.split("=");
links = links.filter(link => {
try {
let { searchParams } = new URL(link.url);
let searchParams = URL.parse(link.url)?.searchParams;
if (searchParams) {
return value === undefined
? !searchParams.has(key)
: !searchParams.getAll(key).includes(value);
} catch (error) {}
}
return true;
});
}

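The ActivityStreamProvider hunk above chains ?.searchParams off URL.parse() so an unparsable link URL simply skips the filter instead of needing a try/catch. A runnable sketch with illustrative sample links:

```js
function filterLinks(links, hideWithSearchParam) {
  const [key, value] = hideWithSearchParam.split("=");
  return links.filter(link => {
    const searchParams = URL.parse(link.url)?.searchParams;
    if (searchParams) {
      return value === undefined
        ? !searchParams.has(key)
        : !searchParams.getAll(key).includes(value);
    }
    return true; // unparsable URLs are kept, as in the original code
  });
}

const links = [
  { url: "https://example.com/?utm_source=newtab" },
  { url: "https://example.com/other" },
];
console.log(filterLinks(links, "utm_source=newtab").length); // 1
```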
@@ -4981,11 +4981,8 @@ AMTelemetry = {
* are defined in `AMO_ATTRIBUTION_DATA_KEYS`. Values are strings.
*/
parseAttributionDataForAMO(sourceURL) {
let searchParams;

try {
searchParams = new URL(sourceURL).searchParams;
} catch {
let searchParams = URL.parse(sourceURL)?.searchParams;
if (!searchParams) {
return {};
}


@@ -430,7 +430,7 @@ export class AddonInternal {
}

for (const [name, uri] of Object.entries({ installFrom, source })) {
if (!installOrigins.includes(new URL(uri.spec).origin)) {
if (!installOrigins.includes(URL.fromURI(uri).origin)) {
logger.warn(
`Addon ${this.id} Installation not allowed, ${name} "${uri.spec}" is not included in the Addon install_origins`
);
