Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save biancadanforth/8172bc4a5a581bdcf94d72a5b6819e56 to your computer and use it in GitHub Desktop.
Breaking up bug_1542035 patch into a READ and READ_AND_WRITE patch https://github.com/biancadanforth/bug_1542035
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {Cc, Ci, Cu, CC} = require("chrome");
const protocol = require("devtools/shared/protocol");
const {LongStringActor} = require("devtools/server/actors/string");
const {DebuggerServer} = require("devtools/server/main");
const Services = require("Services");
const defer = require("devtools/shared/defer");
const {isWindowIncluded} = require("devtools/shared/layout/utils");
const specs = require("devtools/shared/specs/storage");
const {parseItemValue} = require("devtools/shared/storage/utils");
const {ExtensionProcessScript} = require("resource://gre/modules/ExtensionProcessScript.jsm");
const {ExtensionStorageIDB} = require("resource://gre/modules/ExtensionStorageIDB.jsm");
const {WebExtensionPolicy} = Cu.getGlobalForObject(require("resource://gre/modules/XPCOMUtils.jsm"));
const lodash = require("devtools/client/shared/vendor/lodash");
// Preferences gating privileged (chrome) debugging and remote debugging.
const CHROME_ENABLED_PREF = "devtools.chrome.enabled";
const REMOTE_ENABLED_PREF = "devtools.debugger.remote-enabled";
// Pref gating the extension storage (WebExtension storage.local) panel.
const EXTENSION_STORAGE_ENABLED_PREF = "devtools.storage.extensionStorage.enabled";
// Placeholder value used when a new storage item is created from the UI.
const DEFAULT_VALUE = "value";
// Message name for WebExtension storage.local change notifications.
const DEVTOOLS_EXT_STORAGELOCAL_CHANGED = "Extension:DevTools:OnStorageLocalChanged";
loader.lazyRequireGetter(this, "naturalSortCaseInsensitive",
  "devtools/client/shared/natural-sort", true);
// "Lax", "Strict" and "Unset" are special values of the sameSite property
// that should not be translated.
const COOKIE_SAMESITE = {
  LAX: "Lax",
  STRICT: "Strict",
  UNSET: "Unset",
};
// NOTE(review): presumably matches keys whose scheme separator ":" was
// replaced by "+" (e.g. "https+") — confirm against usage later in the file.
const SAFE_HOSTS_PREFIXES_REGEX =
  /^(about\+|https?\+|file\+|moz-extension\+)/;
// GUID to be used as a separator in compound keys. This must match the same
// constant in devtools/client/storage/ui.js,
// devtools/client/storage/test/head.js and
// devtools/server/tests/browser/head.js
const SEPARATOR_GUID = "{9d414cc5-8319-0a04-0586-c0a6ae01670a}";
loader.lazyImporter(this, "OS", "resource://gre/modules/osfile.jsm");
loader.lazyImporter(this, "Sqlite", "resource://gre/modules/Sqlite.jsm");
// We give this a funny name to avoid confusion with the global
// indexedDB.
loader.lazyGetter(this, "indexedDBForStorage", () => {
  // On xpcshell, we can't instantiate indexedDB without crashing; fall back
  // to an empty object in that case.
  try {
    const sandbox
      = Cu.Sandbox(CC("@mozilla.org/systemprincipal;1", "nsIPrincipal")(),
                   {wantGlobalProperties: ["indexedDB"]});
    return sandbox.indexedDB;
  } catch (e) {
    return {};
  }
});
// Maximum number of cookies/local storage key-value-pairs that can be sent
// over the wire to the client in one request.
const MAX_STORE_OBJECT_COUNT = 50;
// Delay for the batch job that sends the accumulated update packets to the
// client (ms).
const BATCH_DELAY = 200;
// MAX_COOKIE_EXPIRY should be 2^63-1, but JavaScript can't handle that
// precision.
const MAX_COOKIE_EXPIRY = Math.pow(2, 62);
// A RegExp for characters that cannot appear in a file/directory name. This is
// used to sanitize the host name for indexed db to lookup whether the file is
// present in <profileDir>/storage/default/ location
var illegalFileNameCharacters = [
  "[",
  // Control characters \001 to \036 (decimal; i.e. the hex range below)
  "\\x00-\\x24",
  // Special characters
  "/:*?\\\"<>|\\\\",
  "]",
].join("");
var ILLEGAL_CHAR_REGEX = new RegExp(illegalFileNameCharacters, "g");
// Holder for all the registered storage actors.
var storageTypePool = new Map();
/**
 * An async method equivalent to setTimeout but using Promises.
 *
 * @param {number} time
 *        The wait time in milliseconds.
 * @return {Promise}
 *         A promise that resolves with null after `time` milliseconds.
 */
function sleep(time) {
  // Wrap setTimeout in a Promise directly instead of going through the
  // deferred anti-pattern (`defer()`), which is only needed when resolution
  // happens outside the executor.
  return new Promise(resolve => {
    setTimeout(() => {
      resolve(null);
    }, time);
  });
}
// Namespace holding the helper methods used to build and register storage
// actors (see StorageActors.defaults and StorageActors.createActor below).
var StorageActors = {};
/**
 * Creates a default object with the common methods required by all storage
 * actors.
 *
 * This default object is missing a couple of required methods that should be
 * implemented separately for each actor. They are namely:
 * - observe : Method which gets triggered on the notification of the watched
 *             topic.
 * - getNamesForHost : Given a host, get list of all known store names.
 * - getValuesForHost : Given a host (and optionally a name) get all known
 *                      store objects.
 * - toStoreObject : Given a store object, convert it to the required format
 *                   so that it can be transferred over wire.
 * - populateStoresForHost : Given a host, populate the map of all store
 *                           objects for it
 * - getFields: Given a subType(optional), get an array of objects containing
 *              column field info. The info includes,
 *              "name" is name of column key.
 *              "editable" is 1 means editable field; 0 means uneditable.
 *
 * @param {string} typeName
 *        The typeName of the actor.
 * @param {array} observationTopics
 *        An array of topics which this actor listens to via Notification
 *        Observers.
 */
StorageActors.defaults = function(typeName, observationTopics) {
  return {
    typeName: typeName,

    // All child storage actors share the parent storage actor's connection.
    get conn() {
      return this.storageActor.conn;
    },

    /**
     * Returns a list of currently known hosts for the target window. This list
     * contains unique hosts from the window + all inner windows. If
     * this._internalHosts is defined then these will also be added to the list.
     */
    get hosts() {
      const hosts = new Set();
      for (const {location} of this.storageActor.windows) {
        const host = this.getHostName(location);
        if (host) {
          hosts.add(host);
        }
      }
      if (this._internalHosts) {
        for (const host of this._internalHosts) {
          hosts.add(host);
        }
      }
      return hosts;
    },

    /**
     * Returns all the windows present on the page. Includes main window + inner
     * iframe windows.
     */
    get windows() {
      return this.storageActor.windows;
    },

    /**
     * Converts the window.location object into a URL (e.g. http://domain.com).
     * Returns null for protocols that support no storage (chrome:, data:).
     */
    getHostName(location) {
      if (!location) {
        // Debugging a legacy Firefox extension... no hostname available and no
        // storage possible.
        return null;
      }
      switch (location.protocol) {
        case "about:":
          return `${location.protocol}${location.pathname}`;
        case "chrome:":
          // chrome: URLs do not support storage of any type.
          return null;
        case "data:":
          // data: URLs do not support storage of any type.
          return null;
        case "file:":
          return `${location.protocol}//${location.pathname}`;
        case "javascript:":
          return location.href;
        case "moz-extension:":
          return location.origin;
        case "resource:":
          return `${location.origin}${location.pathname}`;
        default:
          // http: or unknown protocol.
          return `${location.protocol}//${location.host}`;
      }
    },

    /**
     * Set up this child actor: remember the parent storage actor, build the
     * initial host -> stores map, register notification observers (if any)
     * and listen for window lifecycle events on the parent actor.
     */
    initialize(storageActor) {
      protocol.Actor.prototype.initialize.call(this, null);
      this.storageActor = storageActor;
      this.populateStoresForHosts();
      if (observationTopics) {
        observationTopics.forEach((observationTopic) => {
          Services.obs.addObserver(this, observationTopic);
        });
      }
      this.onWindowReady = this.onWindowReady.bind(this);
      this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
      this.storageActor.on("window-ready", this.onWindowReady);
      this.storageActor.on("window-destroyed", this.onWindowDestroyed);
    },

    // Tear down in the reverse order of initialize(); storageActor is nulled
    // last so the observers/listeners can be removed first.
    destroy() {
      if (observationTopics) {
        observationTopics.forEach((observationTopic) => {
          Services.obs.removeObserver(this, observationTopic);
        });
      }
      this.storageActor.off("window-ready", this.onWindowReady);
      this.storageActor.off("window-destroyed", this.onWindowDestroyed);
      this.hostVsStores.clear();
      protocol.Actor.prototype.destroy.call(this);
      this.storageActor = null;
    },

    // Default implementation; assumes `host` is present in hostVsStores.
    getNamesForHost(host) {
      return [...this.hostVsStores.get(host).keys()];
    },

    // Default implementation; returns one entry when `name` is given,
    // otherwise every store object for the host.
    getValuesForHost(host, name) {
      if (name) {
        return [this.hostVsStores.get(host).get(name)];
      }
      return [...this.hostVsStores.get(host).values()];
    },

    getObjectsSize(host, names) {
      return names.length;
    },

    /**
     * When a new window is added to the page. This generally means that a new
     * iframe is created, or the current window is completely reloaded.
     *
     * @param {window} window
     *        The window which was added.
     */
    async onWindowReady(window) {
      const host = this.getHostName(window.location);
      if (host && !this.hostVsStores.has(host)) {
        await this.populateStoresForHost(host, window);
        const data = {};
        data[host] = this.getNamesForHost(host);
        this.storageActor.update("added", typeName, data);
      }
    },

    /**
     * When a window is removed from the page. This generally means that an
     * iframe was removed, or the current window reload is triggered.
     *
     * @param {window} window
     *        The window which was removed.
     */
    onWindowDestroyed(window) {
      if (!window.location) {
        // Nothing can be done if location object is null
        return;
      }
      const host = this.getHostName(window.location);
      // Only drop the host if no remaining window still belongs to it.
      if (host && !this.hosts.has(host)) {
        this.hostVsStores.delete(host);
        const data = {};
        data[host] = [];
        this.storageActor.update("deleted", typeName, data);
      }
    },

    // Wire representation of this actor: its id plus the known hosts
    // (store names are fetched lazily, hence the empty arrays).
    form() {
      const hosts = {};
      for (const host of this.hosts) {
        hosts[host] = [];
      }
      return {
        actor: this.actorID,
        hosts: hosts,
      };
    },

    /**
     * Populates a map of known hosts vs a map of stores vs value.
     */
    populateStoresForHosts() {
      this.hostVsStores = new Map();
      for (const host of this.hosts) {
        this.populateStoresForHost(host);
      }
    },

    /**
     * Returns a list of requested store objects. Maximum values returned are
     * MAX_STORE_OBJECT_COUNT. This method returns paginated values whose
     * starting index and total size can be controlled via the options object
     *
     * @param {string} host
     *        The host name for which the store values are required.
     * @param {array:string} names
     *        Array containing the names of required store objects. Empty if all
     *        items are required.
     * @param {object} options
     *        Additional options for the request containing following
     *        properties:
     *         - offset {number} : The begin index of the returned array amongst
     *                             the total values
     *         - size {number} : The number of values required.
     *         - sortOn {string} : The values should be sorted on this property.
     *         - index {string} : In case of indexed db, the IDBIndex to be used
     *                            for fetching the values.
     *
     * @return {object} An object containing following properties:
     *          - offset - The actual offset of the returned array. This might
     *                     be different from the requested offset if that was
     *                     invalid
     *          - total - The total number of entries possible.
     *          - data - The requested values.
     */
    async getStoreObjects(host, names, options = {}) {
      const offset = options.offset || 0;
      let size = options.size || MAX_STORE_OBJECT_COUNT;
      if (size > MAX_STORE_OBJECT_COUNT) {
        size = MAX_STORE_OBJECT_COUNT;
      }
      const sortOn = options.sortOn || "name";
      const toReturn = {
        offset: offset,
        total: 0,
        data: [],
      };
      let principal = null;
      if (this.typeName === "indexedDB") {
        // We only acquire principal when the type of the storage is indexedDB
        // because the principal only matters the indexedDB.
        const win = this.storageActor.getWindowFromHost(host);
        principal = this.getPrincipal(win);
      }
      if (names) {
        for (const name of names) {
          const values = await this.getValuesForHost(host, name, options,
            this.hostVsStores, principal);
          // Subclass getValuesForHost implementations may return a plain
          // array, or an object carrying `result`/`objectStores` (indexedDB).
          const {result, objectStores} = values;
          if (result && typeof result.objectsSize !== "undefined") {
            // NOTE(review): this.objectsSize is never initialized in these
            // defaults — presumably the indexedDB actor override supplies it;
            // confirm before relying on it elsewhere.
            for (const {key, count} of result.objectsSize) {
              this.objectsSize[key] = count;
            }
          }
          if (result) {
            toReturn.data.push(...result.data);
          } else if (objectStores) {
            toReturn.data.push(...objectStores);
          } else {
            toReturn.data.push(...values);
          }
        }
        toReturn.total = this.getObjectsSize(host, names, options);
      } else {
        let obj = await this.getValuesForHost(host, undefined, undefined,
          this.hostVsStores, principal);
        if (obj.dbs) {
          obj = obj.dbs;
        }
        toReturn.total = obj.length;
        toReturn.data = obj;
      }
      if (offset > toReturn.total) {
        // In this case, toReturn.data is an empty array.
        toReturn.offset = toReturn.total;
        toReturn.data = [];
      } else {
        // We need to use natural sort before slicing.
        const sorted = toReturn.data.sort((a, b) => {
          return naturalSortCaseInsensitive(a[sortOn], b[sortOn]);
        });
        let sliced;
        if (this.typeName === "indexedDB") {
          // indexedDB's getValuesForHost never returns *all* values available but only
          // a slice, starting at the expected offset. Therefore the result is already
          // sliced as expected.
          sliced = sorted;
        } else {
          sliced = sorted.slice(offset, offset + size);
        }
        toReturn.data = sliced.map(a => this.toStoreObject(a));
      }
      return toReturn;
    },

    // Principal of the host's window, or the system principal when running
    // in the browser toolbox (no window available).
    getPrincipal(win) {
      if (win) {
        return win.document.nodePrincipal;
      }
      // We are running in the browser toolbox and viewing system DBs so we
      // need to use system principal.
      return Cc["@mozilla.org/systemprincipal;1"]
        .createInstance(Ci.nsIPrincipal);
    },
  };
};
/**
 * Creates an actor and its corresponding front and registers it to the Storage
 * Actor.
 *
 * @See StorageActors.defaults()
 *
 * @param {object} options
 *        Options required by StorageActors.defaults method which are :
 *         - typeName {string}
 *                    The typeName of the actor.
 *         - observationTopics {array}
 *                             The topics which this actor listens to via
 *                             Notification Observers.
 * @param {object} overrides
 *        All the methods which you want to be different from the ones in
 *        StorageActors.defaults method plus the required ones described there.
 */
StorageActors.createActor = function(options = {}, overrides = {}) {
  const {typeName, observationTopics} = options;
  // Start from the shared defaults, then layer the type-specific overrides
  // on top. A for..in copy is used deliberately so that any enumerable
  // inherited properties are applied as well.
  const actorObject = StorageActors.defaults(typeName,
                                             observationTopics || null);
  for (const prop in overrides) {
    actorObject[prop] = overrides[prop];
  }
  // Build the protocol actor from its spec and register it so the parent
  // storage actor can instantiate it later.
  const actor = protocol.ActorClassWithSpec(specs.childSpecs[typeName],
                                            actorObject);
  storageTypePool.set(actorObject.typeName, actor);
};
/**
 * The Cookies actor and front.
 */
StorageActors.createActor({
  typeName: "cookies",
}, {
  /**
   * Overrides the default initialize: cookie access must be proxied to the
   * parent process in E10S mode, so maybeSetupChildProcess() runs before the
   * stores are populated.
   */
  initialize(storageActor) {
    protocol.Actor.prototype.initialize.call(this, null);
    this.storageActor = storageActor;
    // Installs getCookiesFromHost & co. — either direct cookieHelpers
    // bindings (parent process) or message-manager proxies (child process).
    this.maybeSetupChildProcess();
    this.populateStoresForHosts();
    this.addCookieObservers();
    this.onWindowReady = this.onWindowReady.bind(this);
    this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
    this.storageActor.on("window-ready", this.onWindowReady);
    this.storageActor.on("window-destroyed", this.onWindowDestroyed);
  },

  destroy() {
    this.hostVsStores.clear();
    // We need to remove the cookie listeners early in E10S mode so we need to
    // use a conditional here to ensure that we only attempt to remove them in
    // single process mode.
    if (!DebuggerServer.isInChildProcess) {
      this.removeCookieObservers();
    }
    this.storageActor.off("window-ready", this.onWindowReady);
    this.storageActor.off("window-destroyed", this.onWindowDestroyed);
    this._pendingResponse = null;
    protocol.Actor.prototype.destroy.call(this);
    this.storageActor = null;
  },

  /**
   * Given a cookie object, figure out all the matching hosts from the page
   * that the cookie belongs to.
   */
  getMatchingHosts(cookies) {
    // Normalize a single cookie into an array (a bare cookie has no length).
    if (!cookies.length) {
      cookies = [cookies];
    }
    const hosts = new Set();
    for (const host of this.hosts) {
      for (const cookie of cookies) {
        if (this.isCookieAtHost(cookie, host)) {
          hosts.add(host);
        }
      }
    }
    return [...hosts];
  },

  /**
   * Given a cookie object and a host, figure out if the cookie is valid for
   * that host.
   */
  isCookieAtHost(cookie, host) {
    if (cookie.host == null) {
      return host == null;
    }
    host = trimHttpHttpsPort(host);
    if (cookie.host.startsWith(".")) {
      // Domain cookie: matches the host and any of its subdomains.
      return ("." + host).endsWith(cookie.host);
    }
    if (cookie.host === "") {
      // Cookie set from a file:// URL: the path carries the identity.
      return host.startsWith("file://" + cookie.path);
    }
    return cookie.host == host;
  },

  /**
   * Convert a cookie into the wire format sent to the client. Times are
   * normalized to milliseconds.
   */
  toStoreObject(cookie) {
    if (!cookie) {
      return null;
    }
    return {
      uniqueKey: `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                 `${SEPARATOR_GUID}${cookie.path}`,
      name: cookie.name,
      host: cookie.host || "",
      path: cookie.path || "",
      // because expires is in seconds
      expires: (cookie.expires || 0) * 1000,
      // because creationTime is in micro seconds
      creationTime: cookie.creationTime / 1000,
      // - do -
      lastAccessed: cookie.lastAccessed / 1000,
      value: new LongStringActor(this.conn, cookie.value || ""),
      hostOnly: !cookie.isDomain,
      isSecure: cookie.isSecure,
      isHttpOnly: cookie.isHttpOnly,
      sameSite: this.getSameSiteStringFromCookie(cookie),
    };
  },

  // Map the cookie's sameSite constant to its display string.
  getSameSiteStringFromCookie(cookie) {
    switch (cookie.sameSite) {
      case cookie.SAMESITE_LAX:
        return COOKIE_SAMESITE.LAX;
      case cookie.SAMESITE_STRICT:
        return COOKIE_SAMESITE.STRICT;
    }
    // cookie.SAMESITE_NONE
    return COOKIE_SAMESITE.UNSET;
  },

  /**
   * Populate this.hostVsStores for `host` with its cookies, keyed by the
   * compound uniqueKey (name + host + path joined by SEPARATOR_GUID).
   */
  populateStoresForHost(host) {
    this.hostVsStores.set(host, new Map());
    const doc = this.storageActor.document;
    const cookies = this.getCookiesFromHost(host, doc.nodePrincipal
                                                     .originAttributes);
    for (const cookie of cookies) {
      if (this.isCookieAtHost(cookie, host)) {
        const uniqueKey = `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                          `${SEPARATOR_GUID}${cookie.path}`;
        this.hostVsStores.get(host).set(uniqueKey, cookie);
      }
    }
  },

  /**
   * Notification observer for "cookie-change".
   *
   * @param subject
   *        {Cookie|[Array]} A JSON parsed object containing either a single
   *        cookie representation or an array. Array is only in case of
   *        a "batch-deleted" action.
   * @param {string} topic
   *        The topic of the notification.
   * @param {string} action
   *        Additional data associated with the notification. Its the type of
   *        cookie change in the "cookie-change" topic.
   */
  onCookieChanged(subject, topic, action) {
    if (topic !== "cookie-changed" ||
        !this.storageActor ||
        !this.storageActor.windows) {
      return null;
    }
    const hosts = this.getMatchingHosts(subject);
    const data = {};
    switch (action) {
      case "added":
      case "changed":
        if (hosts.length) {
          for (const host of hosts) {
            const uniqueKey = `${subject.name}${SEPARATOR_GUID}${subject.host}` +
                              `${SEPARATOR_GUID}${subject.path}`;
            this.hostVsStores.get(host).set(uniqueKey, subject);
            data[host] = [uniqueKey];
          }
          this.storageActor.update(action, "cookies", data);
        }
        break;
      case "deleted":
        if (hosts.length) {
          for (const host of hosts) {
            const uniqueKey = `${subject.name}${SEPARATOR_GUID}${subject.host}` +
                              `${SEPARATOR_GUID}${subject.path}`;
            this.hostVsStores.get(host).delete(uniqueKey);
            data[host] = [uniqueKey];
          }
          this.storageActor.update("deleted", "cookies", data);
        }
        break;
      case "batch-deleted":
        if (hosts.length) {
          for (const host of hosts) {
            const stores = [];
            for (const cookie of subject) {
              const uniqueKey = `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                                `${SEPARATOR_GUID}${cookie.path}`;
              this.hostVsStores.get(host).delete(uniqueKey);
              stores.push(uniqueKey);
            }
            data[host] = stores;
          }
          this.storageActor.update("deleted", "cookies", data);
        }
        break;
      case "cleared":
        if (hosts.length) {
          for (const host of hosts) {
            data[host] = [];
          }
          this.storageActor.update("cleared", "cookies", data);
        }
        break;
    }
    return null;
  },

  // Column descriptors for the client-side table view.
  async getFields() {
    return [
      { name: "uniqueKey", editable: false, private: true },
      { name: "name", editable: true, hidden: false },
      { name: "host", editable: true, hidden: false },
      { name: "path", editable: true, hidden: false },
      { name: "expires", editable: true, hidden: false },
      { name: "lastAccessed", editable: false, hidden: false },
      { name: "creationTime", editable: false, hidden: true },
      { name: "value", editable: true, hidden: false },
      { name: "hostOnly", editable: false, hidden: true },
      { name: "isSecure", editable: true, hidden: true },
      { name: "isHttpOnly", editable: true, hidden: false },
      { name: "sameSite", editable: false, hidden: false },
    ];
  },

  /**
   * Pass the editItem command from the content to the chrome process.
   *
   * @param {Object} data
   *        See editCookie() for format details.
   */
  async editItem(data) {
    const doc = this.storageActor.document;
    data.originAttributes = doc.nodePrincipal
                               .originAttributes;
    this.editCookie(data);
  },

  // Create a new cookie named `guid` on the document, expiring in 24 hours.
  async addItem(guid) {
    const doc = this.storageActor.document;
    const time = new Date().getTime();
    const expiry = new Date(time + 3600 * 24 * 1000).toGMTString();
    doc.cookie = `${guid}=${DEFAULT_VALUE};expires=${expiry}`;
  },

  async removeItem(host, name) {
    const doc = this.storageActor.document;
    this.removeCookie(host, name, doc.nodePrincipal
                                     .originAttributes);
  },

  async removeAll(host, domain) {
    const doc = this.storageActor.document;
    this.removeAllCookies(host, domain, doc.nodePrincipal
                                           .originAttributes);
  },

  async removeAllSessionCookies(host, domain) {
    // NOTE(review): maybeSetupChildProcess() assigns an own property with
    // this same name (bound to cookieHelpers or callParentProcess), which
    // shadows this method on the instance — so this wrapper, and the
    // originAttributes it adds, may never run. Confirm which implementation
    // is actually invoked by the protocol spec.
    const doc = this.storageActor.document;
    this.removeAllSessionCookies(host, domain, doc.nodePrincipal
                                                  .originAttributes);
  },

  /**
   * Install cookie helper methods on this actor. In the parent process they
   * bind directly to cookieHelpers; in a child process they become
   * synchronous message-manager proxies to the parent.
   */
  maybeSetupChildProcess() {
    cookieHelpers.onCookieChanged = this.onCookieChanged.bind(this);
    if (!DebuggerServer.isInChildProcess) {
      this.getCookiesFromHost =
        cookieHelpers.getCookiesFromHost.bind(cookieHelpers);
      this.addCookieObservers =
        cookieHelpers.addCookieObservers.bind(cookieHelpers);
      this.removeCookieObservers =
        cookieHelpers.removeCookieObservers.bind(cookieHelpers);
      this.editCookie =
        cookieHelpers.editCookie.bind(cookieHelpers);
      this.removeCookie =
        cookieHelpers.removeCookie.bind(cookieHelpers);
      this.removeAllCookies =
        cookieHelpers.removeAllCookies.bind(cookieHelpers);
      this.removeAllSessionCookies =
        cookieHelpers.removeAllSessionCookies.bind(cookieHelpers);
      return;
    }
    const mm = this.conn.parentMessageManager;
    this.conn.setupInParent({
      module: "devtools/server/actors/storage",
      setupParent: "setupParentProcessForCookies",
    });
    this.getCookiesFromHost =
      callParentProcess.bind(null, "getCookiesFromHost");
    this.addCookieObservers =
      callParentProcess.bind(null, "addCookieObservers");
    this.removeCookieObservers =
      callParentProcess.bind(null, "removeCookieObservers");
    this.editCookie =
      callParentProcess.bind(null, "editCookie");
    this.removeCookie =
      callParentProcess.bind(null, "removeCookie");
    this.removeAllCookies =
      callParentProcess.bind(null, "removeAllCookies");
    this.removeAllSessionCookies =
      callParentProcess.bind(null, "removeAllSessionCookies");
    mm.addMessageListener("debug:storage-cookie-request-child",
      cookieHelpers.handleParentRequest);
    // Synchronously forward a helper call to the parent process and unwrap
    // the (single) reply.
    function callParentProcess(methodName, ...args) {
      const reply = mm.sendSyncMessage("debug:storage-cookie-request-parent", {
        method: methodName,
        args: args,
      });
      if (reply.length === 0) {
        console.error("ERR_DIRECTOR_CHILD_NO_REPLY from " + methodName);
      } else if (reply.length > 1) {
        console.error("ERR_DIRECTOR_CHILD_MULTIPLE_REPLIES from " + methodName);
      }
      const result = reply[0];
      if (methodName === "getCookiesFromHost") {
        return JSON.parse(result);
      }
      return result;
    }
  },
});
/**
 * Helpers that perform the actual cookie operations through Services.cookies
 * (nsICookieManager). In single-process mode the cookies actor binds these
 * directly; in E10S mode they run in the parent process and are reached via
 * the message manager (see handleChildRequest / handleParentRequest).
 */
var cookieHelpers = {
  /**
   * Enumerate the cookies stored for a host.
   *
   * @param {string} host
   *        Host to query; the http(s) scheme and port are stripped first.
   * @param {object} originAttributes
   *        Origin attributes of the requesting document.
   * @return {Array} Array of cookie objects.
   */
  getCookiesFromHost(host, originAttributes) {
    // Local files have no host.
    if (host.startsWith("file:///")) {
      host = "";
    }
    host = trimHttpHttpsPort(host);
    return Array.from(
      Services.cookies.getCookiesFromHost(host, originAttributes));
  },

  /**
   * Apply the results of a cookie edit.
   *
   * @param {Object} data
   *        An object in the following format:
   *        {
   *          host: "http://www.mozilla.org",
   *          field: "value",
   *          editCookie: "name",
   *          oldValue: "%7BHello%7D",
   *          newValue: "%7BHelloo%7D",
   *          items: {
   *            name: "optimizelyBuckets",
   *            path: "/",
   *            host: ".mozilla.org",
   *            expires: "Mon, 02 Jun 2025 12:37:37 GMT",
   *            creationTime: "Tue, 18 Nov 2014 16:21:18 GMT",
   *            lastAccessed: "Wed, 17 Feb 2016 10:06:23 GMT",
   *            value: "%7BHelloo%7D",
   *            isDomain: "true",
   *            isSecure: "false",
   *            isHttpOnly: "false"
   *          }
   *        }
   */
  /* eslint-disable complexity */
  editCookie(data) {
    let {field, oldValue, newValue} = data;
    // A cookie is identified by its (name, host, path) triple; if one of
    // those is the edited field, the pre-edit value identifies the cookie.
    const origName = field === "name" ? oldValue : data.items.name;
    const origHost = field === "host" ? oldValue : data.items.host;
    const origPath = field === "path" ? oldValue : data.items.path;
    let cookie = null;
    const cookies = Services.cookies.getCookiesFromHost(origHost,
      data.originAttributes || {});
    for (const nsiCookie of cookies) {
      if (nsiCookie.name === origName &&
          nsiCookie.host === origHost &&
          nsiCookie.path === origPath) {
        // Copy into a plain mutable object; nsICookie itself is read-only.
        cookie = {
          host: nsiCookie.host,
          path: nsiCookie.path,
          name: nsiCookie.name,
          value: nsiCookie.value,
          isSecure: nsiCookie.isSecure,
          isHttpOnly: nsiCookie.isHttpOnly,
          isSession: nsiCookie.isSession,
          expires: nsiCookie.expires,
          originAttributes: nsiCookie.originAttributes,
        };
        break;
      }
    }
    if (!cookie) {
      return;
    }
    // If the date is expired set it for 10 seconds in the future.
    // (Compare against the numeric timestamp explicitly rather than relying
    // on implicit Date-to-number coercion of `<=`.)
    const now = new Date();
    if (!cookie.isSession && (cookie.expires * 1000) <= now.getTime()) {
      const tenSecondsFromNow = (now.getTime() + 10 * 1000) / 1000;
      cookie.expires = tenSecondsFromNow;
    }
    switch (field) {
      case "isSecure":
      case "isHttpOnly":
      case "isSession":
        // Boolean fields arrive from the client as strings.
        newValue = newValue === "true";
        break;
      case "expires":
        newValue = Date.parse(newValue) / 1000;
        if (isNaN(newValue)) {
          newValue = MAX_COOKIE_EXPIRY;
        }
        break;
      case "host":
      case "name":
      case "path":
        // Remove the edited cookie.
        Services.cookies.remove(origHost, origName, origPath,
          false, cookie.originAttributes);
        break;
    }
    // Apply changes.
    cookie[field] = newValue;
    // cookie.isSession is not always set correctly on session cookies so we
    // need to trust cookie.expires instead.
    cookie.isSession = !cookie.expires;
    // Add the edited cookie.
    // NOTE(review): sameSite is never copied onto the temporary object above,
    // so `undefined` is passed here — confirm whether sameSite should be
    // preserved across edits.
    Services.cookies.add(
      cookie.host,
      cookie.path,
      cookie.name,
      cookie.value,
      cookie.isSecure,
      cookie.isHttpOnly,
      cookie.isSession,
      cookie.isSession ? MAX_COOKIE_EXPIRY : cookie.expires,
      cookie.originAttributes,
      cookie.sameSite
    );
  },
  /* eslint-enable complexity */

  /**
   * Remove the cookies at `host` matching the filters in `opts`.
   *
   * @param {string} host
   *        The host whose cookies are enumerated.
   * @param {object} opts
   *        Optional filters: name (a compound uniqueKey — see SEPARATOR_GUID),
   *        domain, path, originAttributes, and session (remove only session
   *        cookies).
   */
  _removeCookies(host, opts = {}) {
    // We use a uniqueId to emulate compound keys for cookies. We need to
    // extract the cookie name to remove the correct cookie.
    if (opts.name) {
      const split = opts.name.split(SEPARATOR_GUID);
      opts.name = split[0];
      opts.path = split[2];
    }
    host = trimHttpHttpsPort(host);
    function hostMatches(cookieHost, matchHost) {
      if (cookieHost == null) {
        return matchHost == null;
      }
      if (cookieHost.startsWith(".")) {
        // Domain cookie: matches the host and any of its subdomains.
        return ("." + matchHost).endsWith(cookieHost);
      }
      // Bug fix: compare against the matchHost parameter. The original
      // compared against the enclosing `host` variable, silently ignoring
      // the parameter (harmless today only because every caller passes
      // `host` as matchHost).
      return cookieHost == matchHost;
    }
    const cookies = Services.cookies.getCookiesFromHost(host,
      opts.originAttributes || {});
    for (const cookie of cookies) {
      if (hostMatches(cookie.host, host) &&
          (!opts.name || cookie.name === opts.name) &&
          (!opts.domain || cookie.host === opts.domain) &&
          (!opts.path || cookie.path === opts.path) &&
          (!opts.session || (!cookie.expires && !cookie.maxAge))) {
        Services.cookies.remove(
          cookie.host,
          cookie.name,
          cookie.path,
          false,
          cookie.originAttributes
        );
      }
    }
  },

  // Remove a single cookie identified by its compound uniqueKey `name`.
  removeCookie(host, name, originAttributes) {
    if (name !== undefined) {
      this._removeCookies(host, { name, originAttributes });
    }
  },

  // Remove every cookie for the host (optionally restricted to `domain`).
  removeAllCookies(host, domain, originAttributes) {
    this._removeCookies(host, { domain, originAttributes });
  },

  // Remove only the session cookies for the host.
  removeAllSessionCookies(host, domain, originAttributes) {
    this._removeCookies(host, { domain, originAttributes, session: true });
  },

  addCookieObservers() {
    Services.obs.addObserver(cookieHelpers, "cookie-changed");
    return null;
  },

  removeCookieObservers() {
    Services.obs.removeObserver(cookieHelpers, "cookie-changed");
    return null;
  },

  /**
   * nsIObserver entry point for "cookie-changed" notifications. Unwraps the
   * XPCOM subject and forwards it to onCookieChanged (installed on this
   * object by the cookies actor's maybeSetupChildProcess).
   */
  observe(subject, topic, data) {
    if (!subject) {
      return;
    }
    switch (topic) {
      case "cookie-changed": {
        if (data === "batch-deleted") {
          const cookiesNoInterface = subject.QueryInterface(Ci.nsIArray);
          const cookies = [];
          for (let i = 0; i < cookiesNoInterface.length; i++) {
            const cookie = cookiesNoInterface.queryElementAt(i, Ci.nsICookie);
            cookies.push(cookie);
          }
          cookieHelpers.onCookieChanged(cookies, topic, data);
          return;
        }
        const cookie = subject.QueryInterface(Ci.nsICookie);
        cookieHelpers.onCookieChanged(cookie, topic, data);
        break;
      }
    }
  },

  /**
   * Handle a request forwarded from the parent process (cookie-change
   * notifications observed there are replayed in the child).
   */
  handleParentRequest(msg) {
    switch (msg.json.method) {
      case "onCookieChanged": {
        let [cookie, topic, data] = msg.data.args;
        // Cookies cross the process boundary as JSON strings.
        cookie = JSON.parse(cookie);
        cookieHelpers.onCookieChanged(cookie, topic, data);
        break;
      }
    }
  },

  /**
   * Handle a synchronous request sent by the child process's cookies actor
   * and dispatch it to the matching helper.
   */
  handleChildRequest(msg) {
    switch (msg.json.method) {
      case "getCookiesFromHost": {
        const host = msg.data.args[0];
        const originAttributes = msg.data.args[1];
        const cookies = cookieHelpers.getCookiesFromHost(host, originAttributes);
        // Serialized because cookie objects can't cross the process boundary.
        return JSON.stringify(cookies);
      }
      case "addCookieObservers": {
        return cookieHelpers.addCookieObservers();
      }
      case "removeCookieObservers": {
        return cookieHelpers.removeCookieObservers();
      }
      case "editCookie": {
        const rowdata = msg.data.args[0];
        return cookieHelpers.editCookie(rowdata);
      }
      case "createNewCookie": {
        // NOTE(review): createNewCookie is not defined on cookieHelpers, so
        // this case would throw if ever reached — confirm whether it is dead
        // code or a missing helper.
        const host = msg.data.args[0];
        const guid = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.createNewCookie(host, guid, originAttributes);
      }
      case "removeCookie": {
        const host = msg.data.args[0];
        const name = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeCookie(host, name, originAttributes);
      }
      case "removeAllCookies": {
        const host = msg.data.args[0];
        const domain = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeAllCookies(host, domain, originAttributes);
      }
      case "removeAllSessionCookies": {
        const host = msg.data.args[0];
        const domain = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeAllSessionCookies(host, domain, originAttributes);
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
};
/**
 * E10S parent/child setup helpers
 *
 * Runs in the parent process: wires cookieHelpers to the message manager so
 * the child-side cookies actor can drive it, and forwards cookie-change
 * notifications back down to the child.
 */
exports.setupParentProcessForCookies = function({ mm, prefix }) {
  // Replace the direct callback with a proxy that relays the notification to
  // the child process.
  cookieHelpers.onCookieChanged =
    callChildProcess.bind(null, "onCookieChanged");
  // listen for director-script requests from the child process
  setMessageManager(mm);

  function callChildProcess(methodName, ...args) {
    if (methodName === "onCookieChanged") {
      // Cookies are serialized to JSON before crossing the process boundary.
      args[0] = JSON.stringify(args[0]);
    }
    try {
      mm.sendAsyncMessage("debug:storage-cookie-request-child", {
        method: methodName,
        args: args,
      });
    } catch (e) {
      // We may receive a NS_ERROR_NOT_INITIALIZED if the target window has
      // been closed. This can legitimately happen in between test runs.
    }
  }

  // Swap the active message manager (used on browser swap and teardown);
  // note this reassigns the `mm` binding that callChildProcess closes over.
  function setMessageManager(newMM) {
    if (mm) {
      mm.removeMessageListener("debug:storage-cookie-request-parent",
        cookieHelpers.handleChildRequest);
    }
    mm = newMM;
    if (mm) {
      mm.addMessageListener("debug:storage-cookie-request-parent",
        cookieHelpers.handleChildRequest);
    }
  }

  return {
    onBrowserSwap: setMessageManager,
    onDisconnected: () => {
      // Although "disconnected-from-child" implies that the child is already
      // disconnected this is not the case. The disconnection takes place after
      // this method has finished. This gives us chance to clean up items within
      // the parent process e.g. observers.
      cookieHelpers.removeCookieObservers();
      setMessageManager(null);
    },
  };
};
/**
 * Helper method to create the overriden object required in
 * StorageActors.createActor for Local Storage and Session Storage.
 * This method exists as both Local Storage and Session Storage have almost
 * identical actors.
 *
 * @param {string} type
 *        "localStorage" or "sessionStorage" — both the window property that
 *        holds the storage area and the observer-data value identifying it.
 * @return {object} Method overrides for StorageActors.createActor.
 */
function getObjectForLocalOrSessionStorage(type) {
  return {
    getNamesForHost(host) {
      const storage = this.hostVsStores.get(host);
      return storage ? Object.keys(storage) : [];
    },

    /**
     * Get the stored items for `host` as {name, value} records — a single
     * record when `name` is given, otherwise all of them.
     */
    getValuesForHost(host, name) {
      const storage = this.hostVsStores.get(host);
      // Single guard (the original re-checked `!storage` twice and re-tested
      // `storage` in a ternary after the guard — redundant dead code).
      if (!storage) {
        return [];
      }
      if (name) {
        return [{ name, value: storage.getItem(name) }];
      }
      // local and session storage cannot be iterated over using Object.keys()
      // because it skips keys that are duplicated on the prototype
      // e.g. "key", "getKeys" so we need to gather the real keys using the
      // storage.key() function.
      const storageArray = [];
      for (let i = 0; i < storage.length; i++) {
        const key = storage.key(i);
        storageArray.push({
          name: key,
          value: storage.getItem(key),
        });
      }
      return storageArray;
    },

    populateStoresForHost(host, window) {
      try {
        this.hostVsStores.set(host, window[type]);
      } catch (ex) {
        // Accessing window[type] can throw (e.g. when storage is disabled
        // by preference or by a sandboxed frame).
        console.warn(`Failed to enumerate ${type} for host ${host}: ${ex}`);
      }
    },

    populateStoresForHosts() {
      this.hostVsStores = new Map();
      for (const window of this.windows) {
        const host = this.getHostName(window.location);
        if (host) {
          this.populateStoresForHost(host, window);
        }
      }
    },

    // Column descriptors for the client-side table view.
    async getFields() {
      return [
        { name: "name", editable: true },
        { name: "value", editable: true },
      ];
    },

    // Create a new item keyed by `guid` with the placeholder value.
    async addItem(guid, host) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.setItem(guid, DEFAULT_VALUE);
    },

    /**
     * Edit localStorage or sessionStorage fields.
     *
     * @param {Object} data
     *        See editCookie() for format details.
     */
    async editItem({host, field, oldValue, items}) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      if (field === "name") {
        // Renaming: drop the old key before writing the new one.
        storage.removeItem(oldValue);
      }
      storage.setItem(items.name, items.value);
    },

    async removeItem(host, name) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.removeItem(name);
    },

    async removeAll(host) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.clear();
    },

    /**
     * Notification observer for storage change topics; translates the event
     * into an "added"/"changed"/"deleted"/"cleared" update for the client.
     */
    observe(subject, topic, data) {
      if ((topic != "dom-storage2-changed" &&
           topic != "dom-private-storage2-changed") ||
          data != type) {
        return null;
      }
      const host = this.getSchemaAndHost(subject.url);
      if (!this.hostVsStores.has(host)) {
        return null;
      }
      let action = "changed";
      if (subject.key == null) {
        // A null key signals that the whole storage area was cleared.
        return this.storageActor.update("cleared", type, [host]);
      } else if (subject.oldValue == null) {
        action = "added";
      } else if (subject.newValue == null) {
        action = "deleted";
      }
      const updateData = {};
      updateData[host] = [subject.key];
      return this.storageActor.update(action, type, updateData);
    },

    /**
     * Given a url, correctly determine its protocol + hostname part.
     */
    getSchemaAndHost(url) {
      const uri = Services.io.newURI(url);
      if (!uri.host) {
        return uri.spec;
      }
      return uri.scheme + "://" + uri.hostPort;
    },

    // Wire representation of a single item; long values go through a
    // LongStringActor.
    toStoreObject(item) {
      if (!item) {
        return null;
      }
      return {
        name: item.name,
        value: new LongStringActor(this.conn, item.value || ""),
      };
    },
  };
}
/**
 * The Local Storage actor and front.
 */
StorageActors.createActor({
  typeName: "localStorage",
  // Platform notifies storage changes on these topics; the private-browsing
  // variant covers windows in private mode.
  observationTopics: ["dom-storage2-changed", "dom-private-storage2-changed"],
}, getObjectForLocalOrSessionStorage("localStorage"));
/**
 * The Session Storage actor and front.
 */
StorageActors.createActor({
  typeName: "sessionStorage",
  observationTopics: ["dom-storage2-changed", "dom-private-storage2-changed"],
}, getObjectForLocalOrSessionStorage("sessionStorage"));
/**
 * Helpers for the extensionStorage actor. Some of these methods run in the
 * child (content) process, others in the parent (main) process; the two
 * sides communicate over the message managers via the
 * "debug:storage-extensionStorage-request-parent" (child -> parent) and
 * "debug:storage-extensionStorage-request-child" (parent -> child) messages.
 */
const extensionStorageHelpers = {
  // Map<methodName, deferred> — child-side promises awaiting a reply from
  // the parent process (resolved in handleParentRequest's "backToChild").
  unresolvedPromises: new Map(),
  // Map<message name, listener> — parent-side storage.onChanged listeners,
  // removed again in onDisconnected().
  onChangedListeners: new Map(),
  typesFromString: { // Helper methods to parse string values in editItem
    jsonifiable: {
      // True when `str` round-trips through JSON.parse without throwing.
      test(str) {
        try {
          JSON.parse(str);
        } catch (e) {
          return false;
        }
        return true;
      },
      parse(str) {
        return JSON.parse(str);
      },
    },
  },
  supportedTypes: { // Helper methods to determine the value type of an item in isEditable
    array: {
      test(value) {
        return Array.isArray(value);
      },
    },
    boolean: {
      test(value) {
        return typeof value === "boolean";
      },
    },
    null: {
      test(value) {
        return value === null;
      },
    },
    number: {
      test(value) {
        return typeof value === "number";
      },
    },
    object: {
      test(value) {
        return lodash.isPlainObject(value);
      },
    },
    string: {
      test(value) {
        return typeof value === "string";
      },
    },
  },
  unsupportedTypes: { // Used to display a string in the client in toStoreObject
    arrayBuffer: {
      test(value) {
        return lodash.isArrayBuffer(value);
      },
      stringify(value) {
        return "[object ArrayBuffer]";
      },
    },
    bigint: {
      test(value) {
        // eslint-disable-next-line
        return typeof value === "bigint";
      },
      stringify(value) {
        return "[value bigint]";
      },
    },
    date: {
      test(value) {
        return lodash.isDate(value);
      },
      stringify(value) {
        return "[object Date]";
      },
    },
    map: {
      test(value) {
        return lodash.isMap(value);
      },
      stringify(value) {
        return "[object Map]";
      },
    },
    "set": {
      test(value) {
        return lodash.isSet(value);
      },
      stringify(value) {
        return "[object Set]";
      },
    },
    regexp: {
      test(value) {
        return lodash.isRegExp(value);
      },
      stringify(value) {
        return "[object RegExp]";
      },
    },
    undefined: {
      test(value) {
        return typeof value === "undefined";
      },
      stringify(value) {
        return "[value undefined]";
      },
    },
  },
  // Sets the parent process message manager
  setPpmm(ppmm) {
    this.ppmm = ppmm;
  },
  // A promise in the main process has resolved, and we need to pass the return value(s)
  // back to the child process
  backToChild(...args) {
    Services.mm.broadcastAsyncMessage("debug:storage-extensionStorage-request-child", {
      method: "backToChild",
      args: args,
    });
  },
  // The main process does not require an extension context to select the backend
  // Bug 1542038, 1542039: Each storage area will need its own implementation, as
  // they use different storage backends.
  async selectBackendInParent(addonId) {
    const {extension} = WebExtensionPolicy.getByID(addonId);
    const parentResult = await ExtensionStorageIDB.selectBackend({extension});
    const result = {
      ...parentResult,
      // Received as a StructuredCloneHolder, so we need to deserialize
      storagePrincipal: parentResult.storagePrincipal.deserialize(this, true),
    };
    // Subscribe a listener for the storage.onChanged API event notifications
    // and keep track of it to remove it when the debugger is being disconnected.
    const messageName = `Extension:StorageLocalOnChanged:${extension.uuid}`;
    const onChangedListener = ({name, data}) => {
      // Relay extension storage changes to every child process so the
      // actor's onStorageChange handler can pick them up.
      Services.mm.broadcastAsyncMessage(DEVTOOLS_EXT_STORAGELOCAL_CHANGED, {
        changes: data,
        extensionUUID: extension.uuid,
      });
    };
    Services.ppmm.addMessageListener(messageName, onChangedListener);
    this.onChangedListeners.set(messageName, onChangedListener);
    return this.backToChild("selectBackendInParent", result);
  },
  // Parent-side cleanup: drop every storage.onChanged relay registered in
  // selectBackendInParent.
  onDisconnected() {
    for (const [messageName, listener] of this.onChangedListeners) {
      Services.ppmm.removeMessageListener(messageName, listener);
    }
  },
  // Runs in the main process. This determines what code to execute based on the message
  // received from the child process.
  async handleChildRequest(msg) {
    switch (msg.json.method) {
      case "selectBackendInParent": {
        const addonId = msg.data.args[0];
        const result = await extensionStorageHelpers.selectBackendInParent(addonId);
        return result;
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
  // Runs in the child process. This determines what code to execute based on the message
  // received from the parent process.
  handleParentRequest(msg) {
    switch (msg.json.method) {
      case "backToChild": {
        // Resolve the deferred stored by callParentProcessAsync for this
        // method name, handing the parent's return value to the caller.
        const [func, rv] = msg.json.args;
        const deferred = this.unresolvedPromises.get(func);
        if (deferred) {
          this.unresolvedPromises.delete(func);
          deferred.resolve(rv);
        }
        break;
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
  // Child-side: send a request to the parent process and return a promise
  // that handleParentRequest resolves when the reply comes back.
  callParentProcessAsync(methodName, ...args) {
    const deferred = defer();
    this.unresolvedPromises.set(methodName, deferred);
    this.ppmm.sendAsyncMessage("debug:storage-extensionStorage-request-parent", {
      method: methodName,
      args: args,
    });
    return deferred.promise;
  },
};
/**
 * E10S parent/child setup helpers
 * Add a message listener in the parent process to receive messages from the child
 * process.
 */
exports.setupParentProcessForExtensionStorage = function({mm, prefix}) {
  // listen for director-script requests from the child process
  setMessageManager(mm);
  // NOTE: the `mm` parameter doubles as the closure's "current message
  // manager" slot — setMessageManager reassigns it on browser swap. The very
  // first call removes a listener that was never added, which is harmless.
  function setMessageManager(newMM) {
    if (mm) {
      mm.removeMessageListener("debug:storage-extensionStorage-request-parent",
        extensionStorageHelpers.handleChildRequest);
    }
    mm = newMM;
    if (mm) {
      mm.addMessageListener("debug:storage-extensionStorage-request-parent",
        extensionStorageHelpers.handleChildRequest);
    }
  }
  return {
    onBrowserSwap: setMessageManager,
    onDisconnected: () => {
      // Although "disconnected-from-child" implies that the child is already
      // disconnected this is not the case. The disconnection takes place after
      // this method has finished. This gives us chance to clean up items within
      // the parent process e.g. observers.
      setMessageManager(null);
      extensionStorageHelpers.onDisconnected();
    },
  };
};
/**
 * The Extension Storage actor.
 */
if (Services.prefs.getBoolPref(EXTENSION_STORAGE_ENABLED_PREF)) {
  StorageActors.createActor({
    typeName: "extensionStorage",
  }, {
    initialize(storageActor) {
      protocol.Actor.prototype.initialize.call(this, null);
      this.storageActor = storageActor;
      // The id of the add-on this actor's target debugs.
      this.addonId = this.storageActor.parentActor.addonId;
      // Map<host, ExtensionStorageIDB db connection>
      // Bug 1542038, 1542039: Each storage area will need its own
      // dbConnectionForHost, as they each have different storage backends.
      // Anywhere dbConnectionForHost is used, we need to know the storage
      // area to access the correct database.
      this.dbConnectionForHost = new Map();
      this.setupChildProcess();
      // Bind handlers before registering so the exact same references can be
      // removed again in destroy().
      this.onStorageChange = this.onStorageChange.bind(this);
      this.conn.parentMessageManager.addMessageListener(
        DEVTOOLS_EXT_STORAGELOCAL_CHANGED, this.onStorageChange);
      this.populateStoresForHosts();
      this.onWindowReady = this.onWindowReady.bind(this);
      this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
      this.storageActor.on("window-ready", this.onWindowReady);
      this.storageActor.on("window-destroyed", this.onWindowDestroyed);
    },
    // Unregister all listeners and drop references so the actor can be GC'd.
    destroy() {
      this.conn.parentMessageManager.removeMessageListener(
        DEVTOOLS_EXT_STORAGELOCAL_CHANGED, this.onStorageChange);
      this.storageActor.off("window-ready", this.onWindowReady);
      this.storageActor.off("window-destroyed", this.onWindowDestroyed);
      this.hostVsStores.clear();
      protocol.Actor.prototype.destroy.call(this);
      this.storageActor = null;
    },
    /**
     * Wire this (child-process) actor to its parent-process counterpart:
     * installs the parent-side helpers and exposes selectBackendInParent as
     * an async cross-process call.
     */
    setupChildProcess() {
      const ppmm = this.conn.parentMessageManager;
      extensionStorageHelpers.setPpmm(ppmm);
      this.conn.setupInParent({
        module: "devtools/server/actors/storage",
        setupParent: "setupParentProcessForExtensionStorage",
      });
      // Backend selection must happen in the main process; proxy the call.
      this.selectBackendInParent =
        extensionStorageHelpers.callParentProcessAsync.bind(
          extensionStorageHelpers,
          "selectBackendInParent"
        );
      // Add a message listener in the child process to receive messages from the parent
      // process
      ppmm.addMessageListener(
        "debug:storage-extensionStorage-request-child",
        extensionStorageHelpers.handleParentRequest.bind(extensionStorageHelpers),
      );
    },
    /**
     * This fires when the extension changes storage data while the storage
     * inspector is open. Ensures this.hostVsStores stays up-to-date and
     * passes the change on to update the client.
     */
    onStorageChange({name, data}) {
      const host = `moz-extension://${data.extensionUUID}`;
      const changes = data.changes;
      const storeMap = this.hostVsStores.get(host);
      if (!storeMap) {
        return;
      }
      for (const key in changes) {
        const storageChange = changes[key];
        let {newValue, oldValue} = storageChange;
        // Values that crossed a process boundary arrive as
        // StructuredCloneHolder objects; deserialize them before use.
        if (newValue && typeof newValue === "object"
            && Cu.getClassName(newValue, true) === "StructuredCloneHolder") {
          newValue = newValue.deserialize(this);
        }
        if (oldValue && typeof oldValue === "object"
            && Cu.getClassName(oldValue, true) === "StructuredCloneHolder") {
          oldValue = oldValue.deserialize(this);
        }
        // Infer the action from which side of the change is undefined:
        // no new value -> deleted; no old value -> added; both -> changed.
        let action;
        if (typeof newValue === "undefined") {
          action = "deleted";
          storeMap.delete(key);
        } else if (typeof oldValue === "undefined") {
          action = "added";
          storeMap.set(key, newValue);
        } else {
          action = "changed";
          storeMap.set(key, newValue);
        }
        this.storageActor.update(action, this.typeName, {[host]: [key]});
      }
    },
    /**
     * Purpose of this method is same as populateStoresForHosts but this is async.
     * This exact same operation cannot be performed in populateStoresForHosts
     * method, as that method is called in initialize method of the actor, which
     * cannot be asynchronous.
     */
    async preListStores() {
      // Ensure the actor's target is an extension and it is enabled
      if (!this.addonId || !(WebExtensionPolicy.getByID(this.addonId))) {
        return;
      }
      // Bug 1542038, 1542039: Each storage area will need its own this.hostVsStores
      // or this actor will need to deviate from how this.hostVsStores is defined in the
      // framework to associate each storage item with a storage area. Any methods
      // that use it will also need to be updated (e.g. getNamesForHost).
      this.hostVsStores = new Map();
      const extension = ExtensionProcessScript.getExtensionChild(this.addonId);
      await this.populateStoresForHost(`moz-extension://${extension.uuid}`);
    },
    /**
     * This method is overridden and left blank as for extensionStorage, this
     * operation cannot be performed synchronously. Thus, the preListStores
     * method exists to do the same task asynchronously.
     */
    populateStoresForHosts() {},
    /**
     * This method asynchronously reads the storage data for the target extension
     * and caches this data into this.hostVsStores.
     * @param {String} host - the hostname for the extension
     */
    async populateStoresForHost(host) {
      // Already cached for this host — nothing to do.
      if (this.hostVsStores.has(host)) {
        return;
      }
      const extension = ExtensionProcessScript.getExtensionChild(this.addonId);
      if (!extension || !(extension.hasPermission("storage"))) {
        return;
      }
      const storagePrincipal = await this.getStoragePrincipal(extension);
      if (!storagePrincipal) {
        return;
      }
      const db = await ExtensionStorageIDB.open(storagePrincipal);
      // Keep the connection so addItem/editItem/removeItem can reuse it.
      this.dbConnectionForHost.set(host, db);
      const data = await db.get();
      const storeMap = new Map();
      for (const [key, value] of Object.entries(data)) {
        storeMap.set(key, value);
      }
      this.hostVsStores.set(host, storeMap);
      // Show the storage actor in the add-on storage inspector even when there
      // is no extension page currently open
      const storageData = {};
      storageData[host] = this.getNamesForHost(host);
      this.storageActor.update("added", this.typeName, storageData);
    },
    /**
     * Asks the parent process to select the storage backend for the extension
     * and returns the resulting storage principal, or null when the IDB
     * backend is disabled.
     */
    async getStoragePrincipal(extension) {
      const {
        backendEnabled,
        storagePrincipal,
      } = await this.selectBackendInParent(extension.id);
      if (!backendEnabled) {
        // IDB backend disabled; give up.
        return null;
      }
      return storagePrincipal;
    },
    /**
     * Returns [{name, value}] for a single key when `name` is given,
     * otherwise every cached key/value pair for `host`; [] for unknown hosts.
     */
    getValuesForHost(host, name) {
      const result = [];
      if (!this.hostVsStores.has(host)) {
        return result;
      }
      if (name) {
        return [{name, value: this.hostVsStores.get(host).get(name)}];
      }
      for (const [key, value] of Array.from(this.hostVsStores.get(host).entries())) {
        result.push({name: key, value});
      }
      return result;
    },
    /**
     * Converts a storage item to an "extensionobject" as defined in
     * devtools/shared/specs/storage.js
     * @param {Object} item - The storage item to convert
     * @param {String} item.name - The storage item key
     * @param {*} item.value - The storage item value
     * @return {extensionobject}
     */
    toStoreObject(item) {
      if (!item) {
        return null;
      }
      const {name, value} = item;
      let newValue;
      const editable = this.isEditable(value);
      if (typeof value === "string") {
        // Strings are displayed as-is; everything else gets serialized.
        newValue = value;
      } else if (editable) {
        newValue = JSON.stringify(value);
      } else {
        // We don't cover every possible unsupported JS object explicitly, so assign
        // this as a failsafe.
        newValue = "[value uneditable]";
        const {unsupportedTypes} = extensionStorageHelpers;
        for (const {test, stringify} of Object.values(unsupportedTypes)) {
          if (test(value)) {
            newValue = stringify(value);
            break;
          }
        }
      }
      // FIXME: Bug 1318029 - Due to a bug that is thrown whenever a
      // LongStringActor string reaches DebuggerServer.LONG_STRING_LENGTH we need
      // to trim the value. When the bug is fixed we should stop trimming the
      // string here.
      const maxLength = DebuggerServer.LONG_STRING_LENGTH - 1;
      if (newValue.length > maxLength) {
        newValue = newValue.substr(0, maxLength);
      }
      return {
        name,
        value: new LongStringActor(this.conn, newValue || ""),
        area: "local", // Bug 1542038, 1542039: set the correct storage area
        editable,
      };
    },
    // Column descriptors for the storage inspector table.
    getFields() {
      return [
        // name needs to be editable for the addItem case, where a temporary key-value
        // pair is created that can later be edited via editItem.
        { name: "name", editable: true },
        { name: "value", editable: true },
        { name: "area", editable: false },
      ];
    },
    /**
     * Editing is supported only for serializable value types. Examples of unserializable
     * types include Map, Set and ArrayBuffer.
     */
    isEditable(value) {
      // Bug 1542038: the managed storage area is never editable
      const {supportedTypes} = extensionStorageHelpers;
      for (const {test} of Object.values(supportedTypes)) {
        if (test(value)) {
          return true;
        }
      }
      return false;
    },
    // Creates a placeholder entry (key = guid) that the user can then edit.
    async addItem(guid, host) {
      // Bug 1542038, 1542039: The user should be able to choose which storage area to
      // add an item to (managed excepted).
      const db = this.dbConnectionForHost.get(host);
      if (!db) {
        return;
      }
      const changes = await db.set({[guid]: DEFAULT_VALUE});
      this.fireOnChangedExtensionEvent(host, changes);
    },
    /**
     * Edit an extension storage entry from the storage inspector.
     *
     * @param {Object} data
     *        { host, field, item: {name, value}, oldValue } — when the edited
     *        field is "name", oldValue is the previous key to remove.
     */
    async editItem({host, field, item, oldValue}) {
      const db = this.dbConnectionForHost.get(host);
      if (!db) {
        return;
      }
      const {name, value} = item;
      // If the name changed, remove the previous entry in storage by the old name first
      if (field === "name") {
        const changes = await db.remove(oldValue);
        this.fireOnChangedExtensionEvent(host, changes);
      }
      // Attempt to interpret the data type of the value
      let parsedValue = parseItemValue(value);
      // parseItemValue returning its input unchanged means it couldn't parse;
      // fall back to this actor's own string parsers (e.g. JSON).
      if (parsedValue === value) {
        const {typesFromString} = extensionStorageHelpers;
        for (const {test, parse} of Object.values(typesFromString)) {
          if (test(value)) {
            parsedValue = parse(value);
            break;
          }
        }
      }
      const changes = await db.set({[name]: parsedValue});
      this.fireOnChangedExtensionEvent(host, changes);
    },
    async removeItem(host, name) {
      const db = this.dbConnectionForHost.get(host);
      if (!db) {
        return;
      }
      const changes = await db.remove(name);
      this.fireOnChangedExtensionEvent(host, changes);
    },
    async removeAll(host) {
      const db = this.dbConnectionForHost.get(host);
      if (!db) {
        return;
      }
      const changes = await db.clear();
      this.fireOnChangedExtensionEvent(host, changes);
    },
    /**
     * Let the extension know that storage data has been changed by the user from
     * the storage inspector.
     */
    fireOnChangedExtensionEvent(host, changes) {
      // Bug 1542038, 1542039: Which message to send depends on the storage area
      // The host is moz-extension://<uuid>, so URL.host is the extension uuid.
      const uuid = (new URL(host)).host;
      Services.cpmm.sendAsyncMessage(`Extension:StorageLocalOnChanged:${uuid}`,
        changes);
    },
  });
}
StorageActors.createActor({
  typeName: "Cache",
}, {
  /**
   * Returns the CacheStorage object for `host` (scoped to the page's
   * principal), or [] when the window exposes no CacheStorage constructor.
   */
  async getCachesForHost(host) {
    const uri = Services.io.newURI(host);
    const attrs = this.storageActor
                      .document
                      .nodePrincipal
                      .originAttributes;
    const principal =
      Services.scriptSecurityManager.createCodebasePrincipal(uri, attrs);
    // The first argument tells if you want to get |content| cache or |chrome|
    // cache.
    // The |content| cache is the cache explicitly named by the web content
    // (service worker or web page).
    // The |chrome| cache is the cache implicitly cached by the platform,
    // hosting the source file of the service worker.
    const { CacheStorage } = this.storageActor.window;
    if (!CacheStorage) {
      return [];
    }
    const cache = new CacheStorage("content", principal);
    return cache;
  },
  // Async counterpart of populateStoresForHosts (see note there).
  async preListStores() {
    for (const host of this.hosts) {
      await this.populateStoresForHost(host);
    }
  },
  // Over-the-wire form of this actor: its id plus cache names per host.
  form() {
    const hosts = {};
    for (const host of this.hosts) {
      hosts[host] = this.getNamesForHost(host);
    }
    return {
      actor: this.actorID,
      hosts: hosts,
    };
  },
  getNamesForHost(host) {
    // UI code expect each name to be a JSON string of an array :/
    return [...this.hostVsStores.get(host).keys()].map(a => {
      return JSON.stringify([a]);
    });
  },
  /**
   * Lists the cached request/response pairs of one named Cache.
   * `name` is the JSON-stringified [cacheName] produced by getNamesForHost.
   */
  async getValuesForHost(host, name) {
    if (!name) {
      return [];
    }
    // UI is weird and expect a JSON stringified array... and pass it back :/
    name = JSON.parse(name)[0];
    const cache = this.hostVsStores.get(host).get(name);
    const requests = await cache.keys();
    const results = [];
    for (const request of requests) {
      let response = await cache.match(request);
      // Unwrap the response to get access to all its properties if the
      // response happen to be 'opaque', when it is a Cross Origin Request.
      response = response.cloneUnfiltered();
      results.push(await this.processEntry(request, response));
    }
    return results;
  },
  // Reduce a Request/Response pair to the plain fields shown in the UI.
  async processEntry(request, response) {
    return {
      url: String(request.url),
      status: String(response.statusText),
    };
  },
  // Column descriptors for the storage inspector table.
  async getFields() {
    return [
      { name: "url", editable: false },
      { name: "status", editable: false },
    ];
  },
  // Builds the Map<cacheName, Cache> for one host; enumeration failures are
  // logged but leave an (empty) entry so the host still shows up.
  async populateStoresForHost(host) {
    const storeMap = new Map();
    const caches = await this.getCachesForHost(host);
    try {
      for (const name of (await caches.keys())) {
        storeMap.set(name, (await caches.open(name)));
      }
    } catch (ex) {
      console.warn(`Failed to enumerate CacheStorage for host ${host}: ${ex}`);
    }
    this.hostVsStores.set(host, storeMap);
  },
  /**
   * This method is overridden and left blank as for Cache Storage, this
   * operation cannot be performed synchronously. Thus, the preListStores
   * method exists to do the same task asynchronously.
   */
  populateStoresForHosts() {
    this.hostVsStores = new Map();
  },
  /**
   * Given a url, correctly determine its protocol + hostname part.
   */
  getSchemaAndHost(url) {
    const uri = Services.io.newURI(url);
    return uri.scheme + "://" + uri.hostPort;
  },
  // Entries are already plain objects (see processEntry); pass through.
  toStoreObject(item) {
    return item;
  },
  /**
   * Deletes either a whole named Cache ([cacheName]) or one cached request
   * ([cacheName, url]), depending on the parsed length of `name`.
   */
  async removeItem(host, name) {
    const cacheMap = this.hostVsStores.get(host);
    if (!cacheMap) {
      return;
    }
    const parsedName = JSON.parse(name);
    if (parsedName.length == 1) {
      // Delete the whole Cache object
      const [ cacheName ] = parsedName;
      cacheMap.delete(cacheName);
      const cacheStorage = await this.getCachesForHost(host);
      await cacheStorage.delete(cacheName);
      this.onItemUpdated("deleted", host, [ cacheName ]);
    } else if (parsedName.length == 2) {
      // Delete one cached request
      const [ cacheName, url ] = parsedName;
      const cache = cacheMap.get(cacheName);
      if (cache) {
        await cache.delete(url);
        this.onItemUpdated("deleted", host, [ cacheName, url ]);
      }
    }
  },
  // Clears every request inside one named Cache (the Cache itself remains).
  async removeAll(host, name) {
    const cacheMap = this.hostVsStores.get(host);
    if (!cacheMap) {
      return;
    }
    const parsedName = JSON.parse(name);
    // Only a Cache object is a valid object to clear
    if (parsedName.length == 1) {
      const [ cacheName ] = parsedName;
      const cache = cacheMap.get(cacheName);
      if (cache) {
        const keys = await cache.keys();
        await Promise.all(keys.map(key => cache.delete(key)));
        this.onItemUpdated("cleared", host, [ cacheName ]);
      }
    }
  },
  /**
   * CacheStorage API doesn't support any notifications, we must fake them
   */
  onItemUpdated(action, host, path) {
    this.storageActor.update(action, "Cache", {
      [host]: [ JSON.stringify(path) ],
    });
  },
});
/**
* Code related to the Indexed DB actor and front
*/
// Metadata holder objects for various components of Indexed DB
/**
 * Meta data object for a particular index in an object store
 *
 * @param {IDBIndex} index
 *        The particular index from the object store.
 */
function IndexMetadata(index) {
  const { name, keyPath, unique, multiEntry } = index;
  this._name = name;
  this._keyPath = keyPath;
  this._unique = unique;
  this._multiEntry = multiEntry;
}
IndexMetadata.prototype = {
  // Plain-object snapshot of this index, suitable for serialization.
  toObject() {
    const { _name, _keyPath, _unique, _multiEntry } = this;
    return {
      name: _name,
      keyPath: _keyPath,
      unique: _unique,
      multiEntry: _multiEntry,
    };
  },
};
/**
 * Meta data object for a particular object store in a db
 *
 * @param {IDBObjectStore} objectStore
 *        The particular object store from the db.
 */
function ObjectStoreMetadata(objectStore) {
  this._name = objectStore.name;
  this._keyPath = objectStore.keyPath;
  this._autoIncrement = objectStore.autoIncrement;
  // Stored as [key, value] pairs rather than a Map so the data survives
  // structured cloning across processes. NOTE(review): toObject() below calls
  // .toObject() on each element of this._indexes.values(), which only works
  // if this has been rebuilt into a Map (presumably by
  // indexedDBHelpers.patchMetadataMapsAndProtos) before toObject() runs —
  // confirm against that helper.
  this._indexes = [];
  for (let i = 0; i < objectStore.indexNames.length; i++) {
    const index = objectStore.index(objectStore.indexNames[i]);
    // A cloneable plain-object snapshot of the IDBIndex used as the pair key.
    const newIndex = {
      keypath: index.keyPath,
      multiEntry: index.multiEntry,
      name: index.name,
      objectStore: {
        autoIncrement: index.objectStore.autoIncrement,
        indexNames: [...index.objectStore.indexNames],
        keyPath: index.objectStore.keyPath,
        name: index.objectStore.name,
      },
    };
    this._indexes.push([newIndex, new IndexMetadata(index)]);
  }
}
ObjectStoreMetadata.prototype = {
  // Plain-object form; indexes are pre-stringified for the wire.
  toObject() {
    return {
      name: this._name,
      keyPath: this._keyPath,
      autoIncrement: this._autoIncrement,
      indexes: JSON.stringify(
        [...this._indexes.values()].map(index => index.toObject())
      ),
    };
  },
};
/**
 * Meta data object for a particular indexed db in a host.
 *
 * @param {string} origin
 *        The host associated with this indexed db.
 * @param {IDBDatabase} db
 *        The particular indexed db.
 * @param {String} storage
 *        Storage type, either "temporary", "default" or "persistent".
 */
function DatabaseMetadata(origin, db, storage) {
  this._origin = origin;
  this._name = db.name;
  this._version = db.version;
  // [storeName, ObjectStoreMetadata] pairs; kept as an Array so the data is
  // cloneable across processes. NOTE(review): toObject() reads
  // this._objectStores.size, which is undefined on an Array — presumably this
  // is patched into a Map (patchMetadataMapsAndProtos) before toObject() is
  // called; confirm.
  this._objectStores = [];
  this.storage = storage;
  if (db.objectStoreNames.length) {
    // A read-only transaction over every store just to read their metadata.
    const transaction = db.transaction(db.objectStoreNames, "readonly");
    for (let i = 0; i < transaction.objectStoreNames.length; i++) {
      const objectStore =
        transaction.objectStore(transaction.objectStoreNames[i]);
      this._objectStores.push([transaction.objectStoreNames[i],
                               new ObjectStoreMetadata(objectStore)]);
    }
  }
}
DatabaseMetadata.prototype = {
  get objectStores() {
    return this._objectStores;
  },
  // Plain-object form; uniqueKey disambiguates same-named dbs in different
  // storage types.
  toObject() {
    return {
      uniqueKey: `${this._name}${SEPARATOR_GUID}${this.storage}`,
      name: this._name,
      storage: this.storage,
      origin: this._origin,
      version: this._version,
      objectStores: this._objectStores.size,
    };
  },
};
StorageActors.createActor({
typeName: "indexedDB",
}, {
initialize(storageActor) {
protocol.Actor.prototype.initialize.call(this, null);
this.storageActor = storageActor;
this.maybeSetupChildProcess();
this.objectsSize = {};
this.storageActor = storageActor;
this.onWindowReady = this.onWindowReady.bind(this);
this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
this.storageActor.on("window-ready", this.onWindowReady);
this.storageActor.on("window-destroyed", this.onWindowDestroyed);
},
destroy() {
this.hostVsStores.clear();
this.objectsSize = null;
this.storageActor.off("window-ready", this.onWindowReady);
this.storageActor.off("window-destroyed", this.onWindowDestroyed);
protocol.Actor.prototype.destroy.call(this);
this.storageActor = null;
},
  /**
   * Returns a list of currently known hosts for the target window. This list
   * contains unique hosts from the window, all inner windows and all permanent
   * indexedDB hosts defined inside the browser.
   */
  async getHosts() {
    // Add internal hosts to this._internalHosts, which will be picked up by
    // the this.hosts getter. Because this.hosts is a property on the default
    // storage actor and inherited by all storage actors we have to do it this
    // way.
    this._internalHosts = await this.getInternalHosts();
    return this.hosts;
  },
  /**
   * Remove an indexedDB database from given host with a given name.
   */
  async removeDatabase(host, name) {
    const win = this.storageActor.getWindowFromHost(host);
    if (!win) {
      return { error: `Window for host ${host} not found` };
    }
    const principal = win.document.nodePrincipal;
    return this.removeDB(host, principal, name);
  },
  // Clears one object store; `name` is the JSON-stringified [db, store] path.
  // NOTE(review): clearDBStore is deliberately not awaited — completion is
  // reported back through onItemUpdated; confirm.
  async removeAll(host, name) {
    const [db, store] = JSON.parse(name);
    const win = this.storageActor.getWindowFromHost(host);
    if (!win) {
      return;
    }
    const principal = win.document.nodePrincipal;
    this.clearDBStore(host, principal, db, store);
  },
  // Removes one record; `name` is the JSON-stringified [db, store, id] path.
  async removeItem(host, name) {
    const [db, store, id] = JSON.parse(name);
    const win = this.storageActor.getWindowFromHost(host);
    if (!win) {
      return;
    }
    const principal = win.document.nodePrincipal;
    this.removeDBRecord(host, principal, db, store, id);
  },
  /**
   * This method is overridden and left blank as for indexedDB, this operation
   * cannot be performed synchronously. Thus, the preListStores method exists
   * to do the same task asynchronously.
   */
  populateStoresForHosts() {},
  // Returns one JSON-stringified [db, store] path per object store, or
  // [db] alone for databases without stores.
  getNamesForHost(host) {
    const names = [];
    for (const [dbName, {objectStores}] of this.hostVsStores.get(host)) {
      if (objectStores.size) {
        for (const objectStore of objectStores.keys()) {
          names.push(JSON.stringify([dbName, objectStore]));
        }
      } else {
        names.push(JSON.stringify([dbName]));
      }
    }
    return names;
  },
  /**
   * Returns the total number of entries for various types of requests to
   * getStoreObjects for Indexed DB actor.
   *
   * @param {string} host
   *        The host for the request.
   * @param {array:string} names
   *        Array of stringified name objects for indexed db actor.
   *        The request type depends on the length of any parsed entry from this
   *        array. 0 length refers to request for the whole host. 1 length
   *        refers to request for a particular db in the host. 2 length refers
   *        to a particular object store in a db in a host. 3 length refers to
   *        particular items of an object store in a db in a host.
   * @param {object} options
   *        An options object containing following properties:
   *        - index {string} The IDBIndex for the object store in the db.
   */
  getObjectsSize(host, names, options) {
    // In Indexed DB, we are interested in only the first name, as the pattern
    // should follow in all entries.
    const name = names[0];
    const parsedName = JSON.parse(name);
    if (parsedName.length == 3) {
      // This is the case where specific entries from an object store were
      // requested
      return names.length;
    } else if (parsedName.length == 2) {
      // This is the case where all entries from an object store are requested.
      const index = options.index;
      const [db, objectStore] = parsedName;
      // Cached by getValuesForHost / getObjectStoreData under this same key.
      if (this.objectsSize[host + db + objectStore + index]) {
        return this.objectsSize[host + db + objectStore + index];
      }
    } else if (parsedName.length == 1) {
      // This is the case where details of all object stores in a db are
      // requested.
      if (this.hostVsStores.has(host) &&
          this.hostVsStores.get(host).has(parsedName[0])) {
        return this.hostVsStores.get(host).get(parsedName[0]).objectStores.size;
      }
    } else if (!parsedName || !parsedName.length) {
      // This is the case where details of all dbs in a host are requested.
      if (this.hostVsStores.has(host)) {
        return this.hostVsStores.get(host).size;
      }
    }
    return 0;
  },
  /**
   * Purpose of this method is same as populateStoresForHosts but this is async.
   * This exact same operation cannot be performed in populateStoresForHosts
   * method, as that method is called in initialize method of the actor, which
   * cannot be asynchronous.
   */
  async preListStores() {
    this.hostVsStores = new Map();
    for (const host of await this.getHosts()) {
      await this.populateStoresForHost(host);
    }
  },
  // Builds Map<"name (storage)", DatabaseMetadata> for every db of `host`.
  async populateStoresForHost(host) {
    const storeMap = new Map();
    const win = this.storageActor.getWindowFromHost(host);
    const principal = this.getPrincipal(win);
    const {names} = await this.getDBNamesForHost(host, principal);
    for (const {name, storage} of names) {
      let metadata = await this.getDBMetaData(host, principal, name, storage);
      // Metadata may have crossed a process boundary; restore its Maps and
      // prototypes before caching it.
      metadata = indexedDBHelpers.patchMetadataMapsAndProtos(metadata);
      storeMap.set(`${name} (${storage})`, metadata);
    }
    this.hostVsStores.set(host, storeMap);
  },
  /**
   * Returns the over-the-wire implementation of the indexed db entity.
   */
  toStoreObject(item) {
    if (!item) {
      return null;
    }
    if ("indexes" in item) {
      // Object store meta data
      return {
        objectStore: item.name,
        keyPath: item.keyPath,
        autoIncrement: item.autoIncrement,
        indexes: item.indexes,
      };
    }
    if ("objectStores" in item) {
      // DB meta data
      return {
        uniqueKey: `${item.name} (${item.storage})`,
        db: item.name,
        storage: item.storage,
        origin: item.origin,
        version: item.version,
        objectStores: item.objectStores,
      };
    }
    let value = JSON.stringify(item.value);
    // FIXME: Bug 1318029 - Due to a bug that is thrown whenever a
    // LongStringActor string reaches DebuggerServer.LONG_STRING_LENGTH we need
    // to trim the value. When the bug is fixed we should stop trimming the
    // string here.
    const maxLength = DebuggerServer.LONG_STRING_LENGTH - 1;
    if (value.length > maxLength) {
      value = value.substr(0, maxLength);
    }
    // Indexed db entry
    return {
      name: item.name,
      value: new LongStringActor(this.conn, value),
    };
  },
  // Over-the-wire form of this actor: its id plus db/store names per host.
  form() {
    const hosts = {};
    for (const host of this.hosts) {
      hosts[host] = this.getNamesForHost(host);
    }
    return {
      actor: this.actorID,
      hosts: hosts,
    };
  },
  // Forwards a change notification to the client, pruning the local cache
  // when a whole database was deleted.
  onItemUpdated(action, host, path) {
    // Database was removed, remove it from stores map
    if (action === "deleted" && path.length === 1) {
      if (this.hostVsStores.has(host)) {
        this.hostVsStores.get(host).delete(path[0]);
      }
    }
    this.storageActor.update(action, "indexedDB", {
      [host]: [ JSON.stringify(path) ],
    });
  },
  /**
   * Installs the indexedDB helper methods. In the main process they are used
   * directly; in a child process each becomes an async proxy that round-trips
   * through the parent over the message manager.
   */
  maybeSetupChildProcess() {
    if (!DebuggerServer.isInChildProcess) {
      // Main process: no marshalling needed, backToChild is a pass-through.
      this.backToChild = (func, rv) => rv;
      this.clearDBStore = indexedDBHelpers.clearDBStore;
      this.findIDBPathsForHost = indexedDBHelpers.findIDBPathsForHost;
      this.findSqlitePathsForHost = indexedDBHelpers.findSqlitePathsForHost;
      this.findStorageTypePaths = indexedDBHelpers.findStorageTypePaths;
      this.getDBMetaData = indexedDBHelpers.getDBMetaData;
      this.getDBNamesForHost = indexedDBHelpers.getDBNamesForHost;
      this.getNameFromDatabaseFile = indexedDBHelpers.getNameFromDatabaseFile;
      this.getObjectStoreData = indexedDBHelpers.getObjectStoreData;
      this.getSanitizedHost = indexedDBHelpers.getSanitizedHost;
      this.getValuesForHost = indexedDBHelpers.getValuesForHost;
      this.openWithPrincipal = indexedDBHelpers.openWithPrincipal;
      this.removeDB = indexedDBHelpers.removeDB;
      this.removeDBRecord = indexedDBHelpers.removeDBRecord;
      this.splitNameAndStorage = indexedDBHelpers.splitNameAndStorage;
      this.getInternalHosts = indexedDBHelpers.getInternalHosts;
      return;
    }
    const mm = this.conn.parentMessageManager;
    this.conn.setupInParent({
      module: "devtools/server/actors/storage",
      setupParent: "setupParentProcessForIndexedDB",
    });
    this.getDBMetaData = callParentProcessAsync.bind(null, "getDBMetaData");
    this.splitNameAndStorage = callParentProcessAsync.bind(null, "splitNameAndStorage");
    this.getInternalHosts = callParentProcessAsync.bind(null, "getInternalHosts");
    this.getDBNamesForHost = callParentProcessAsync.bind(null, "getDBNamesForHost");
    this.getValuesForHost = callParentProcessAsync.bind(null, "getValuesForHost");
    this.removeDB = callParentProcessAsync.bind(null, "removeDB");
    this.removeDBRecord = callParentProcessAsync.bind(null, "removeDBRecord");
    this.clearDBStore = callParentProcessAsync.bind(null, "clearDBStore");
    mm.addMessageListener("debug:storage-indexedDB-request-child", msg => {
      switch (msg.json.method) {
        case "backToChild": {
          // Resolve the pending promise for this method with the parent's
          // return value.
          const [func, rv] = msg.json.args;
          const deferred = unresolvedPromises.get(func);
          if (deferred) {
            unresolvedPromises.delete(func);
            deferred.resolve(rv);
          }
          break;
        }
        case "onItemUpdated": {
          const [action, host, path] = msg.json.args;
          this.onItemUpdated(action, host, path);
        }
      }
    });
    // Declared after the listener above, but only read once messages arrive,
    // so the binding is initialized by then (function declarations hoist).
    const unresolvedPromises = new Map();
    function callParentProcessAsync(methodName, ...args) {
      const deferred = defer();
      unresolvedPromises.set(methodName, deferred);
      mm.sendAsyncMessage("debug:storage-indexedDB-request-parent", {
        method: methodName,
        args: args,
      });
      return deferred.promise;
    }
  },
async getFields(subType) {
switch (subType) {
// Detail of database
case "database":
return [
{ name: "objectStore", editable: false },
{ name: "keyPath", editable: false },
{ name: "autoIncrement", editable: false },
{ name: "indexes", editable: false },
];
// Detail of object store
case "object store":
return [
{ name: "name", editable: false },
{ name: "value", editable: false },
];
// Detail of indexedDB for one origin
default:
return [
{ name: "uniqueKey", editable: false, private: true },
{ name: "db", editable: false },
{ name: "storage", editable: false },
{ name: "origin", editable: false },
{ name: "version", editable: false },
{ name: "objectStores", editable: false },
];
}
},
});
// Helpers that run in the parent process, where the profile directory and
// the privileged indexedDB principal APIs are available. Results are
// broadcast back to the requesting child process.
var indexedDBHelpers = {
/**
 * Broadcasts a helper's return value back to the child process, tagged with
 * the originating method name so the child can resolve its pending promise.
 */
backToChild(...args) {
Services.mm.broadcastAsyncMessage("debug:storage-indexedDB-request-child", {
method: "backToChild",
args: args,
});
},
/**
 * Broadcasts a store-change notification to the child-side actor.
 */
onItemUpdated(action, host, path) {
Services.mm.broadcastAsyncMessage("debug:storage-indexedDB-request-child", {
method: "onItemUpdated",
args: [ action, host, path ],
});
},
/**
 * Fetches and stores all the metadata information for the given database
 * `name` for the given `host` with its `principal`. The stored metadata
 * information is of `DatabaseMetadata` type.
 * Resolves with the metadata, or null if the database could not be opened.
 */
async getDBMetaData(host, principal, name, storage) {
const request = this.openWithPrincipal(principal, name, storage);
const success = defer();
request.onsuccess = event => {
const db = event.target.result;
const dbData = new DatabaseMetadata(host, db, storage);
// The connection was only needed to read metadata; close immediately.
db.close();
success.resolve(this.backToChild("getDBMetaData", dbData));
};
request.onerror = ({target}) => {
console.error(
`Error opening indexeddb database ${name} for host ${host}`, target.error);
// Resolve (not reject) with null so the child still gets a reply.
success.resolve(this.backToChild("getDBMetaData", null));
};
return success.promise;
},
splitNameAndStorage: function(name) {
const lastOpenBracketIndex = name.lastIndexOf("(");
const lastCloseBracketIndex = name.lastIndexOf(")");
const delta = lastCloseBracketIndex - lastOpenBracketIndex - 1;
const storage = name.substr(lastOpenBracketIndex + 1, delta);
name = name.substr(0, lastOpenBracketIndex - 1);
return { storage, name };
},
/**
 * Get all "internal" hosts. Internal hosts are database namespaces used by
 * the browser.
 */
async getInternalHosts() {
// Return an empty array if the browser toolbox is not enabled.
if (!Services.prefs.getBoolPref(CHROME_ENABLED_PREF) ||
!Services.prefs.getBoolPref(REMOTE_ENABLED_PREF)) {
return this.backToChild("getInternalHosts", []);
}
// Internal namespaces live under storage/permanent in the profile.
const profileDir = OS.Constants.Path.profileDir;
const storagePath = OS.Path.join(profileDir, "storage", "permanent");
const iterator = new OS.File.DirectoryIterator(storagePath);
const hosts = [];
await iterator.forEach(entry => {
// Anything that is not a recognized web-content prefix is internal.
if (entry.isDir && !SAFE_HOSTS_PREFIXES_REGEX.test(entry.name)) {
hosts.push(entry.name);
}
});
iterator.close();
return this.backToChild("getInternalHosts", hosts);
},
/**
* Opens an indexed db connection for the given `principal` and
* database `name`.
*/
openWithPrincipal: function(principal, name, storage) {
return indexedDBForStorage.openForPrincipal(principal, name,
{ storage: storage });
},
/**
 * Deletes the database `dbName` for `host`. Resolves with {} on success,
 * { blocked: true } when open connections prevent deletion, or { error }
 * when the delete request fails outright.
 */
async removeDB(host, principal, dbName) {
const result = new Promise(resolve => {
const {name, storage} = this.splitNameAndStorage(dbName);
const request =
indexedDBForStorage.deleteForPrincipal(principal, name,
{ storage: storage });
request.onsuccess = () => {
resolve({});
// Let the client know the whole database is gone.
this.onItemUpdated("deleted", host, [dbName]);
};
request.onblocked = () => {
console.warn(`Deleting indexedDB database ${name} for host ${host} is blocked`);
resolve({ blocked: true });
};
request.onerror = () => {
const { error } = request;
console.warn(
`Error deleting indexedDB database ${name} for host ${host}: ${error}`);
resolve({ error: error.message });
};
// If the database is blocked repeatedly, the onblocked event will not
// be fired again. To avoid waiting forever, report as blocked if nothing
// else happens after 3 seconds. (Later resolve() calls are no-ops, so
// this timer is harmless once the promise has already settled.)
setTimeout(() => resolve({ blocked: true }), 3000);
});
return this.backToChild("removeDB", await result);
},
/**
 * Deletes the record with key `id` from object store `storeName` of database
 * `dbName` for `host`. Failures are logged, never rethrown; the child is
 * always answered with null.
 */
async removeDBRecord(host, principal, dbName, storeName, id) {
let db;
const {name, storage} = this.splitNameAndStorage(dbName);
try {
// Wrap the IDBOpenDBRequest callbacks in a promise so it can be awaited.
db = await new Promise((resolve, reject) => {
const request = this.openWithPrincipal(principal, name, storage);
request.onsuccess = ev => resolve(ev.target.result);
request.onerror = ev => reject(ev.target.error);
});
const transaction = db.transaction(storeName, "readwrite");
const store = transaction.objectStore(storeName);
await new Promise((resolve, reject) => {
const request = store.delete(id);
request.onsuccess = () => resolve();
request.onerror = ev => reject(ev.target.error);
});
// Notify observers so the client UI can refresh its view of the store.
this.onItemUpdated("deleted", host, [dbName, storeName, id]);
} catch (error) {
const recordPath = [dbName, storeName, id].join("/");
console.error(`Failed to delete indexedDB record: ${recordPath}: ${error}`);
}
// Close the connection whether or not the delete succeeded.
if (db) {
db.close();
}
return this.backToChild("removeDBRecord", null);
},
/**
 * Removes every record from object store `storeName` of database `dbName`
 * for `host`. Failures are logged, never rethrown; the child is always
 * answered with null.
 */
async clearDBStore(host, principal, dbName, storeName) {
let db;
const {name, storage} = this.splitNameAndStorage(dbName);
try {
// Wrap the IDBOpenDBRequest callbacks in a promise so it can be awaited.
db = await new Promise((resolve, reject) => {
const request = this.openWithPrincipal(principal, name, storage);
request.onsuccess = ev => resolve(ev.target.result);
request.onerror = ev => reject(ev.target.error);
});
const transaction = db.transaction(storeName, "readwrite");
const store = transaction.objectStore(storeName);
await new Promise((resolve, reject) => {
const request = store.clear();
request.onsuccess = () => resolve();
request.onerror = ev => reject(ev.target.error);
});
// Notify observers so the client UI can refresh its view of the store.
this.onItemUpdated("cleared", host, [dbName, storeName]);
} catch (error) {
const storePath = [dbName, storeName].join("/");
console.error(`Failed to clear indexedDB store: ${storePath}: ${error}`);
}
// Close the connection whether or not the clear succeeded.
if (db) {
db.close();
}
return this.backToChild("clearDBStore", null);
},
/**
 * Fetches all the databases and their metadata for the given `host`.
 */
async getDBNamesForHost(host, principal) {
const sanitizedHost = this.getSanitizedHost(host) + principal.originSuffix;
const profileDir = OS.Constants.Path.profileDir;
const files = [];
const names = [];
const storagePath = OS.Path.join(profileDir, "storage");
// We expect sqlite DB paths to look something like this:
// - PathToProfileDir/storage/default/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// - PathToProfileDir/storage/permanent/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// - PathToProfileDir/storage/temporary/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// The subdirectory inside the storage folder is determined by the storage
// type:
// - default: { storage: "default" } or not specified.
// - permanent: { storage: "persistent" }.
// - temporary: { storage: "temporary" }.
const sqliteFiles = await this.findSqlitePathsForHost(storagePath, sanitizedHost);
for (const file of sqliteFiles) {
const splitPath = OS.Path.split(file).components;
const idbIndex = splitPath.indexOf("idb");
// The storage type is the component two levels above "idb"
// (".../<type>/<host>/idb/<file>.sqlite").
const storage = splitPath[idbIndex - 2];
const relative = file.substr(profileDir.length + 1);
files.push({
file: relative,
// The on-disk dir is "permanent" but the quota API calls it "persistent".
storage: storage === "permanent" ? "persistent" : storage,
});
}
if (files.length > 0) {
for (const {file, storage} of files) {
// Read the user-visible database name out of the sqlite file itself.
const name = await this.getNameFromDatabaseFile(file);
if (name) {
names.push({
name,
storage,
});
}
}
}
return this.backToChild("getDBNamesForHost", {names});
},
/**
* Find all SQLite files that hold IndexedDB data for a host, such as:
* storage/temporary/http+++www.example.com/idb/1556056096MeysDaabta.sqlite
*/
async findSqlitePathsForHost(storagePath, sanitizedHost) {
const sqlitePaths = [];
const idbPaths = await this.findIDBPathsForHost(storagePath, sanitizedHost);
for (const idbPath of idbPaths) {
const iterator = new OS.File.DirectoryIterator(idbPath);
await iterator.forEach(entry => {
if (!entry.isDir && entry.path.endsWith(".sqlite")) {
sqlitePaths.push(entry.path);
}
});
iterator.close();
}
return sqlitePaths;
},
/**
* Find all paths that hold IndexedDB data for a host, such as:
* storage/temporary/http+++www.example.com/idb
*/
async findIDBPathsForHost(storagePath, sanitizedHost) {
const idbPaths = [];
const typePaths = await this.findStorageTypePaths(storagePath);
for (const typePath of typePaths) {
const idbPath = OS.Path.join(typePath, sanitizedHost, "idb");
if (await OS.File.exists(idbPath)) {
idbPaths.push(idbPath);
}
}
return idbPaths;
},
/**
* Find all the storage types, such as "default", "permanent", or "temporary".
* These names have changed over time, so it seems simpler to look through all types
* that currently exist in the profile.
*/
async findStorageTypePaths(storagePath) {
const iterator = new OS.File.DirectoryIterator(storagePath);
const typePaths = [];
await iterator.forEach(entry => {
if (entry.isDir) {
typePaths.push(entry.path);
}
});
iterator.close();
return typePaths;
},
/**
* Removes any illegal characters from the host name to make it a valid file
* name.
*/
getSanitizedHost(host) {
if (host.startsWith("about:")) {
host = "moz-safe-" + host;
}
return host.replace(ILLEGAL_CHAR_REGEX, "+");
},
/**
* Retrieves the proper indexed db database name from the provided .sqlite
* file location.
*/
async getNameFromDatabaseFile(path) {
let connection = null;
let retryCount = 0;
// Content pages might be having an open transaction for the same indexed db
// which this sqlite file belongs to. In that case, sqlite.openConnection
// will throw. Thus we retry for some time to see if lock is removed.
while (!connection && retryCount++ < 25) {
try {
connection = await Sqlite.openConnection({ path: path });
} catch (ex) {
// Continuously retrying is overkill. Waiting for 100ms before next try
await sleep(100);
}
}
if (!connection) {
return null;
}
const rows = await connection.execute("SELECT name FROM database");
if (rows.length != 1) {
return null;
}
const name = rows[0].getResultByName("name");
await connection.close();
return name;
},
/**
 * Returns the values to display for `host`, at one of three levels of
 * detail depending on the parsed `name` path:
 * - empty: metadata for every database of the host,
 * - [db]: metadata for every object store of that database,
 * - [db, store(, id)]: the entries (or one entry) of that object store.
 */
async getValuesForHost(host, name = "null", options,
hostVsStores, principal) {
name = JSON.parse(name);
if (!name || !name.length) {
// This means that details about the db in this particular host are
// requested.
const dbs = [];
if (hostVsStores.has(host)) {
for (let [, db] of hostVsStores.get(host)) {
// Metadata may have crossed a process boundary as JSON; restore its
// prototype and Maps before serializing it for the client.
db = indexedDBHelpers.patchMetadataMapsAndProtos(db);
dbs.push(db.toObject());
}
}
return this.backToChild("getValuesForHost", {dbs: dbs});
}
const [db2, objectStore, id] = name;
if (!objectStore) {
// This means that details about all the object stores in this db are
// requested.
const objectStores = [];
if (hostVsStores.has(host) && hostVsStores.get(host).has(db2)) {
let db = hostVsStores.get(host).get(db2);
db = indexedDBHelpers.patchMetadataMapsAndProtos(db);
const objectStores2 = db.objectStores;
// objectStores is a Map; each entry is a [name, metadata] pair.
for (const objectStore2 of objectStores2) {
objectStores.push(objectStore2[1].toObject());
}
}
return this.backToChild("getValuesForHost", {objectStores: objectStores});
}
// Get either all entries from the object store, or a particular id
const storage = hostVsStores.get(host).get(db2).storage;
const result = await this.getObjectStoreData(host, principal, db2, storage, {
objectStore: objectStore,
id: id,
index: options.index,
offset: options.offset,
size: options.size,
});
return this.backToChild("getValuesForHost", {result: result});
},
/**
* Returns requested entries (or at most MAX_STORE_OBJECT_COUNT) from a particular
* objectStore from the db in the given host.
*
* @param {string} host
* The given host.
* @param {nsIPrincipal} principal
* The principal of the given document.
* @param {string} dbName
* The name of the indexed db from the above host.
* @param {String} storage
* Storage type, either "temporary", "default" or "persistent".
* @param {Object} requestOptions
* An object in the following format:
* {
* objectStore: The name of the object store from the above db,
* id: Id of the requested entry from the above object
* store. null if all entries from the above object
* store are requested,
* index: Name of the IDBIndex to be iterated on while fetching
* entries. null or "name" if no index is to be
* iterated,
* offset: offset of the entries to be fetched,
* size: The intended size of the entries to be fetched
* }
*/
getObjectStoreData(host, principal, dbName, storage, requestOptions) {
const {name} = this.splitNameAndStorage(dbName);
const request = this.openWithPrincipal(principal, name, storage);
const success = defer();
let {objectStore, id, index, offset, size} = requestOptions;
const data = [];
let db;
if (!size || size > MAX_STORE_OBJECT_COUNT) {
size = MAX_STORE_OBJECT_COUNT;
}
request.onsuccess = event => {
db = event.target.result;
const transaction = db.transaction(objectStore, "readonly");
let source = transaction.objectStore(objectStore);
if (index && index != "name") {
source = source.index(index);
}
source.count().onsuccess = event2 => {
const objectsSize = [];
const count = event2.target.result;
objectsSize.push({
key: host + dbName + objectStore + index,
count: count,
});
if (!offset) {
offset = 0;
} else if (offset > count) {
db.close();
success.resolve([]);
return;
}
if (id) {
source.get(id).onsuccess = event3 => {
db.close();
success.resolve([{name: id, value: event3.target.result}]);
};
} else {
source.openCursor().onsuccess = event4 => {
const cursor = event4.target.result;
if (!cursor || data.length >= size) {
db.close();
success.resolve({
data: data,
objectsSize: objectsSize,
});
return;
}
if (offset-- <= 0) {
data.push({name: cursor.key, value: cursor.value});
}
cursor.continue();
};
}
};
};
request.onerror = () => {
db.close();
success.resolve([]);
};
return success.promise;
},
/**
 * When indexedDB metadata is parsed to and from JSON then the object's
 * prototype is dropped and any Maps are changed to arrays of arrays. This
 * method is used to repair the prototypes and fix any broken Maps.
 */
patchMetadataMapsAndProtos(metadata) {
const md = Object.create(DatabaseMetadata.prototype);
Object.assign(md, metadata);
// new Map() accepts both an existing Map and the array-of-pairs form that
// JSON round-tripping produces.
md._objectStores = new Map(metadata._objectStores);
for (const [name, store] of md._objectStores) {
const obj = Object.create(ObjectStoreMetadata.prototype);
Object.assign(obj, store);
md._objectStores.set(name, obj);
// Arrays have a numeric `length` property; Maps do not. Only rebuild the
// indexes Map when the data arrived as an array of pairs.
if (typeof store._indexes.length !== "undefined") {
obj._indexes = new Map(store._indexes);
}
for (const [name2, value] of obj._indexes) {
const obj2 = Object.create(IndexMetadata.prototype);
Object.assign(obj2, value);
obj._indexes.set(name2, obj2);
}
}
return md;
},
/**
 * Dispatches a "debug:storage-indexedDB-request-parent" message coming from
 * a child process to the matching helper above, returning that helper's
 * promise so the message manager can deliver its resolution.
 */
handleChildRequest(msg) {
// NOTE(review): the arguments are read from msg.data while the method name
// is read from msg.json — for message-manager messages these appear to
// alias the same payload, but confirm before refactoring.
const args = msg.data.args;
switch (msg.json.method) {
case "getDBMetaData": {
const [host, principal, name, storage] = args;
return indexedDBHelpers.getDBMetaData(host, principal, name, storage);
}
case "getInternalHosts": {
return indexedDBHelpers.getInternalHosts();
}
case "splitNameAndStorage": {
const [name] = args;
return indexedDBHelpers.splitNameAndStorage(name);
}
case "getDBNamesForHost": {
const [host, principal] = args;
return indexedDBHelpers.getDBNamesForHost(host, principal);
}
case "getValuesForHost": {
const [host, name, options, hostVsStores, principal] = args;
return indexedDBHelpers.getValuesForHost(host, name, options,
hostVsStores, principal);
}
case "removeDB": {
const [host, principal, dbName] = args;
return indexedDBHelpers.removeDB(host, principal, dbName);
}
case "removeDBRecord": {
const [host, principal, db, store, id] = args;
return indexedDBHelpers.removeDBRecord(host, principal, db, store, id);
}
case "clearDBStore": {
const [host, principal, db, store] = args;
return indexedDBHelpers.clearDBStore(host, principal, db, store);
}
default:
// Unknown method names indicate a protocol mismatch between processes.
console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
}
},
};
/**
* E10S parent/child setup helpers
*/
exports.setupParentProcessForIndexedDB = function({ mm, prefix }) {
// listen for director-script requests from the child process
setMessageManager(mm);
function setMessageManager(newMM) {
if (mm) {
mm.removeMessageListener("debug:storage-indexedDB-request-parent",
indexedDBHelpers.handleChildRequest);
}
mm = newMM;
if (mm) {
mm.addMessageListener("debug:storage-indexedDB-request-parent",
indexedDBHelpers.handleChildRequest);
}
}
return {
onBrowserSwap: setMessageManager,
onDisconnected: () => setMessageManager(null),
};
};
/**
 * General helpers
 */
/**
 * Strips a trailing ":port" and a leading "http://" or "https://" scheme
 * from a URL string, e.g. "http://example.com:8080" -> "example.com".
 */
function trimHttpHttpsPort(url) {
  const portMatch = url.match(/(.+):\d+$/);
  const withoutPort = portMatch ? portMatch[1] : url;
  if (withoutPort.startsWith("http://")) {
    return withoutPort.substring(7);
  }
  return withoutPort.startsWith("https://")
    ? withoutPort.substring(8)
    : withoutPort;
}
/**
 * The main Storage Actor.
 */
const StorageActor = protocol.ActorClassWithSpec(specs.storageSpec, {
typeName: "storage",
// Top-level window of the target tab.
get window() {
return this.parentActor.window;
},
// Document of the top-level window.
get document() {
return this.parentActor.window.document;
},
// All inner windows (frames) currently tracked for this tab.
get windows() {
return this.childWindowPool;
},
initialize(conn, targetActor) {
protocol.Actor.prototype.initialize.call(this, conn);
this.parentActor = targetActor;
this.childActorPool = new Map();
this.childWindowPool = new Set();
// Fetch all the inner iframe windows in this tab.
this.fetchChildWindows(this.parentActor.docShell);
// Initialize the registered store types
for (const [store, ActorConstructor] of storageTypePool) {
this.childActorPool.set(store, new ActorConstructor(this));
}
// Notifications that help us keep track of newly added windows and windows
// that got removed
Services.obs.addObserver(this, "content-document-global-created");
Services.obs.addObserver(this, "inner-window-destroyed");
// Bind once so the same function reference can be removed in destroy().
this.onPageChange = this.onPageChange.bind(this);
const handler = targetActor.chromeEventHandler;
handler.addEventListener("pageshow", this.onPageChange, true);
handler.addEventListener("pagehide", this.onPageChange, true);
this.destroyed = false;
// Accumulates store updates until the batch timer flushes them.
this.boundUpdate = {};
},
/**
 * Tears down observers, event listeners and all child storage actors.
 */
destroy() {
clearTimeout(this.batchTimer);
this.batchTimer = null;
// Remove observers
Services.obs.removeObserver(this, "content-document-global-created");
Services.obs.removeObserver(this, "inner-window-destroyed");
this.destroyed = true;
// FIXME(review): initialize() added these listeners on
// targetActor.chromeEventHandler, but they are removed here from
// parentActor.browser — if those objects differ (or browser is null) the
// listeners leak. Confirm and unify the target of add/remove.
if (this.parentActor.browser) {
this.parentActor.browser.removeEventListener("pageshow", this.onPageChange, true);
this.parentActor.browser.removeEventListener("pagehide", this.onPageChange, true);
}
// Destroy the registered store types
for (const actor of this.childActorPool.values()) {
actor.destroy();
}
this.childActorPool.clear();
this.childWindowPool.clear();
// Null out references to help the GC and to fail fast on use-after-destroy.
this.childActorPool = null;
this.childWindowPool = null;
this.parentActor = null;
this.boundUpdate = null;
this.registeredPool = null;
this._pendingResponse = null;
protocol.Actor.prototype.destroy.call(this);
},
/**
* Given a docshell, recursively find out all the child windows from it.
*
* @param {nsIDocShell} item
* The docshell from which all inner windows need to be extracted.
*/
fetchChildWindows(item) {
const docShell = item.QueryInterface(Ci.nsIDocShell)
.QueryInterface(Ci.nsIDocShellTreeItem);
if (!docShell.contentViewer) {
return null;
}
const window = docShell.contentViewer.DOMDocument.defaultView;
if (window.location.href == "about:blank") {
// Skip out about:blank windows as Gecko creates them multiple times while
// creating any global.
return null;
}
this.childWindowPool.add(window);
for (let i = 0; i < docShell.childCount; i++) {
const child = docShell.getChildAt(i);
this.fetchChildWindows(child);
}
return null;
},
isIncludedInTopLevelWindow(window) {
return isWindowIncluded(this.window, window);
},
getWindowFromInnerWindowID(innerID) {
innerID = innerID.QueryInterface(Ci.nsISupportsPRUint64).data;
for (const win of this.childWindowPool.values()) {
const id = win.windowUtils.currentInnerWindowID;
if (id == innerID) {
return win;
}
}
return null;
},
getWindowFromHost(host) {
for (const win of this.childWindowPool.values()) {
const origin = win.document
.nodePrincipal
.originNoSuffix;
const url = win.document.URL;
if (origin === host || url === host) {
return win;
}
}
return null;
},
/**
* Event handler for any docshell update. This lets us figure out whenever
* any new window is added, or an existing window is removed.
*/
observe(subject, topic) {
if (subject.location &&
(!subject.location.href || subject.location.href == "about:blank")) {
return null;
}
// We don't want to try to find a top level window for an extension page, as
// in many cases (e.g. background page), it is not loaded in a tab, and
// 'isIncludedInTopLevelWindow' throws an error
if (topic == "content-document-global-created"
&& (subject.location.href.startsWith("moz-extension://")
|| this.isIncludedInTopLevelWindow(subject))) {
this.childWindowPool.add(subject);
this.emit("window-ready", subject);
} else if (topic == "inner-window-destroyed") {
const window = this.getWindowFromInnerWindowID(subject);
if (window) {
this.childWindowPool.delete(window);
this.emit("window-destroyed", window);
}
}
return null;
},
/**
* Called on "pageshow" or "pagehide" event on the chromeEventHandler of
* current tab.
*
* @param {event} The event object passed to the handler. We are using these
* three properties from the event:
* - target {document} The document corresponding to the event.
* - type {string} Name of the event - "pageshow" or "pagehide".
* - persisted {boolean} true if there was no
* "content-document-global-created" notification along
* this event.
*/
onPageChange({target, type, persisted}) {
if (this.destroyed) {
return;
}
const window = target.defaultView;
if (type == "pagehide" && this.childWindowPool.delete(window)) {
this.emit("window-destroyed", window);
} else if (type == "pageshow" && persisted && window.location.href &&
window.location.href != "about:blank" &&
this.isIncludedInTopLevelWindow(window)) {
this.childWindowPool.add(window);
this.emit("window-ready", window);
}
},
/**
* Lists the available hosts for all the registered storage types.
*
* @returns {object} An object containing with the following structure:
* - <storageType> : [{
* actor: <actorId>,
* host: <hostname>
* }]
*/
async listStores() {
const toReturn = {};
for (const [name, value] of this.childActorPool) {
// Only list extensionStorage for the add-on storage panel
if (name === "extensionStorage"
&& (!value.storageActor.parentActor.addonId)) {
continue;
}
if (value.preListStores) {
await value.preListStores();
}
toReturn[name] = value;
}
return toReturn;
},
/**
 * This method is called by the registered storage types so as to tell the
 * Storage Actor that there are some changes in the stores. Storage Actor then
 * notifies the client front about these changes at regular (BATCH_DELAY)
 * interval.
 *
 * @param {string} action
 *        The type of change. One of "added", "changed" or "deleted"
 * @param {string} storeType
 *        The storage actor in which this change has occurred.
 * @param {object} data
 *        The update object. This object is of the following format:
 *         - {
 *             <host1>: [<store_names1>, <store_name2>...],
 *             <host2>: [<store_names34>...],
 *           }
 *        Where host1, host2 are the host in which this change happened and
 *        [<store_namesX] is an array of the names of the changed store objects.
 *        Pass an empty array if the host itself was affected: either completely
 *        removed or cleared.
 */
/* eslint-disable complexity */
update(action, storeType, data) {
// "cleared" is emitted immediately and never batched.
if (action == "cleared") {
this.emit("stores-cleared", { [storeType]: data });
return null;
}
// Restart the batching window on every incoming change.
if (this.batchTimer) {
clearTimeout(this.batchTimer);
}
// Merge the new names into this.boundUpdate[action][storeType][host],
// deduplicating as we go.
if (!this.boundUpdate[action]) {
this.boundUpdate[action] = {};
}
if (!this.boundUpdate[action][storeType]) {
this.boundUpdate[action][storeType] = {};
}
for (const host in data) {
if (!this.boundUpdate[action][storeType][host]) {
this.boundUpdate[action][storeType][host] = [];
}
for (const name of data[host]) {
if (!this.boundUpdate[action][storeType][host].includes(name)) {
this.boundUpdate[action][storeType][host].push(name);
}
}
}
if (action == "added") {
// If the same store name was previously deleted or changed, but now is
// added somehow, dont send the deleted or changed update.
this.removeNamesFromUpdateList("deleted", storeType, data);
this.removeNamesFromUpdateList("changed", storeType, data);
} else if (action == "changed" && this.boundUpdate.added &&
this.boundUpdate.added[storeType]) {
// If something got added and changed at the same time, then remove those
// items from changed instead.
this.removeNamesFromUpdateList("changed", storeType,
this.boundUpdate.added[storeType]);
} else if (action == "deleted") {
// If any item got delete, or a host got delete, no point in sending
// added or changed update
this.removeNamesFromUpdateList("added", storeType, data);
this.removeNamesFromUpdateList("changed", storeType, data);
// An empty name array means the entire host was removed: drop any pending
// "added"/"changed" entries for that host wholesale.
for (const host in data) {
if (data[host].length == 0 && this.boundUpdate.added &&
this.boundUpdate.added[storeType] &&
this.boundUpdate.added[storeType][host]) {
delete this.boundUpdate.added[storeType][host];
}
if (data[host].length == 0 && this.boundUpdate.changed &&
this.boundUpdate.changed[storeType] &&
this.boundUpdate.changed[storeType][host]) {
delete this.boundUpdate.changed[storeType][host];
}
}
}
// Flush everything accumulated so far once the batch window elapses.
this.batchTimer = setTimeout(() => {
clearTimeout(this.batchTimer);
this.emit("stores-update", this.boundUpdate);
this.boundUpdate = {};
}, BATCH_DELAY);
return null;
},
/* eslint-enable complexity */
/**
* This method removes data from the this.boundUpdate object in the same
* manner like this.update() adds data to it.
*
* @param {string} action
* The type of change. One of "added", "changed" or "deleted"
* @param {string} storeType
* The storage actor for which you want to remove the updates data.
* @param {object} data
* The update object. This object is of the following format:
* - {
* <host1>: [<store_names1>, <store_name2>...],
* <host2>: [<store_names34>...],
* }
* Where host1, host2 are the hosts which you want to remove and
* [<store_namesX] is an array of the names of the store objects.
*/
removeNamesFromUpdateList(action, storeType, data) {
for (const host in data) {
if (this.boundUpdate[action] && this.boundUpdate[action][storeType] &&
this.boundUpdate[action][storeType][host]) {
for (const name in data[host]) {
const index = this.boundUpdate[action][storeType][host].indexOf(name);
if (index > -1) {
this.boundUpdate[action][storeType][host].splice(index, 1);
}
}
if (!this.boundUpdate[action][storeType][host].length) {
delete this.boundUpdate[action][storeType][host];
}
}
}
return null;
},
});
exports.StorageActor = StorageActor;
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/* globals browser */
"use strict";
// Test-only modules for installing add-ons and driving WebExtensions from
// xpcshell.
const {
AddonTestUtils,
} = ChromeUtils.import("resource://testing-common/AddonTestUtils.jsm");
const {
FileUtils,
} = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
const {
ExtensionTestUtils,
} = ChromeUtils.import("resource://testing-common/ExtensionXPCShellUtils.jsm");
const {
createAppInfo,
promiseStartupManager,
} = AddonTestUtils;
// Prefs controlling whether an extension's uuid and storage survive
// uninstall, plus the pref gating the extensionStorage actor under test.
const LEAVE_UUID_PREF = "extensions.webextensions.keepUuidOnUninstall";
const LEAVE_STORAGE_PREF = "extensions.webextensions.keepStorageOnUninstall";
const EXTENSION_STORAGE_ENABLED_PREF = "devtools.storage.extensionStorage.enabled";
AddonTestUtils.init(this);
createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "42");
ExtensionTestUtils.init(this);
// This storage actor is gated behind a pref, so make sure it is enabled first
Services.prefs.setBoolPref(EXTENSION_STORAGE_ENABLED_PREF, true);
registerCleanupFunction(() => {
Services.prefs.clearUserPref(EXTENSION_STORAGE_ENABLED_PREF);
});
/**
 * Starts up and connects the Debugger server to the DevTools client (both in the main
 * process) by listening over an nsIPipe, so that they can send remote debugging
 * protocol messages to each other.
 *
 * @return {Promise} Resolves with a client object when the debugger has started up.
 */
async function startDebugger() {
  DebuggerServer.init();
  DebuggerServer.registerAllActors();
  const client = new DebuggerClient(DebuggerServer.connectPipe());
  await client.connect();
  return client;
}
/**
 * Set up the equivalent of an `about:debugging` toolbox for a given extension, minus
 * the toolbox.
 *
 * @param {String} id - The id for the extension to be targeted by the toolbox.
 * @return {Object} Resolves with the web extension actor front and target objects when
 *         the debugger has been connected to the extension.
 */
async function setupExtensionDebugging(id) {
  const client = await startDebugger();
  const front = await client.mainRoot.getAddon({id});
  // Connecting starts a DevTools server in the extension child process.
  const target = await front.connect();
  return {front, target};
}
/**
 * Loads and starts up a test extension given the provided extension configuration.
 *
 * @param {Object} extConfig - The extension configuration object
 * @return {ExtensionWrapper} Resolves with an extension object once the
 *         extension has started up.
 */
async function startupExtension(extConfig) {
  const ext = ExtensionTestUtils.loadExtension(extConfig);
  await ext.startup();
  return ext;
}
/**
 * Opens the addon debugger's storage panel.
 *
 * @param {String} id - The addon id
 * @return {Object} Resolves with the web extension actor target and
 *         extensionStorage store objects (the store is null when the actor
 *         was not listed) once the panel has been opened.
 */
async function openAddonStoragePanel(id) {
  const {target} = await setupExtensionDebugging(id);
  const storageFront = await target.getFront("storage");
  const {extensionStorage = null} = await storageFront.listStores();
  return {target, extensionStorage};
}
/**
 * Builds the extension configuration object passed into ExtensionTestUtils.loadExtension
 *
 * @param {Object} options - Options, if any, to add to the configuration
 * @param {Function} options.background - A function comprising the test extension's
 *        background script if provided
 * @param {Object} options.files - An object whose keys correspond to file names and
 *        values map to the file contents
 * @param {Object} options.manifest - An object representing the extension's manifest
 * @return {Object} - The extension configuration object
 */
function getExtensionConfig(options = {}) {
  // The "storage" permission is always forced on; everything else in the
  // caller's manifest is preserved, and remaining options override defaults.
  const {manifest, ...otherOptions} = options;
  return {
    manifest: {
      ...manifest,
      permissions: ["storage"],
    },
    useAddonManager: "temporary",
    ...otherOptions,
  };
}
/**
 * An extension script that can be used in any extension context (e.g. as a background
 * script or as an extension page script loaded in a tab).
 *
 * Handles test messages:
 * - "storage-local-set": stores the given items via browser.storage.local.
 * - "storage-local-get": reads a key back and asserts the value's type.
 * Replies with "<msg>:done" after handling each message, and announces its
 * origin with an "extension-origin" message when the script runs.
 */
async function extensionScriptWithMessageListener() {
browser.test.onMessage.addListener(async (msg, ...args) => {
switch (msg) {
case "storage-local-set":
await browser.storage.local.set(args[0]);
break;
case "storage-local-get":
const {key, valueType} = args[0];
const value = (await browser.storage.local.get(key))[key];
browser.test.assertTrue(typeof value === valueType,
`Value fetched by extension matches expected value type, ${valueType}`
);
break;
default:
browser.test.fail(`Unexpected test message: ${msg}`);
}
// Acknowledge every message so tests can await completion.
browser.test.sendMessage(`${msg}:done`);
});
browser.test.sendMessage("extension-origin", window.location.origin);
}
/**
* Shared files for a test extension that has no background page but adds storage
* items via a transient extension page in a tab
*/
// See extensionScriptWithMessageListener above for the messages this page
// responds to; it is the same listener used by background-page variants.
const ext_no_bg = {
  files: {
    "extension_page_in_tab.html": `<!DOCTYPE html>
      <html>
      <head>
        <meta charset="utf-8">
      </head>
      <body>
        <h1>Extension Page in a Tab</h1>
        <script src="extension_page_in_tab.js"></script>
      </body>
      </html>`,
    "extension_page_in_tab.js": extensionScriptWithMessageListener,
  },
};
/**
* Shutdown procedure common to all tasks.
*
* @param {Object} extension - The test extension
* @param {Object} target - The web extension actor targeted by the DevTools client
*/
async function shutdown(extension, target) {
  // The DevTools target is optional: some tasks never open the storage panel.
  // Destroy it first (when present), then unload the addon itself.
  if (target) {
    await target.destroy();
  }
  await extension.unload();
}
/**
* Mocks the missing 'storage/permanent' directory needed by the "indexedDB"
* storage actor's 'preListStores' method (called when 'listStores' is called). This
* directory exists in a full browser i.e. mochitest.
*/
function createMissingIndexedDBDirs() {
  // Walk down <profile>/storage/permanent one segment at a time, creating
  // each directory only when it does not already exist.
  const dir = Services.dirsvc.get("ProfD", Ci.nsIFile).clone();
  for (const segment of ["storage", "permanent"]) {
    dir.append(segment);
    if (!dir.exists()) {
      dir.create(dir.DIRECTORY_TYPE, FileUtils.PERMS_DIRECTORY);
    }
  }
  Assert.ok(dir.exists(), "Should have a 'storage/permanent' dir in the profile dir");
}
// Start the addon manager and create the profile directories that the
// indexedDB storage actor expects, before any other task runs.
add_task(async function setup() {
  await promiseStartupManager();
  createMissingIndexedDBDirs();
});
add_task(async function test_extension_store_exists() {
  const extension = await startupExtension(getExtensionConfig());
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  // listStores() should expose an extensionStorage store for a debugged addon.
  ok(extensionStorage, "Should have an extensionStorage store");
  await shutdown(extension, target);
});
add_task(async function test_extension_origin_matches_debugger_target() {
  async function background() {
    browser.test.sendMessage("extension-origin", window.location.origin);
  }
  const extension = await startupExtension(getExtensionConfig({background}));
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const {hosts} = extensionStorage;
  // The origin reported by the background page should be among the hosts
  // that the storage actor tracks for this target.
  const expectedHost = await extension.awaitMessage("extension-origin");
  ok(expectedHost in hosts,
    "Should have the expected extension host in the extensionStorage store");
  await shutdown(extension, target);
});
/**
* Test case: Bg page adds item while storage panel is open.
* - Load extension with background page.
* - Open the add-on debugger storage panel.
* - With the panel still open, add an item from the background page.
* - The data in the panel should match the item added by the extension.
*/
add_task(async function test_panel_live_updates() {
  const extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener}),
  );
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const host = await extension.awaitMessage("extension-origin");
  // The store starts out empty...
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(data, [], "Got the expected results on empty storage.local");
  // ...then the background page adds one item per supported value type while
  // the panel is still open.
  extension.sendMessage("storage-local-set", {
    a: 123,
    b: [4, 5],
    c: {d: 678},
    d: true,
    e: "hi",
    f: null,
  });
  await extension.awaitMessage("storage-local-set:done");
  // Re-fetch without reopening the panel: the actor should see the new items.
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data, [
      {area: "local", name: "a", value: {str: "123"}, editable: true},
      {area: "local", name: "b", value: {str: "[4,5]"}, editable: true},
      {area: "local", name: "c", value: {str: "{\"d\":678}"}, editable: true},
      {area: "local", name: "d", value: {str: "true"}, editable: true},
      {area: "local", name: "e", value: {str: "hi"}, editable: true},
      {area: "local", name: "f", value: {str: "null"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/**
* Test case: No bg page. Transient page adds item before storage panel opened.
* - Load extension with no background page.
* - Open an extension page in a tab that adds a local storage item.
* - With the extension page still open, open the add-on storage panel.
* - The data in the storage panel should match the items added by the extension.
*/
add_task(async function test_panel_data_matches_extension_with_transient_page_open() {
  const extension = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  const url = extension.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(url, {extension});
  const host = await extension.awaitMessage("extension-origin");
  // Add the item while the extension page is still open, BEFORE the panel
  // exists; the panel must then pick it up when it is opened.
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await contentPage.close();
  await shutdown(extension, target);
});
/**
* Test case: No bg page. Transient page adds item then closes before storage panel opened.
* - Load extension with no background page.
* - Open an extension page in a tab that adds a local storage item.
* - Close all extension pages.
* - Open the add-on storage panel.
* - The data in the storage panel should match the item added by the extension.
*/
add_task(async function test_panel_data_matches_extension_with_no_pages_open() {
  const extension = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  const url = extension.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(url, {extension});
  const host = await extension.awaitMessage("extension-origin");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  // Unlike the previous task, close every extension page before the panel is
  // opened: the actor must still find the persisted data.
  await contentPage.close();
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/**
* Test case: No bg page. Storage panel live updates when a transient page adds an item.
* - Load extension with no background page.
* - Open the add-on storage panel.
* - With the storage panel still open, open an extension page in a new tab that adds an
* item.
* - Assert:
* - The data in the storage panel should live update to match the item added by the
* extension.
* - If an extension page adds the same data again, the data in the storage panel should
* not change.
*/
add_task(async function test_panel_data_live_updates_for_extension_without_bg_page() {
  const extension = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  // Open the panel first; the extension page is loaded afterwards.
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const url = extension.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(url, {extension});
  const host = await extension.awaitMessage("extension-origin");
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(data, [], "Got the expected results on empty storage.local");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  // Setting the exact same key/value again must not produce duplicates or
  // otherwise change what the panel reports.
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "The results are unchanged when an extension page adds duplicate items"
  );
  await contentPage.close();
  await shutdown(extension, target);
});
/**
* Test case: Bg page adds item while storage panel is open. Panel edits item's value.
* - Load extension with background page.
* - Open the add-on storage panel.
* - With the storage panel still open, add item from the background page.
* - Edit the value of the item in the storage panel
* - Assert:
* - The data in the storage panel should match the item added by the extension.
* - The storage actor is not mutating the item value's data type
* when the item's value is edited in the storage panel
*/
add_task(async function test_editing_items_in_panel_parses_supported_values_correctly() {
  const extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener}),
  );
  const host = await extension.awaitMessage("extension-origin");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  // The item is re-seeded to {a: 123} before each edit below.
  const oldItem = {a: 123};
  const key = Object.keys(oldItem)[0];
  const oldValue = oldItem[key];
  // A tuple representing information for a new value entered into the panel for oldItem:
  // [
  //   typeof value,
  //   editItem string representation of value,
  //   toStoreObject string representation of value,
  // ]
  const valueInfo = [
    ["boolean", "true", "true"],
    ["string", "hi", "hi"],
    ["number", "456", "456"],
    ["object", "{b: 789}", "{\"b\":789}"],
    ["object", "[1, 2, 3]", "[1,2,3]"],
    ["object", "null", "null"],
  ];
  for (const [valueType, editItemValueStr, toStoreObjectValueStr] of valueInfo) {
    info("Setting a storage item through the extension");
    extension.sendMessage("storage-local-set", oldItem);
    await extension.awaitMessage("storage-local-set:done");
    info("Editing the storage item in the panel with a new value of a different type");
    // When the user edits an item in the panel, they are entering a string into a
    // textbox. This string is parsed by the storage actor's editItem method.
    await extensionStorage.editItem({
      host,
      field: "value",
      item: {name: key, value: editItemValueStr},
      oldValue,
    });
    info("Verifying item in the storage actor matches the item edited in the panel");
    const {data} = await extensionStorage.getStoreObjects(host);
    Assert.deepEqual(
      data,
      [{area: "local", name: key, value: {str: toStoreObjectValueStr}, editable: true}],
      "Got the expected results on populated storage.local"
    );
    // The view layer is separate from the database layer; therefore while values are
    // stringified (via toStoreObject) for display in the client, the value's data type in
    // the database is unchanged.
    info("Verifying the expected new value matches the value fetched in the extension");
    extension.sendMessage("storage-local-get", {key, valueType});
    await extension.awaitMessage("storage-local-get:done");
  }
  await shutdown(extension, target);
});
/**
* Test case: Storage panel shows extension storage data added prior to extension startup
* - Load extension that adds a storage item
* - Uninstall the extension
* - Reinstall the extension
* - Open the add-on storage panel.
* - The data in the storage panel should match the data added the first time the extension
* was installed
* Related test case: Storage panel shows extension storage data when an extension that has
* already migrated to the IndexedDB storage backend prior to extension startup adds
* another storage item.
* - (Building from previous steps)
* - The reinstalled extension adds a storage item
* - The data in the storage panel should live update with both items: the item added from
* the first and the item added from the reinstall.
*/
add_task(async function test_panel_data_matches_data_added_prior_to_ext_startup() {
  // The pref to leave the addonid->uuid mapping around after uninstall so that we can
  // re-attach to the same storage
  Services.prefs.setBoolPref(LEAVE_UUID_PREF, true);
  // The pref to prevent cleaning up storage on uninstall
  Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, true);
  let extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener}),
  );
  const host = await extension.awaitMessage("extension-origin");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  // No target was opened yet, so only the extension itself is unloaded here.
  await shutdown(extension);
  // Reinstall the same extension
  extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener})
  );
  await extension.awaitMessage("extension-origin");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  // The item persisted across uninstall/reinstall thanks to the prefs above.
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  // Related test case
  extension.sendMessage("storage-local-set", {b: 456});
  await extension.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [
      {area: "local", name: "a", value: {str: "123"}, editable: true},
      {area: "local", name: "b", value: {str: "456"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );
  Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, false);
  Services.prefs.setBoolPref(LEAVE_UUID_PREF, false);
  await shutdown(extension, target);
});
// Runs even if the previous task failed partway, so the prefs it sets never
// leak into later tasks.
add_task(function cleanup_for_test_panel_data_matches_data_added_prior_to_ext_startup() {
  Services.prefs.clearUserPref(LEAVE_UUID_PREF);
  Services.prefs.clearUserPref(LEAVE_STORAGE_PREF);
});
/**
* Test case: Bg page adds an item to storage. With storage panel open, reload extension.
* - Load extension with background page that adds a storage item on message.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item added prior to reloading.
*/
add_task(async function test_panel_live_reload() {
  // A fixed id so that version 2.0 below upgrades this same addon.
  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  let manifest = {
    version: "1.0",
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  };
  info("Loading extension version 1.0");
  const extension = await startupExtension(
    getExtensionConfig({
      manifest,
      background: extensionScriptWithMessageListener,
    })
  );
  info("Waiting for message from test extension");
  const host = await extension.awaitMessage("extension-origin");
  info("Adding storage item");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  manifest = {
    ...manifest,
    version: "2.0",
  };
  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Update to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest,
      background: extensionScriptWithMessageListener,
    }),
  );
  await extension.awaitMessage("extension-origin");
  // The panel (still open across the upgrade) should show the 1.0 data.
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/**
* Test case: Transient page adds an item to storage. With storage panel open,
* reload extension.
* - Load extension with no background page.
* - Open transient page that adds a storage item on message.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item added prior to reloading.
*/
add_task(async function test_panel_live_reload_for_extension_without_bg_page() {
  // A fixed id so that version 2.0 below upgrades this same addon.
  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  let manifest = {
    version: "1.0",
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  };
  info("Loading and starting extension version 1.0");
  const extension = await startupExtension(getExtensionConfig({
    manifest,
    files: ext_no_bg.files,
  }));
  info("Opening extension page in a tab");
  const url = extension.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(url, {extension});
  const host = await extension.awaitMessage("extension-origin");
  info("Waiting for extension page in a tab to add storage item");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  await contentPage.close();
  info("Opening storage panel");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  manifest = {
    ...manifest,
    version: "2.0",
  };
  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Updating extension to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest,
      files: ext_no_bg.files,
    })
  );
  // No page is opened for 2.0; the data added by 1.0 must still be visible.
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/**
* Test case: Bg page auto adds item(s). With storage panel open, reload extension.
* - Load extension with background page that automatically adds a storage item on startup.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item(s) added by the reloaded
* extension.
*/
add_task(async function test_panel_live_reload_when_extension_auto_adds_items() {
  // Unlike the shared listener script, this background page adds its items
  // unconditionally on every startup (including after the upgrade below).
  async function background() {
    await browser.storage.local.set({a: {b: 123}, c: {d: 456}});
    browser.test.sendMessage("extension-origin", window.location.origin);
  }
  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  let manifest = {
    version: "1.0",
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  };
  info("Loading and starting extension version 1.0");
  const extension = await startupExtension(getExtensionConfig({manifest, background}));
  info("Waiting for message from test extension");
  const host = await extension.awaitMessage("extension-origin");
  info("Opening storage panel");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  manifest = {
    ...manifest,
    version: "2.0",
  };
  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Update to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest,
      background,
    }),
  );
  await extension.awaitMessage("extension-origin");
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [
      {area: "local", name: "a", value: {str: "{\"b\":123}"}, editable: true},
      {area: "local", name: "c", value: {str: "{\"d\":456}"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/*
* This task should be last, as it sets a pref to disable the extensionStorage
* storage actor. Since this pref is set at the beginning of the file, it
* already will be cleared via registerCleanupFunction when the test finishes.
*/
add_task(async function test_extensionStorage_store_disabled_on_pref() {
  // Pref is cleared by registerCleanupFunction at the end of the test run,
  // which is why this task must remain last in the file.
  Services.prefs.setBoolPref(EXTENSION_STORAGE_ENABLED_PREF, false);
  const extension = await startupExtension(getExtensionConfig());
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  ok(
    extensionStorage === null,
    "Should not have an extensionStorage store when pref disabled"
  );
  await shutdown(extension, target);
});
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const protocol = require("devtools/shared/protocol");
const { Arg, RetVal, types } = protocol;
// Populated by createStorageSpec() below: maps a storage typeName to its
// generated protocol actor spec. Exported at the bottom of the file.
const childSpecs = {};
/**
 * Generates an actor spec for one storage type and registers it in
 * childSpecs under its typeName.
 *
 * @param {Object} options
 *        - typeName {String}: protocol type name of the storage actor.
 *        - storeObjectType {String}: dict type returned by getStoreObjects.
 *        - methods {Object}: extra method specs for this storage type.
 */
function createStorageSpec(options) {
  const {typeName, storeObjectType, methods: extraMethods} = options;
  childSpecs[typeName] = protocol.generateActorSpec({
    typeName,
    methods: {
      // Methods shared by every storage type...
      getStoreObjects: {
        request: {
          host: Arg(0),
          names: Arg(1, "nullable:array:string"),
          options: Arg(2, "nullable:json"),
        },
        response: RetVal(storeObjectType),
      },
      getFields: {
        request: {
          subType: Arg(0, "nullable:string"),
        },
        response: {
          value: RetVal("json"),
        },
      },
      // ...plus the ones specific to this storage type.
      ...extraMethods,
    },
  });
}
// Cookies store object
types.addDictType("cookieobject", {
  uniqueKey: "string",
  name: "string",
  value: "longstring",
  path: "nullable:string",
  host: "string",
  hostOnly: "boolean",
  isSecure: "boolean",
  isHttpOnly: "boolean",
  creationTime: "number",
  lastAccessed: "number",
  expires: "number",
});
// Array of cookie store objects
types.addDictType("cookiestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:cookieobject",
});
// Common methods for edit/remove, shared by the cookie spec and by
// storageMethods further below.
const editRemoveMethods = {
  getFields: {
    request: {},
    response: {
      value: RetVal("json"),
    },
  },
  editItem: {
    request: {
      data: Arg(0, "json"),
    },
    response: {},
  },
  removeItem: {
    request: {
      host: Arg(0, "string"),
      name: Arg(1, "string"),
    },
    response: {},
  },
};
// Cookies actor spec
createStorageSpec({
  typeName: "cookies",
  storeObjectType: "cookiestoreobject",
  methods: Object.assign({},
    editRemoveMethods,
    {
      addItem: {
        request: {
          guid: Arg(0, "string"),
        },
        response: {},
      },
    }, {
      removeAll: {
        request: {
          host: Arg(0, "string"),
          domain: Arg(1, "nullable:string"),
        },
        response: {},
      },
    }, {
      removeAllSessionCookies: {
        request: {
          host: Arg(0, "string"),
          domain: Arg(1, "nullable:string"),
        },
        response: {},
      },
    }
  ),
});
// Local Storage / Session Storage store object
types.addDictType("storageobject", {
  name: "string",
  value: "longstring",
});
// Common methods for local/session storage
const storageMethods = Object.assign({},
  editRemoveMethods,
  {
    addItem: {
      request: {
        guid: Arg(0, "string"),
        host: Arg(1, "nullable:string"),
      },
      response: {},
    },
  },
  {
    removeAll: {
      request: {
        host: Arg(0, "string"),
      },
      response: {},
    },
  }
);
// Array of Local Storage / Session Storage store objects
types.addDictType("storagestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:storageobject",
});
createStorageSpec({
  typeName: "localStorage",
  storeObjectType: "storagestoreobject",
  methods: storageMethods,
});
createStorageSpec({
  typeName: "sessionStorage",
  storeObjectType: "storagestoreobject",
  methods: storageMethods,
});
// extensionStorage store object; name/value are nullable unlike storageobject.
types.addDictType("extensionobject", {
  name: "nullable:string",
  value: "nullable:longstring",
});
// Array of extensionStorage store objects
types.addDictType("extensionstoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:extensionobject",
});
// extensionStorage reuses the local/session storage method specs.
createStorageSpec({
  typeName: "extensionStorage",
  storeObjectType: "extensionstoreobject",
  methods: {
    ...storageMethods,
  },
});
// Cache store object
types.addDictType("cacheobject", {
  "url": "string",
  "status": "string",
});
// Array of Cache store objects
types.addDictType("cachestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:cacheobject",
});
// Cache storage spec
createStorageSpec({
  typeName: "Cache",
  storeObjectType: "cachestoreobject",
  methods: {
    removeAll: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
    removeItem: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
  },
});
// Indexed DB store object
// This is a union on idb object, db metadata object and object store metadata
// object
types.addDictType("idbobject", {
  uniqueKey: "string",
  name: "nullable:string",
  db: "nullable:string",
  objectStore: "nullable:string",
  origin: "nullable:string",
  version: "nullable:number",
  storage: "nullable:string",
  objectStores: "nullable:number",
  keyPath: "nullable:string",
  autoIncrement: "nullable:boolean",
  indexes: "nullable:string",
  value: "nullable:longstring",
});
// Array of Indexed DB store objects
types.addDictType("idbstoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:idbobject",
});
// Result of Indexed DB delete operation: can block or throw error
types.addDictType("idbdeleteresult", {
  blocked: "nullable:boolean",
  error: "nullable:string",
});
createStorageSpec({
  typeName: "indexedDB",
  storeObjectType: "idbstoreobject",
  methods: {
    removeDatabase: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: RetVal("idbdeleteresult"),
    },
    removeAll: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
    removeItem: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
  },
});
// Update notification object
types.addDictType("storeUpdateObject", {
  changed: "nullable:json",
  deleted: "nullable:json",
  added: "nullable:json",
});
// Generate a type definition for an object with actors for all storage types.
types.addDictType("storelist", Object.keys(childSpecs).reduce((obj, type) => {
  obj[type] = type;
  return obj;
}, {}));
exports.childSpecs = childSpecs;
exports.storageSpec = protocol.generateActorSpec({
  typeName: "storage",
  /**
   * List of event notifications that the server can send to the client.
   *
   * - stores-update : When any store object in any storage type changes.
   * - stores-cleared : When all the store objects are removed.
   * - stores-reloaded : When all stores are reloaded. This generally means
   *   that we should refetch everything again.
   */
  events: {
    "stores-update": {
      type: "storesUpdate",
      data: Arg(0, "storeUpdateObject"),
    },
    "stores-cleared": {
      type: "storesCleared",
      data: Arg(0, "json"),
    },
    "stores-reloaded": {
      type: "storesReloaded",
      data: Arg(0, "json"),
    },
  },
  methods: {
    listStores: {
      request: {},
      response: RetVal("storelist"),
    },
  },
});
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {Cc, Ci, Cu, CC} = require("chrome");
const protocol = require("devtools/shared/protocol");
const {LongStringActor} = require("devtools/server/actors/string");
const {DebuggerServer} = require("devtools/server/main");
const Services = require("Services");
const defer = require("devtools/shared/defer");
const {isWindowIncluded} = require("devtools/shared/layout/utils");
const specs = require("devtools/shared/specs/storage");
const {ExtensionProcessScript} = require("resource://gre/modules/ExtensionProcessScript.jsm");
const {ExtensionStorageIDB} = require("resource://gre/modules/ExtensionStorageIDB.jsm");
const {WebExtensionPolicy} = Cu.getGlobalForObject(require("resource://gre/modules/XPCOMUtils.jsm"));
const CHROME_ENABLED_PREF = "devtools.chrome.enabled";
const REMOTE_ENABLED_PREF = "devtools.debugger.remote-enabled";
const EXTENSION_STORAGE_ENABLED_PREF = "devtools.storage.extensionStorage.enabled";
const DEFAULT_VALUE = "value";
// Process-script message name for storage.local change notifications.
const DEVTOOLS_EXT_STORAGELOCAL_CHANGED = "Extension:DevTools:OnStorageLocalChanged";
loader.lazyRequireGetter(this, "naturalSortCaseInsensitive",
  "devtools/client/shared/natural-sort", true);
// "Lax", "Strict" and "Unset" are special values of the sameSite property
// that should not be translated.
const COOKIE_SAMESITE = {
  LAX: "Lax",
  STRICT: "Strict",
  UNSET: "Unset",
};
const SAFE_HOSTS_PREFIXES_REGEX =
  /^(about\+|https?\+|file\+|moz-extension\+)/;
// GUID to be used as a separator in compound keys. This must match the same
// constant in devtools/client/storage/ui.js,
// devtools/client/storage/test/head.js and
// devtools/server/tests/browser/head.js
const SEPARATOR_GUID = "{9d414cc5-8319-0a04-0586-c0a6ae01670a}";
loader.lazyImporter(this, "OS", "resource://gre/modules/osfile.jsm");
loader.lazyImporter(this, "Sqlite", "resource://gre/modules/Sqlite.jsm");
// We give this a funny name to avoid confusion with the global
// indexedDB.
loader.lazyGetter(this, "indexedDBForStorage", () => {
  // On xpcshell, we can't instantiate indexedDB without crashing
  try {
    const sandbox
      = Cu.Sandbox(CC("@mozilla.org/systemprincipal;1", "nsIPrincipal")(),
      {wantGlobalProperties: ["indexedDB"]});
    return sandbox.indexedDB;
  } catch (e) {
    return {};
  }
});
// Maximum number of cookies/local storage key-value-pairs that can be sent
// over the wire to the client in one request.
const MAX_STORE_OBJECT_COUNT = 50;
// Delay for the batch job that sends the accumulated update packets to the
// client (ms).
const BATCH_DELAY = 200;
// MAX_COOKIE_EXPIRY should be 2^63-1, but JavaScript can't handle that
// precision.
const MAX_COOKIE_EXPIRY = Math.pow(2, 62);
// A RegExp for characters that cannot appear in a file/directory name. This is
// used to sanitize the host name for indexed db to lookup whether the file is
// present in <profileDir>/storage/default/ location
var illegalFileNameCharacters = [
  "[",
  // Characters in the range \x00-\x24 (NUL through "$")
  "\\x00-\\x24",
  // Special characters
  "/:*?\\\"<>|\\\\",
  "]",
].join("");
var ILLEGAL_CHAR_REGEX = new RegExp(illegalFileNameCharacters, "g");
// Holder for all the registered storage actors.
var storageTypePool = new Map();
/**
* An async method equivalent to setTimeout but using Promises
*
* @param {number} time
* The wait time in milliseconds.
*/
function sleep(time) {
  // A plain Promise replaces the deferred (defer()) anti-pattern: wrapping
  // setTimeout directly is the idiomatic way to adapt a callback API.
  // Resolves with null after `time` milliseconds, as before.
  return new Promise(resolve => {
    setTimeout(() => {
      resolve(null);
    }, time);
  });
}
// Helper methods to create a storage actor.
// Namespace object: factory helpers (e.g. StorageActors.defaults below) are
// attached to it further down in this file.
var StorageActors = {};
/**
* Creates a default object with the common methods required by all storage
* actors.
*
* This default object is missing a couple of required methods that should be
 * implemented separately for each actor. They are namely:
 * - observe : Method which gets triggered on the notification of the watched
* topic.
* - getNamesForHost : Given a host, get list of all known store names.
* - getValuesForHost : Given a host (and optionally a name) get all known
* store objects.
* - toStoreObject : Given a store object, convert it to the required format
* so that it can be transferred over wire.
* - populateStoresForHost : Given a host, populate the map of all store
* objects for it
 * - getFields: Given an optional subType, get an array of objects containing
 *   column field info. The info includes:
 *   "name": the name of the column key.
 *   "editable": 1 if the field is editable; 0 if it is not.
*
* @param {string} typeName
* The typeName of the actor.
* @param {array} observationTopics
* An array of topics which this actor listens to via Notification Observers.
*/
StorageActors.defaults = function(typeName, observationTopics) {
  return {
    typeName: typeName,
    // Convenience accessor: child storage actors share the parent storage
    // actor's devtools server connection.
    get conn() {
      return this.storageActor.conn;
    },
    /**
     * Returns a list of currently known hosts for the target window. This list
     * contains unique hosts from the window + all inner windows. If
     * this._internalHosts is defined then these will also be added to the list.
     */
    get hosts() {
      const hosts = new Set();
      for (const {location} of this.storageActor.windows) {
        const host = this.getHostName(location);
        if (host) {
          hosts.add(host);
        }
      }
      if (this._internalHosts) {
        for (const host of this._internalHosts) {
          hosts.add(host);
        }
      }
      return hosts;
    },
    /**
     * Returns all the windows present on the page. Includes main window + inner
     * iframe windows.
     */
    get windows() {
      return this.storageActor.windows;
    },
    /**
     * Converts the window.location object into a URL (e.g. http://domain.com).
     * Returns null for protocols that cannot hold storage (chrome:, data:,
     * or a missing location).
     */
    getHostName(location) {
      if (!location) {
        // Debugging a legacy Firefox extension... no hostname available and no
        // storage possible.
        return null;
      }
      switch (location.protocol) {
        case "about:":
          return `${location.protocol}${location.pathname}`;
        case "chrome:":
          // chrome: URLs do not support storage of any type.
          return null;
        case "data:":
          // data: URLs do not support storage of any type.
          return null;
        case "file:":
          return `${location.protocol}//${location.pathname}`;
        case "javascript:":
          return location.href;
        case "moz-extension:":
          return location.origin;
        case "resource:":
          return `${location.origin}${location.pathname}`;
        default:
          // http: or unknown protocol.
          return `${location.protocol}//${location.host}`;
      }
    },
    /**
     * Builds the host -> stores map and subscribes to window lifecycle events
     * plus any observer-service topics this storage type listens to.
     *
     * @param {StorageActor} storageActor
     *        The parent storage actor this child actor belongs to.
     */
    initialize(storageActor) {
      protocol.Actor.prototype.initialize.call(this, null);
      this.storageActor = storageActor;
      this.populateStoresForHosts();
      if (observationTopics) {
        observationTopics.forEach((observationTopic) => {
          Services.obs.addObserver(this, observationTopic);
        });
      }
      this.onWindowReady = this.onWindowReady.bind(this);
      this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
      this.storageActor.on("window-ready", this.onWindowReady);
      this.storageActor.on("window-destroyed", this.onWindowDestroyed);
    },
    /**
     * Mirror of initialize(): unsubscribes observers and window listeners,
     * clears the cached store map and drops the parent-actor reference.
     */
    destroy() {
      if (observationTopics) {
        observationTopics.forEach((observationTopic) => {
          Services.obs.removeObserver(this, observationTopic);
        });
      }
      this.storageActor.off("window-ready", this.onWindowReady);
      this.storageActor.off("window-destroyed", this.onWindowDestroyed);
      this.hostVsStores.clear();
      protocol.Actor.prototype.destroy.call(this);
      this.storageActor = null;
    },
    // Names (keys) of all cached store items for the given host. Several
    // storage types override this (e.g. local/session storage below).
    getNamesForHost(host) {
      return [...this.hostVsStores.get(host).keys()];
    },
    // Values for one named item, or all items when `name` is falsy.
    // NOTE: overrides may accept extra arguments (options, hostVsStores,
    // principal) — getStoreObjects below passes all five.
    getValuesForHost(host, name) {
      if (name) {
        return [this.hostVsStores.get(host).get(name)];
      }
      return [...this.hostVsStores.get(host).values()];
    },
    // Default item count; overrides may use the extra options argument
    // passed by getStoreObjects for a real total.
    getObjectsSize(host, names) {
      return names.length;
    },
    /**
     * When a new window is added to the page. This generally means that a new
     * iframe is created, or the current window is completely reloaded.
     *
     * @param {window} window
     *        The window which was added.
     */
    async onWindowReady(window) {
      const host = this.getHostName(window.location);
      if (host && !this.hostVsStores.has(host)) {
        await this.populateStoresForHost(host, window);
        const data = {};
        data[host] = this.getNamesForHost(host);
        this.storageActor.update("added", typeName, data);
      }
    },
    /**
     * When a window is removed from the page. This generally means that an
     * iframe was removed, or the current window reload is triggered.
     *
     * @param {window} window
     *        The window which was removed.
     */
    onWindowDestroyed(window) {
      if (!window.location) {
        // Nothing can be done if location object is null
        return;
      }
      const host = this.getHostName(window.location);
      // Only drop the host if no other live window still belongs to it.
      if (host && !this.hosts.has(host)) {
        this.hostVsStores.delete(host);
        const data = {};
        data[host] = [];
        this.storageActor.update("deleted", typeName, data);
      }
    },
    // Protocol form: advertises known hosts only; actual store values are
    // fetched lazily through getStoreObjects.
    form() {
      const hosts = {};
      for (const host of this.hosts) {
        hosts[host] = [];
      }
      return {
        actor: this.actorID,
        hosts: hosts,
      };
    },
    /**
     * Populates a map of known hosts vs a map of stores vs value.
     */
    populateStoresForHosts() {
      this.hostVsStores = new Map();
      for (const host of this.hosts) {
        this.populateStoresForHost(host);
      }
    },
    /**
     * Returns a list of requested store objects. Maximum values returned are
     * MAX_STORE_OBJECT_COUNT. This method returns paginated values whose
     * starting index and total size can be controlled via the options object
     *
     * @param {string} host
     *        The host name for which the store values are required.
     * @param {array:string} names
     *        Array containing the names of required store objects. Empty if all
     *        items are required.
     * @param {object} options
     *        Additional options for the request containing following
     *        properties:
     *         - offset {number} : The begin index of the returned array amongst
     *                  the total values
     *         - size {number} : The number of values required.
     *         - sortOn {string} : The values should be sorted on this property.
     *         - index {string} : In case of indexed db, the IDBIndex to be used
     *                 for fetching the values.
     *
     * @return {object} An object containing following properties:
     *          - offset - The actual offset of the returned array. This might
     *                     be different from the requested offset if that was
     *                     invalid
     *          - total - The total number of entries possible.
     *          - data - The requested values.
     */
    async getStoreObjects(host, names, options = {}) {
      const offset = options.offset || 0;
      let size = options.size || MAX_STORE_OBJECT_COUNT;
      if (size > MAX_STORE_OBJECT_COUNT) {
        size = MAX_STORE_OBJECT_COUNT;
      }
      const sortOn = options.sortOn || "name";
      const toReturn = {
        offset: offset,
        total: 0,
        data: [],
      };
      let principal = null;
      if (this.typeName === "indexedDB") {
        // We only acquire principal when the type of the storage is indexedDB
        // because the principal only matters the indexedDB.
        const win = this.storageActor.getWindowFromHost(host);
        principal = this.getPrincipal(win);
      }
      if (names) {
        for (const name of names) {
          // Overridden getValuesForHost implementations may return either a
          // plain array, {result}, or {objectStores} — handle all three.
          const values = await this.getValuesForHost(host, name, options,
            this.hostVsStores, principal);
          const {result, objectStores} = values;
          if (result && typeof result.objectsSize !== "undefined") {
            for (const {key, count} of result.objectsSize) {
              this.objectsSize[key] = count;
            }
          }
          if (result) {
            toReturn.data.push(...result.data);
          } else if (objectStores) {
            toReturn.data.push(...objectStores);
          } else {
            toReturn.data.push(...values);
          }
        }
        toReturn.total = this.getObjectsSize(host, names, options);
      } else {
        let obj = await this.getValuesForHost(host, undefined, undefined,
          this.hostVsStores, principal);
        if (obj.dbs) {
          obj = obj.dbs;
        }
        toReturn.total = obj.length;
        toReturn.data = obj;
      }
      if (offset > toReturn.total) {
        // In this case, toReturn.data is an empty array.
        toReturn.offset = toReturn.total;
        toReturn.data = [];
      } else {
        // We need to use natural sort before slicing.
        const sorted = toReturn.data.sort((a, b) => {
          return naturalSortCaseInsensitive(a[sortOn], b[sortOn]);
        });
        let sliced;
        if (this.typeName === "indexedDB") {
          // indexedDB's getValuesForHost never returns *all* values available but only
          // a slice, starting at the expected offset. Therefore the result is already
          // sliced as expected.
          sliced = sorted;
        } else {
          sliced = sorted.slice(offset, offset + size);
        }
        toReturn.data = sliced.map(a => this.toStoreObject(a));
      }
      return toReturn;
    },
    // Principal for the host's window, or the system principal when running
    // in the browser toolbox (no content window available).
    getPrincipal(win) {
      if (win) {
        return win.document.nodePrincipal;
      }
      // We are running in the browser toolbox and viewing system DBs so we
      // need to use system principal.
      return Cc["@mozilla.org/systemprincipal;1"]
        .createInstance(Ci.nsIPrincipal);
    },
  };
};
/**
* Creates an actor and its corresponding front and registers it to the Storage
* Actor.
*
* @See StorageActors.defaults()
*
* @param {object} options
* Options required by StorageActors.defaults method which are :
* - typeName {string}
* The typeName of the actor.
* - observationTopics {array}
* The topics which this actor listens to via
* Notification Observers.
* @param {object} overrides
* All the methods which you want to be different from the ones in
* StorageActors.defaults method plus the required ones described there.
*/
StorageActors.createActor = function(options = {}, overrides = {}) {
  // Start from the default actor skeleton for this storage type.
  const base = StorageActors.defaults(
    options.typeName,
    options.observationTopics || null
  );
  // Layer the per-type overrides on top of the defaults.
  for (const key in overrides) {
    base[key] = overrides[key];
  }
  // Build the protocol actor from its spec and register it in the pool so
  // the storage actor can instantiate it per target.
  const spec = specs.childSpecs[options.typeName];
  storageTypePool.set(base.typeName, protocol.ActorClassWithSpec(spec, base));
};
/**
* The Cookies actor and front.
*/
StorageActors.createActor({
  typeName: "cookies",
}, {
  /**
   * Wires up the cookie helpers (possibly proxied to the parent process in
   * e10s), populates the initial store map and registers cookie observers.
   */
  initialize(storageActor) {
    protocol.Actor.prototype.initialize.call(this, null);
    this.storageActor = storageActor;
    // Must run before populateStoresForHosts: it installs
    // getCookiesFromHost/addCookieObservers/etc. on this instance.
    this.maybeSetupChildProcess();
    this.populateStoresForHosts();
    this.addCookieObservers();
    this.onWindowReady = this.onWindowReady.bind(this);
    this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
    this.storageActor.on("window-ready", this.onWindowReady);
    this.storageActor.on("window-destroyed", this.onWindowDestroyed);
  },
  destroy() {
    this.hostVsStores.clear();
    // We need to remove the cookie listeners early in E10S mode so we need to
    // use a conditional here to ensure that we only attempt to remove them in
    // single process mode.
    if (!DebuggerServer.isInChildProcess) {
      this.removeCookieObservers();
    }
    this.storageActor.off("window-ready", this.onWindowReady);
    this.storageActor.off("window-destroyed", this.onWindowDestroyed);
    this._pendingResponse = null;
    protocol.Actor.prototype.destroy.call(this);
    this.storageActor = null;
  },
  /**
   * Given a cookie object, figure out all the matching hosts from the page that
   * the cookie belong to.
   * Accepts a single cookie object too: anything without a .length is wrapped
   * into a one-element array first.
   */
  getMatchingHosts(cookies) {
    if (!cookies.length) {
      cookies = [cookies];
    }
    const hosts = new Set();
    for (const host of this.hosts) {
      for (const cookie of cookies) {
        if (this.isCookieAtHost(cookie, host)) {
          hosts.add(host);
        }
      }
    }
    return [...hosts];
  },
  /**
   * Given a cookie object and a host, figure out if the cookie is valid for
   * that host.
   * Handles domain cookies (leading "."), file:// cookies (empty host) and
   * exact host matches.
   */
  isCookieAtHost(cookie, host) {
    if (cookie.host == null) {
      return host == null;
    }
    host = trimHttpHttpsPort(host);
    if (cookie.host.startsWith(".")) {
      // Domain cookie: match the host itself and any subdomain.
      return ("." + host).endsWith(cookie.host);
    }
    if (cookie.host === "") {
      // Cookie set from a local file.
      return host.startsWith("file://" + cookie.path);
    }
    return cookie.host == host;
  },
  // Serializes an nsICookie into the wire format expected by the client,
  // normalizing all timestamps to milliseconds.
  toStoreObject(cookie) {
    if (!cookie) {
      return null;
    }
    return {
      uniqueKey: `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                 `${SEPARATOR_GUID}${cookie.path}`,
      name: cookie.name,
      host: cookie.host || "",
      path: cookie.path || "",
      // because expires is in seconds
      expires: (cookie.expires || 0) * 1000,
      // because creationTime is in micro seconds
      creationTime: cookie.creationTime / 1000,
      // - do -
      lastAccessed: cookie.lastAccessed / 1000,
      value: new LongStringActor(this.conn, cookie.value || ""),
      hostOnly: !cookie.isDomain,
      isSecure: cookie.isSecure,
      isHttpOnly: cookie.isHttpOnly,
      sameSite: this.getSameSiteStringFromCookie(cookie),
    };
  },
  // Maps the cookie's numeric sameSite constant onto the display strings.
  getSameSiteStringFromCookie(cookie) {
    switch (cookie.sameSite) {
      case cookie.SAMESITE_LAX:
        return COOKIE_SAMESITE.LAX;
      case cookie.SAMESITE_STRICT:
        return COOKIE_SAMESITE.STRICT;
    }
    // cookie.SAMESITE_NONE
    return COOKIE_SAMESITE.UNSET;
  },
  // Fills hostVsStores[host] with every cookie valid for that host, keyed by
  // the compound name/host/path key.
  populateStoresForHost(host) {
    this.hostVsStores.set(host, new Map());
    const doc = this.storageActor.document;
    const cookies = this.getCookiesFromHost(host, doc.nodePrincipal
                                                     .originAttributes);
    for (const cookie of cookies) {
      if (this.isCookieAtHost(cookie, host)) {
        const uniqueKey = `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                          `${SEPARATOR_GUID}${cookie.path}`;
        this.hostVsStores.get(host).set(uniqueKey, cookie);
      }
    }
  },
  /**
   * Notification observer for "cookie-change".
   *
   * @param subject
   *        {Cookie|[Array]} A JSON parsed object containing either a single
   *        cookie representation or an array. Array is only in case of
   *        a "batch-deleted" action.
   * @param {string} topic
   *        The topic of the notification.
   * @param {string} action
   *        Additional data associated with the notification. Its the type of
   *        cookie change in the "cookie-change" topic.
   */
  onCookieChanged(subject, topic, action) {
    if (topic !== "cookie-changed" ||
        !this.storageActor ||
        !this.storageActor.windows) {
      return null;
    }
    const hosts = this.getMatchingHosts(subject);
    const data = {};
    switch (action) {
      case "added":
      case "changed":
        if (hosts.length) {
          for (const host of hosts) {
            const uniqueKey = `${subject.name}${SEPARATOR_GUID}${subject.host}` +
                              `${SEPARATOR_GUID}${subject.path}`;
            this.hostVsStores.get(host).set(uniqueKey, subject);
            data[host] = [uniqueKey];
          }
          this.storageActor.update(action, "cookies", data);
        }
        break;
      case "deleted":
        if (hosts.length) {
          for (const host of hosts) {
            const uniqueKey = `${subject.name}${SEPARATOR_GUID}${subject.host}` +
                              `${SEPARATOR_GUID}${subject.path}`;
            this.hostVsStores.get(host).delete(uniqueKey);
            data[host] = [uniqueKey];
          }
          this.storageActor.update("deleted", "cookies", data);
        }
        break;
      case "batch-deleted":
        // subject is an array of cookies here.
        if (hosts.length) {
          for (const host of hosts) {
            const stores = [];
            for (const cookie of subject) {
              const uniqueKey = `${cookie.name}${SEPARATOR_GUID}${cookie.host}` +
                                `${SEPARATOR_GUID}${cookie.path}`;
              this.hostVsStores.get(host).delete(uniqueKey);
              stores.push(uniqueKey);
            }
            data[host] = stores;
          }
          this.storageActor.update("deleted", "cookies", data);
        }
        break;
      case "cleared":
        if (hosts.length) {
          for (const host of hosts) {
            data[host] = [];
          }
          this.storageActor.update("cleared", "cookies", data);
        }
        break;
    }
    return null;
  },
  // Column metadata for the storage inspector table UI.
  async getFields() {
    return [
      { name: "uniqueKey", editable: false, private: true },
      { name: "name", editable: true, hidden: false },
      { name: "host", editable: true, hidden: false },
      { name: "path", editable: true, hidden: false },
      { name: "expires", editable: true, hidden: false },
      { name: "lastAccessed", editable: false, hidden: false },
      { name: "creationTime", editable: false, hidden: true },
      { name: "value", editable: true, hidden: false },
      { name: "hostOnly", editable: false, hidden: true },
      { name: "isSecure", editable: true, hidden: true },
      { name: "isHttpOnly", editable: true, hidden: false },
      { name: "sameSite", editable: false, hidden: false },
    ];
  },
  /**
   * Pass the editItem command from the content to the chrome process.
   *
   * @param {Object} data
   *        See editCookie() for format details.
   */
  async editItem(data) {
    const doc = this.storageActor.document;
    data.originAttributes = doc.nodePrincipal
                               .originAttributes;
    this.editCookie(data);
  },
  // Adds a placeholder cookie (name = guid) that expires in one day.
  async addItem(guid) {
    const doc = this.storageActor.document;
    const time = new Date().getTime();
    const expiry = new Date(time + 3600 * 24 * 1000).toGMTString();
    doc.cookie = `${guid}=${DEFAULT_VALUE};expires=${expiry}`;
  },
  async removeItem(host, name) {
    const doc = this.storageActor.document;
    this.removeCookie(host, name, doc.nodePrincipal
                                     .originAttributes);
  },
  async removeAll(host, domain) {
    const doc = this.storageActor.document;
    this.removeAllCookies(host, domain, doc.nodePrincipal
                                           .originAttributes);
  },
  // NOTE(review): maybeSetupChildProcess assigns an instance property with
  // this same name (bound to cookieHelpers.removeAllSessionCookies), which
  // appears to shadow this method, so originAttributes may never be
  // appended — TODO confirm and rename one of the two.
  async removeAllSessionCookies(host, domain) {
    const doc = this.storageActor.document;
    this.removeAllSessionCookies(host, domain, doc.nodePrincipal
                                                  .originAttributes);
  },
  /**
   * Installs the cookie helper methods on this instance. In the parent
   * process they call cookieHelpers directly; in a child process they are
   * proxied over the message manager (synchronously for requests, with a
   * listener for parent-initiated change notifications).
   */
  maybeSetupChildProcess() {
    cookieHelpers.onCookieChanged = this.onCookieChanged.bind(this);
    if (!DebuggerServer.isInChildProcess) {
      this.getCookiesFromHost =
        cookieHelpers.getCookiesFromHost.bind(cookieHelpers);
      this.addCookieObservers =
        cookieHelpers.addCookieObservers.bind(cookieHelpers);
      this.removeCookieObservers =
        cookieHelpers.removeCookieObservers.bind(cookieHelpers);
      this.editCookie =
        cookieHelpers.editCookie.bind(cookieHelpers);
      this.removeCookie =
        cookieHelpers.removeCookie.bind(cookieHelpers);
      this.removeAllCookies =
        cookieHelpers.removeAllCookies.bind(cookieHelpers);
      this.removeAllSessionCookies =
        cookieHelpers.removeAllSessionCookies.bind(cookieHelpers);
      return;
    }
    const mm = this.conn.parentMessageManager;
    this.conn.setupInParent({
      module: "devtools/server/actors/storage",
      setupParent: "setupParentProcessForCookies",
    });
    this.getCookiesFromHost =
      callParentProcess.bind(null, "getCookiesFromHost");
    this.addCookieObservers =
      callParentProcess.bind(null, "addCookieObservers");
    this.removeCookieObservers =
      callParentProcess.bind(null, "removeCookieObservers");
    this.editCookie =
      callParentProcess.bind(null, "editCookie");
    this.removeCookie =
      callParentProcess.bind(null, "removeCookie");
    this.removeAllCookies =
      callParentProcess.bind(null, "removeAllCookies");
    this.removeAllSessionCookies =
      callParentProcess.bind(null, "removeAllSessionCookies");
    mm.addMessageListener("debug:storage-cookie-request-child",
                          cookieHelpers.handleParentRequest);
    // Synchronously forwards a method call to the parent process and
    // returns the (single) reply; cookies come back JSON-serialized.
    function callParentProcess(methodName, ...args) {
      const reply = mm.sendSyncMessage("debug:storage-cookie-request-parent", {
        method: methodName,
        args: args,
      });
      if (reply.length === 0) {
        console.error("ERR_DIRECTOR_CHILD_NO_REPLY from " + methodName);
      } else if (reply.length > 1) {
        console.error("ERR_DIRECTOR_CHILD_MULTIPLE_REPLIES from " + methodName);
      }
      const result = reply[0];
      if (methodName === "getCookiesFromHost") {
        return JSON.parse(result);
      }
      return result;
    }
  },
});
// Helpers that perform the actual cookie-service work. Used directly in
// single-process mode and via the message manager in e10s (see
// maybeSetupChildProcess / setupParentProcessForCookies).
var cookieHelpers = {
  // Enumerates all cookies stored for `host` under the given origin
  // attributes. Local files have no host, so "" is used instead.
  getCookiesFromHost(host, originAttributes) {
    // Local files have no host.
    if (host.startsWith("file:///")) {
      host = "";
    }
    host = trimHttpHttpsPort(host);
    return Array.from(
      Services.cookies.getCookiesFromHost(host, originAttributes));
  },
  /**
   * Apply the results of a cookie edit.
   *
   * @param {Object} data
   *        An object in the following format:
   *        {
   *          host: "http://www.mozilla.org",
   *          field: "value",
   *          editCookie: "name",
   *          oldValue: "%7BHello%7D",
   *          newValue: "%7BHelloo%7D",
   *          items: {
   *            name: "optimizelyBuckets",
   *            path: "/",
   *            host: ".mozilla.org",
   *            expires: "Mon, 02 Jun 2025 12:37:37 GMT",
   *            creationTime: "Tue, 18 Nov 2014 16:21:18 GMT",
   *            lastAccessed: "Wed, 17 Feb 2016 10:06:23 GMT",
   *            value: "%7BHelloo%7D",
   *            isDomain: "true",
   *            isSecure: "false",
   *            isHttpOnly: "false"
   *          }
   *        }
   */
  /* eslint-disable complexity */
  editCookie(data) {
    let {field, oldValue, newValue} = data;
    // If the identity field itself was edited, the cookie must be looked up
    // under its pre-edit value.
    const origName = field === "name" ? oldValue : data.items.name;
    const origHost = field === "host" ? oldValue : data.items.host;
    const origPath = field === "path" ? oldValue : data.items.path;
    let cookie = null;
    // Find the existing cookie matching the compound (name, host, path) key
    // and snapshot the properties needed to recreate it.
    const cookies = Services.cookies.getCookiesFromHost(origHost,
                                                        data.originAttributes || {});
    for (const nsiCookie of cookies) {
      if (nsiCookie.name === origName &&
          nsiCookie.host === origHost &&
          nsiCookie.path === origPath) {
        cookie = {
          host: nsiCookie.host,
          path: nsiCookie.path,
          name: nsiCookie.name,
          value: nsiCookie.value,
          isSecure: nsiCookie.isSecure,
          isHttpOnly: nsiCookie.isHttpOnly,
          isSession: nsiCookie.isSession,
          expires: nsiCookie.expires,
          originAttributes: nsiCookie.originAttributes,
        };
        break;
      }
    }
    if (!cookie) {
      // Nothing to edit.
      return;
    }
    // If the date is expired set it for 10 seconds in the future.
    const now = new Date();
    if (!cookie.isSession && (cookie.expires * 1000) <= now) {
      const tenSecondsFromNow = (now.getTime() + 10 * 1000) / 1000;
      cookie.expires = tenSecondsFromNow;
    }
    switch (field) {
      case "isSecure":
      case "isHttpOnly":
      case "isSession":
        // Boolean fields arrive as strings from the client.
        newValue = newValue === "true";
        break;
      case "expires":
        newValue = Date.parse(newValue) / 1000;
        if (isNaN(newValue)) {
          newValue = MAX_COOKIE_EXPIRY;
        }
        break;
      case "host":
      case "name":
      case "path":
        // Editing any key field means the old cookie must go away first;
        // the re-add below creates it under the new key.
        // Remove the edited cookie.
        Services.cookies.remove(origHost, origName, origPath,
                                false, cookie.originAttributes);
        break;
    }
    // Apply changes.
    cookie[field] = newValue;
    // cookie.isSession is not always set correctly on session cookies so we
    // need to trust cookie.expires instead.
    cookie.isSession = !cookie.expires;
    // Add the edited cookie.
    // NOTE(review): cookie.sameSite is never copied from nsiCookie above, so
    // this passes undefined here — confirm whether sameSite should be
    // preserved across edits.
    Services.cookies.add(
      cookie.host,
      cookie.path,
      cookie.name,
      cookie.value,
      cookie.isSecure,
      cookie.isHttpOnly,
      cookie.isSession,
      cookie.isSession ? MAX_COOKIE_EXPIRY : cookie.expires,
      cookie.originAttributes,
      cookie.sameSite
    );
  },
  /* eslint-enable complexity */
  // Shared removal routine. opts.name (if present) is the compound uniqueKey
  // and is split back into name/path; opts.domain, opts.path and opts.session
  // further narrow which cookies are removed.
  _removeCookies(host, opts = {}) {
    // We use a uniqueId to emulate compound keys for cookies. We need to
    // extract the cookie name to remove the correct cookie.
    if (opts.name) {
      const split = opts.name.split(SEPARATOR_GUID);
      opts.name = split[0];
      opts.path = split[2];
    }
    host = trimHttpHttpsPort(host);
    function hostMatches(cookieHost, matchHost) {
      if (cookieHost == null) {
        return matchHost == null;
      }
      if (cookieHost.startsWith(".")) {
        return ("." + matchHost).endsWith(cookieHost);
      }
      // NOTE(review): compares against the closed-over `host` rather than
      // the `matchHost` parameter; equivalent at the single call site below
      // but `matchHost` was presumably intended — confirm before reuse.
      return cookieHost == host;
    }
    const cookies = Services.cookies.getCookiesFromHost(host,
                                                        opts.originAttributes || {});
    for (const cookie of cookies) {
      if (hostMatches(cookie.host, host) &&
          (!opts.name || cookie.name === opts.name) &&
          (!opts.domain || cookie.host === opts.domain) &&
          (!opts.path || cookie.path === opts.path) &&
          (!opts.session || (!cookie.expires && !cookie.maxAge))) {
        Services.cookies.remove(
          cookie.host,
          cookie.name,
          cookie.path,
          false,
          cookie.originAttributes
        );
      }
    }
  },
  removeCookie(host, name, originAttributes) {
    if (name !== undefined) {
      this._removeCookies(host, { name, originAttributes });
    }
  },
  removeAllCookies(host, domain, originAttributes) {
    this._removeCookies(host, { domain, originAttributes });
  },
  removeAllSessionCookies(host, domain, originAttributes) {
    this._removeCookies(host, { domain, originAttributes, session: true });
  },
  addCookieObservers() {
    Services.obs.addObserver(cookieHelpers, "cookie-changed");
    return null;
  },
  removeCookieObservers() {
    Services.obs.removeObserver(cookieHelpers, "cookie-changed");
    return null;
  },
  // nsIObserver entry point for "cookie-changed"; unwraps the XPCOM subject
  // into nsICookie object(s) before forwarding to onCookieChanged.
  observe(subject, topic, data) {
    if (!subject) {
      return;
    }
    switch (topic) {
      case "cookie-changed":
        if (data === "batch-deleted") {
          const cookiesNoInterface = subject.QueryInterface(Ci.nsIArray);
          const cookies = [];
          for (let i = 0; i < cookiesNoInterface.length; i++) {
            const cookie = cookiesNoInterface.queryElementAt(i, Ci.nsICookie);
            cookies.push(cookie);
          }
          cookieHelpers.onCookieChanged(cookies, topic, data);
          return;
        }
        const cookie = subject.QueryInterface(Ci.nsICookie);
        cookieHelpers.onCookieChanged(cookie, topic, data);
        break;
    }
  },
  // Runs in the child process: dispatches messages sent from the parent
  // (currently only cookie-change notifications, JSON-serialized).
  handleParentRequest(msg) {
    switch (msg.json.method) {
      case "onCookieChanged":
        let [cookie, topic, data] = msg.data.args;
        cookie = JSON.parse(cookie);
        cookieHelpers.onCookieChanged(cookie, topic, data);
        break;
      }
  },
  // Runs in the parent process: dispatches sync requests sent from the
  // child via callParentProcess and returns the result.
  handleChildRequest(msg) {
    switch (msg.json.method) {
      case "getCookiesFromHost": {
        const host = msg.data.args[0];
        const originAttributes = msg.data.args[1];
        const cookies = cookieHelpers.getCookiesFromHost(host, originAttributes);
        return JSON.stringify(cookies);
      }
      case "addCookieObservers": {
        return cookieHelpers.addCookieObservers();
      }
      case "removeCookieObservers": {
        return cookieHelpers.removeCookieObservers();
      }
      case "editCookie": {
        const rowdata = msg.data.args[0];
        return cookieHelpers.editCookie(rowdata);
      }
      case "createNewCookie": {
        // NOTE(review): cookieHelpers defines no createNewCookie method in
        // this file — confirm whether this case is dead or the helper lives
        // elsewhere.
        const host = msg.data.args[0];
        const guid = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.createNewCookie(host, guid, originAttributes);
      }
      case "removeCookie": {
        const host = msg.data.args[0];
        const name = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeCookie(host, name, originAttributes);
      }
      case "removeAllCookies": {
        const host = msg.data.args[0];
        const domain = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeAllCookies(host, domain, originAttributes);
      }
      case "removeAllSessionCookies": {
        const host = msg.data.args[0];
        const domain = msg.data.args[1];
        const originAttributes = msg.data.args[2];
        return cookieHelpers.removeAllSessionCookies(host, domain, originAttributes);
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
};
/**
* E10S parent/child setup helpers
*/
exports.setupParentProcessForCookies = function({ mm, prefix }) {
cookieHelpers.onCookieChanged =
callChildProcess.bind(null, "onCookieChanged");
// listen for director-script requests from the child process
setMessageManager(mm);
function callChildProcess(methodName, ...args) {
if (methodName === "onCookieChanged") {
args[0] = JSON.stringify(args[0]);
}
try {
mm.sendAsyncMessage("debug:storage-cookie-request-child", {
method: methodName,
args: args,
});
} catch (e) {
// We may receive a NS_ERROR_NOT_INITIALIZED if the target window has
// been closed. This can legitimately happen in between test runs.
}
}
function setMessageManager(newMM) {
if (mm) {
mm.removeMessageListener("debug:storage-cookie-request-parent",
cookieHelpers.handleChildRequest);
}
mm = newMM;
if (mm) {
mm.addMessageListener("debug:storage-cookie-request-parent",
cookieHelpers.handleChildRequest);
}
}
return {
onBrowserSwap: setMessageManager,
onDisconnected: () => {
// Although "disconnected-from-child" implies that the child is already
// disconnected this is not the case. The disconnection takes place after
// this method has finished. This gives us chance to clean up items within
// the parent process e.g. observers.
cookieHelpers.removeCookieObservers();
setMessageManager(null);
},
};
};
/**
 * Helper method to create the overrides object required in
* StorageActors.createActor for Local Storage and Session Storage.
* This method exists as both Local Storage and Session Storage have almost
* identical actors.
*/
function getObjectForLocalOrSessionStorage(type) {
  return {
    // Names (keys) of all items stored for the given host, or an empty
    // array when the host has no Storage object cached.
    getNamesForHost(host) {
      const storage = this.hostVsStores.get(host);
      return storage ? Object.keys(storage) : [];
    },
    /**
     * Returns the stored {name, value} pairs for a host.
     *
     * @param {string} host
     *        The host whose storage should be read.
     * @param {string} [name]
     *        When given, only this single item is returned.
     * @return {array}
     *        Array of {name, value} objects; empty when the host has no
     *        Storage object.
     */
    getValuesForHost(host, name) {
      const storage = this.hostVsStores.get(host);
      // Single early guard (the previous version null-checked `storage`
      // twice and re-tested it in a redundant ternary).
      if (!storage) {
        return [];
      }
      if (name) {
        return [{ name, value: storage.getItem(name) }];
      }
      // local and session storage cannot be iterated over using Object.keys()
      // because it skips keys that are duplicated on the prototype
      // e.g. "key", "getKeys" so we need to gather the real keys using the
      // storage.key() function.
      const storageArray = [];
      for (let i = 0; i < storage.length; i++) {
        const key = storage.key(i);
        storageArray.push({
          name: key,
          value: storage.getItem(key),
        });
      }
      return storageArray;
    },
    // Caches the window's Storage object (localStorage or sessionStorage)
    // for the host; access can throw for some origins, so warn and move on.
    populateStoresForHost(host, window) {
      try {
        this.hostVsStores.set(host, window[type]);
      } catch (ex) {
        console.warn(`Failed to enumerate ${type} for host ${host}: ${ex}`);
      }
    },
    // Rebuilds the host -> Storage map from every known window.
    populateStoresForHosts() {
      this.hostVsStores = new Map();
      for (const window of this.windows) {
        const host = this.getHostName(window.location);
        if (host) {
          this.populateStoresForHost(host, window);
        }
      }
    },
    // Column metadata for the storage inspector table UI.
    async getFields() {
      return [
        { name: "name", editable: true },
        { name: "value", editable: true },
      ];
    },
    // Creates a placeholder item (name = guid) with the default value.
    async addItem(guid, host) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.setItem(guid, DEFAULT_VALUE);
    },
    /**
     * Edit localStorage or sessionStorage fields.
     *
     * @param {Object} data
     *        See editCookie() for format details.
     */
    async editItem({host, field, oldValue, items}) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      if (field === "name") {
        // Renaming: drop the entry under the old key before re-adding.
        storage.removeItem(oldValue);
      }
      storage.setItem(items.name, items.value);
    },
    async removeItem(host, name) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.removeItem(name);
    },
    async removeAll(host) {
      const storage = this.hostVsStores.get(host);
      if (!storage) {
        return;
      }
      storage.clear();
    },
    // nsIObserver entry point for the dom-storage2-changed topics; converts
    // the notification into an "added"/"changed"/"deleted"/"cleared" update
    // for the client. A null key means the whole storage was cleared.
    observe(subject, topic, data) {
      if ((topic != "dom-storage2-changed" &&
           topic != "dom-private-storage2-changed") ||
          data != type) {
        return null;
      }
      const host = this.getSchemaAndHost(subject.url);
      if (!this.hostVsStores.has(host)) {
        return null;
      }
      let action = "changed";
      if (subject.key == null) {
        return this.storageActor.update("cleared", type, [host]);
      } else if (subject.oldValue == null) {
        action = "added";
      } else if (subject.newValue == null) {
        action = "deleted";
      }
      const updateData = {};
      updateData[host] = [subject.key];
      return this.storageActor.update(action, type, updateData);
    },
    /**
     * Given a url, correctly determine its protocol + hostname part.
     */
    getSchemaAndHost(url) {
      const uri = Services.io.newURI(url);
      if (!uri.host) {
        return uri.spec;
      }
      return uri.scheme + "://" + uri.hostPort;
    },
    // Serializes an item for the wire; values travel as LongStringActors.
    toStoreObject(item) {
      if (!item) {
        return null;
      }
      return {
        name: item.name,
        value: new LongStringActor(this.conn, item.value || ""),
      };
    },
  };
}
/**
* The Local Storage actor and front.
*/
// Both actors share the implementation returned by
// getObjectForLocalOrSessionStorage and observe the same storage topics.
StorageActors.createActor({
  typeName: "localStorage",
  observationTopics: ["dom-storage2-changed", "dom-private-storage2-changed"],
}, getObjectForLocalOrSessionStorage("localStorage"));
/**
 * The Session Storage actor and front.
 */
StorageActors.createActor({
  typeName: "sessionStorage",
  observationTopics: ["dom-storage2-changed", "dom-private-storage2-changed"],
}, getObjectForLocalOrSessionStorage("sessionStorage"));
// Helpers for the extensionStorage actor's parent/child process messaging.
// Parent-side methods do the ExtensionStorageIDB work; child-side methods
// proxy requests over the message managers and resolve pending promises.
const extensionStorageHelpers = {
  // Pending child->parent requests keyed by method name.
  // NOTE(review): keying by method name means two in-flight calls to the
  // same method would clobber each other — confirm callers never overlap.
  unresolvedPromises: new Map(),
  // Parent-side storage.onChanged listeners keyed by message name, so they
  // can be removed on disconnect.
  onChangedListeners: new Map(),
  // Sets the parent process message manager
  setPpmm(ppmm) {
    this.ppmm = ppmm;
  },
  // A promise in the main process has resolved, and we need to pass the return value(s)
  // back to the child process
  backToChild(...args) {
    Services.mm.broadcastAsyncMessage("debug:storage-extensionStorage-request-child", {
      method: "backToChild",
      args: args,
    });
  },
  // The main process does not require an extension context to select the backend
  // Bug 1542038, 1542039: Each storage area will need its own implementation, as
  // they use different storage backends.
  async selectBackendInParent(addonId) {
    const {extension} = WebExtensionPolicy.getByID(addonId);
    const parentResult = await ExtensionStorageIDB.selectBackend({extension});
    const result = {
      ...parentResult,
      // Received as a StructuredCloneHolder, so we need to deserialize
      storagePrincipal: parentResult.storagePrincipal.deserialize(this, true),
    };
    // Subscribe a listener for the storage.onChanged API event notifications
    // and keep track of it to remove it when the debugger is being disconnected.
    const messageName = `Extension:StorageLocalOnChanged:${extension.uuid}`;
    const onChangedListener = ({name, data}) => {
      // Relay the change to every child process; the actor filters by UUID.
      Services.mm.broadcastAsyncMessage(DEVTOOLS_EXT_STORAGELOCAL_CHANGED, {
        changes: data,
        extensionUUID: extension.uuid,
      });
    };
    Services.ppmm.addMessageListener(messageName, onChangedListener);
    this.onChangedListeners.set(messageName, onChangedListener);
    return this.backToChild("selectBackendInParent", result);
  },
  // Parent-side cleanup: drop every storage.onChanged relay listener.
  onDisconnected() {
    for (const [messageName, listener] of this.onChangedListeners) {
      Services.ppmm.removeMessageListener(messageName, listener);
    }
  },
  // Runs in the main process. This determines what code to execute based on the message
  // received from the child process.
  async handleChildRequest(msg) {
    switch (msg.json.method) {
      case "selectBackendInParent": {
        const addonId = msg.data.args[0];
        const result = await extensionStorageHelpers.selectBackendInParent(addonId);
        return result;
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
  // Runs in the child process. This determines what code to execute based on the message
  // received from the parent process.
  handleParentRequest(msg) {
    switch (msg.json.method) {
      case "backToChild": {
        // Resolve the promise that callParentProcessAsync parked for this
        // method, if it is still pending.
        const [func, rv] = msg.json.args;
        const deferred = this.unresolvedPromises.get(func);
        if (deferred) {
          this.unresolvedPromises.delete(func);
          deferred.resolve(rv);
        }
        break;
      }
      default:
        console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
        throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
    }
  },
  // Child-side: sends an async request to the parent and returns a promise
  // resolved later by handleParentRequest's "backToChild" branch.
  callParentProcessAsync(methodName, ...args) {
    const deferred = defer();
    this.unresolvedPromises.set(methodName, deferred);
    this.ppmm.sendAsyncMessage("debug:storage-extensionStorage-request-parent", {
      method: methodName,
      args: args,
    });
    return deferred.promise;
  },
};
/**
* E10S parent/child setup helpers
* Add a message listener in the parent process to receive messages from the child
* process.
*/
exports.setupParentProcessForExtensionStorage = function({mm, prefix}) {
// listen for director-script requests from the child process
setMessageManager(mm);
function setMessageManager(newMM) {
if (mm) {
mm.removeMessageListener("debug:storage-extensionStorage-request-parent",
extensionStorageHelpers.handleChildRequest);
}
mm = newMM;
if (mm) {
mm.addMessageListener("debug:storage-extensionStorage-request-parent",
extensionStorageHelpers.handleChildRequest);
}
}
return {
onBrowserSwap: setMessageManager,
onDisconnected: () => {
// Although "disconnected-from-child" implies that the child is already
// disconnected this is not the case. The disconnection takes place after
// this method has finished. This gives us chance to clean up items within
// the parent process e.g. observers.
setMessageManager(null);
extensionStorageHelpers.onDisconnected();
},
};
};
/**
* The Extension Storage actor.
*/
if (Services.prefs.getBoolPref(EXTENSION_STORAGE_ENABLED_PREF)) {
StorageActors.createActor({
typeName: "extensionStorage",
}, {
    /**
     * Wires the actor to its parent storage actor, sets up the child-process
     * messaging for ExtensionStorageIDB, subscribes to storage.local change
     * relays and to window lifecycle events.
     */
    initialize(storageActor) {
      protocol.Actor.prototype.initialize.call(this, null);
      this.storageActor = storageActor;
      // Id of the addon being debugged, provided by the target actor.
      this.addonId = this.storageActor.parentActor.addonId;
      // Map<host, ExtensionStorageIDB db connection>
      // Bug 1542038, 1542039: Each storage area will need its own
      // dbConnectionForHost, as they each have different storage backends.
      // Anywhere dbConnectionForHost is used, we need to know the storage
      // area to access the correct database.
      this.dbConnectionForHost = new Map();
      this.setupChildProcess();
      this.onStorageChange = this.onStorageChange.bind(this);
      // Receive storage.local change notifications relayed from the parent.
      this.conn.parentMessageManager.addMessageListener(
        DEVTOOLS_EXT_STORAGELOCAL_CHANGED, this.onStorageChange);
      this.populateStoresForHosts();
      this.onWindowReady = this.onWindowReady.bind(this);
      this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
      this.storageActor.on("window-ready", this.onWindowReady);
      this.storageActor.on("window-destroyed", this.onWindowDestroyed);
    },
destroy() {
this.conn.parentMessageManager.removeMessageListener(
DEVTOOLS_EXT_STORAGELOCAL_CHANGED, this.onStorageChange);
this.storageActor.off("window-ready", this.onWindowReady);
this.storageActor.off("window-destroyed", this.onWindowDestroyed);
this.hostVsStores.clear();
protocol.Actor.prototype.destroy.call(this);
this.storageActor = null;
},
setupChildProcess() {
const ppmm = this.conn.parentMessageManager;
extensionStorageHelpers.setPpmm(ppmm);
this.conn.setupInParent({
module: "devtools/server/actors/storage",
setupParent: "setupParentProcessForExtensionStorage",
});
this.selectBackendInParent =
extensionStorageHelpers.callParentProcessAsync.bind(
extensionStorageHelpers,
"selectBackendInParent"
);
// Add a message listener in the child process to receive messages from the parent
// process
ppmm.addMessageListener(
"debug:storage-extensionStorage-request-child",
extensionStorageHelpers.handleParentRequest.bind(extensionStorageHelpers),
);
},
/**
* This fires when the extension changes storage data while the storage
* inspector is open. Ensures this.hostVsStores stays up-to-date and
* passes the change on to update the client.
*/
onStorageChange({name, data}) {
const host = `moz-extension://${data.extensionUUID}`;
const changes = data.changes;
const storeMap = this.hostVsStores.get(host);
if (!storeMap) {
return;
}
for (const key in changes) {
const storageChange = changes[key];
let {newValue, oldValue} = storageChange;
if (newValue && typeof newValue === "object"
&& Cu.getClassName(newValue, true) === "StructuredCloneHolder") {
newValue = newValue.deserialize(this);
}
if (oldValue && typeof oldValue === "object"
&& Cu.getClassName(oldValue, true) === "StructuredCloneHolder") {
oldValue = oldValue.deserialize(this);
}
let action;
if (typeof newValue === "undefined") {
action = "deleted";
storeMap.delete(key);
} else if (typeof oldValue === "undefined") {
action = "added";
storeMap.set(key, newValue);
} else {
action = "changed";
storeMap.set(key, newValue);
}
this.storageActor.update(action, this.typeName, {[host]: [key]});
}
},
/**
* Purpose of this method is same as populateStoresForHosts but this is async.
* This exact same operation cannot be performed in populateStoresForHosts
* method, as that method is called in initialize method of the actor, which
* cannot be asynchronous.
*/
async preListStores() {
// Ensure the actor's target is an extension and it is enabled
if (!this.addonId || !(WebExtensionPolicy.getByID(this.addonId))) {
return;
}
// Bug 1542038, 1542039: Each storage area will need its own this.hostVsStores
// or this actor will need to deviate from how this.hostVsStores is defined in the
// framework to associate each storage item with a storage area. Any methods
// that use it will also need to be updated (e.g. getNamesForHost).
this.hostVsStores = new Map();
const extension = ExtensionProcessScript.getExtensionChild(this.addonId);
await this.populateStoresForHost(`moz-extension://${extension.uuid}`);
},
/**
* This method is overriden and left blank as for extensionStorage, this operation
* cannot be performed synchronously. Thus, the preListStores method exists to
* do the same task asynchronously.
*/
populateStoresForHosts() {},
/**
* This method asynchronously reads the storage data for the target extension
* and caches this data into this.hostVsStores.
* @param {String} host - the hostname for the extension
*/
async populateStoresForHost(host) {
if (this.hostVsStores.has(host)) {
return;
}
const extension = ExtensionProcessScript.getExtensionChild(this.addonId);
if (!extension || !(extension.hasPermission("storage"))) {
return;
}
const storagePrincipal = await this.getStoragePrincipal(extension);
if (!storagePrincipal) {
return;
}
const db = await ExtensionStorageIDB.open(storagePrincipal);
this.dbConnectionForHost.set(host, db);
const data = await db.get();
const storeMap = new Map();
for (const [key, value] of Object.entries(data)) {
storeMap.set(key, value);
}
this.hostVsStores.set(host, storeMap);
// Show the storage actor in the add-on storage inspector even when there
// is no extension page currently open
const storageData = {};
storageData[host] = this.getNamesForHost(host);
this.storageActor.update("added", this.typeName, storageData);
},
async getStoragePrincipal(extension) {
const {
backendEnabled,
storagePrincipal,
} = await this.selectBackendInParent(extension.id);
if (!backendEnabled) {
// IDB backend disabled; give up.
return null;
}
return storagePrincipal;
},
getValuesForHost(host, name) {
const result = [];
if (!this.hostVsStores.has(host)) {
return result;
}
if (name) {
return [{name, value: this.hostVsStores.get(host).get(name)}];
}
for (const [key, value] of Array.from(this.hostVsStores.get(host).entries())) {
result.push({name: key, value});
}
return result;
},
/**
* Converts a storage item to an "extensionobject" as defined in
* devtools/shared/specs/storage.js
* @param {Object} item - The storage item to convert
* @param {String} item.name - The storage item key
* @param {*} item.value - The storage item value
* @return {extensionobject}
*/
toStoreObject(item) {
if (!item) {
return null;
}
const {name, value} = item;
let newValue;
if (typeof value === "string") {
newValue = value;
} else {
try {
newValue = JSON.stringify(value) || String(value);
} catch (error) {
// throws for bigint
newValue = String(value);
}
// JavaScript objects that are not JSON stringifiable will be represented
// by the string "Object"
if (newValue === "{}") {
newValue = "Object";
}
}
// FIXME: Bug 1318029 - Due to a bug that is thrown whenever a
// LongStringActor string reaches DebuggerServer.LONG_STRING_LENGTH we need
// to trim the value. When the bug is fixed we should stop trimming the
// string here.
const maxLength = DebuggerServer.LONG_STRING_LENGTH - 1;
if (newValue.length > maxLength) {
newValue = newValue.substr(0, maxLength);
}
return {
name,
value: new LongStringActor(this.conn, newValue || ""),
area: "local", // Bug 1542038, 1542039: set the correct storage area
};
},
getFields() {
return [
// name needs to be editable for the addItem case, where a temporary key-value
// pair is created that can later be edited via editItem.
{ name: "name", editable: true },
{ name: "value", editable: true },
{ name: "area", editable: false },
];
},
});
}
StorageActors.createActor({
typeName: "Cache",
}, {
// Build a CacheStorage instance scoped to `host`, using the inspected
// document's origin attributes so the right storage jar is targeted.
// Despite the name, this returns the CacheStorage object itself (or [] if
// the window does not expose CacheStorage).
async getCachesForHost(host) {
const uri = Services.io.newURI(host);
const attrs = this.storageActor
.document
.nodePrincipal
.originAttributes;
const principal =
Services.scriptSecurityManager.createCodebasePrincipal(uri, attrs);
// The first argument tells if you want to get |content| cache or |chrome|
// cache.
// The |content| cache is the cache explicitely named by the web content
// (service worker or web page).
// The |chrome| cache is the cache implicitely cached by the platform,
// hosting the source file of the service worker.
const { CacheStorage } = this.storageActor.window;
if (!CacheStorage) {
return [];
}
const cache = new CacheStorage("content", principal);
return cache;
},
// Async counterpart of populateStoresForHosts (see comment on it below).
async preListStores() {
for (const host of this.hosts) {
await this.populateStoresForHost(host);
}
},
// Protocol form: advertise known hosts and their cache names.
form() {
const hosts = {};
for (const host of this.hosts) {
hosts[host] = this.getNamesForHost(host);
}
return {
actor: this.actorID,
hosts: hosts,
};
},
getNamesForHost(host) {
// UI code expect each name to be a JSON string of an array :/
return [...this.hostVsStores.get(host).keys()].map(a => {
return JSON.stringify([a]);
});
},
// Return {url, status} entries for one named Cache under `host`.
async getValuesForHost(host, name) {
if (!name) {
return [];
}
// UI is weird and expect a JSON stringified array... and pass it back :/
name = JSON.parse(name)[0];
const cache = this.hostVsStores.get(host).get(name);
const requests = await cache.keys();
const results = [];
for (const request of requests) {
let response = await cache.match(request);
// Unwrap the response to get access to all its properties if the
// response happen to be 'opaque', when it is a Cross Origin Request.
response = response.cloneUnfiltered();
results.push(await this.processEntry(request, response));
}
return results;
},
// Reduce a cached request/response pair to the fields shown in the table.
async processEntry(request, response) {
return {
url: String(request.url),
status: String(response.statusText),
};
},
// Column definitions for the storage inspector table.
async getFields() {
return [
{ name: "url", editable: false },
{ name: "status", editable: false },
];
},
// Cache a Map of cacheName -> Cache object for `host` in hostVsStores.
// Enumeration failures are logged but leave an (empty) entry in place.
async populateStoresForHost(host) {
const storeMap = new Map();
const caches = await this.getCachesForHost(host);
try {
for (const name of (await caches.keys())) {
storeMap.set(name, (await caches.open(name)));
}
} catch (ex) {
console.warn(`Failed to enumerate CacheStorage for host ${host}: ${ex}`);
}
this.hostVsStores.set(host, storeMap);
},
/**
* This method is overriden and left blank as for Cache Storage, this
* operation cannot be performed synchronously. Thus, the preListStores
* method exists to do the same task asynchronously.
*/
populateStoresForHosts() {
this.hostVsStores = new Map();
},
/**
* Given a url, correctly determine its protocol + hostname part.
*/
getSchemaAndHost(url) {
const uri = Services.io.newURI(url);
return uri.scheme + "://" + uri.hostPort;
},
// Cache entries are already plain serializable objects; pass them through.
toStoreObject(item) {
return item;
},
// Delete a whole Cache (name = '["cacheName"]') or a single cached request
// (name = '["cacheName", "url"]').
async removeItem(host, name) {
const cacheMap = this.hostVsStores.get(host);
if (!cacheMap) {
return;
}
const parsedName = JSON.parse(name);
if (parsedName.length == 1) {
// Delete the whole Cache object
const [ cacheName ] = parsedName;
cacheMap.delete(cacheName);
const cacheStorage = await this.getCachesForHost(host);
await cacheStorage.delete(cacheName);
this.onItemUpdated("deleted", host, [ cacheName ]);
} else if (parsedName.length == 2) {
// Delete one cached request
const [ cacheName, url ] = parsedName;
const cache = cacheMap.get(cacheName);
if (cache) {
await cache.delete(url);
this.onItemUpdated("deleted", host, [ cacheName, url ]);
}
}
},
// Remove every request stored in one named Cache; the Cache itself stays.
async removeAll(host, name) {
const cacheMap = this.hostVsStores.get(host);
if (!cacheMap) {
return;
}
const parsedName = JSON.parse(name);
// Only a Cache object is a valid object to clear
if (parsedName.length == 1) {
const [ cacheName ] = parsedName;
const cache = cacheMap.get(cacheName);
if (cache) {
const keys = await cache.keys();
await Promise.all(keys.map(key => cache.delete(key)));
this.onItemUpdated("cleared", host, [ cacheName ]);
}
}
},
/**
* CacheStorage API doesn't support any notifications, we must fake them
*/
onItemUpdated(action, host, path) {
this.storageActor.update(action, "Cache", {
[host]: [ JSON.stringify(path) ],
});
},
});
/**
* Code related to the Indexed DB actor and front
*/
// Metadata holder objects for various components of Indexed DB
/**
* Meta data object for a particular index in an object store
*
* @param {IDBIndex} index
* The particular index from the object store.
*/
/**
 * Meta data object for a particular index in an object store.
 *
 * @param {IDBIndex} index
 *        The particular index from the object store.
 */
function IndexMetadata(index) {
  const {name, keyPath, unique, multiEntry} = index;
  this._name = name;
  this._keyPath = keyPath;
  this._unique = unique;
  this._multiEntry = multiEntry;
}

IndexMetadata.prototype = {
  // Serialize to a plain object suitable for sending over the wire.
  toObject() {
    const {_name, _keyPath, _unique, _multiEntry} = this;
    return {
      name: _name,
      keyPath: _keyPath,
      unique: _unique,
      multiEntry: _multiEntry,
    };
  },
};
/**
* Meta data object for a particular object store in a db
*
* @param {IDBObjectStore} objectStore
* The particular object store from the db.
*/
/**
 * Meta data object for a particular object store in a db.
 *
 * @param {IDBObjectStore} objectStore
 *        The particular object store from the db.
 */
function ObjectStoreMetadata(objectStore) {
  this._name = objectStore.name;
  this._keyPath = objectStore.keyPath;
  this._autoIncrement = objectStore.autoIncrement;
  // Array of [plainIndexDescriptor, IndexMetadata] pairs. NOTE(review):
  // patchMetadataMapsAndProtos appears to convert this into a Map after
  // crossing a process boundary — confirm before relying on Map semantics.
  this._indexes = [];
  for (const indexName of objectStore.indexNames) {
    const index = objectStore.index(indexName);
    const descriptor = {
      keypath: index.keyPath,
      multiEntry: index.multiEntry,
      name: index.name,
      objectStore: {
        autoIncrement: index.objectStore.autoIncrement,
        indexNames: [...index.objectStore.indexNames],
        keyPath: index.objectStore.keyPath,
        name: index.objectStore.name,
      },
    };
    this._indexes.push([descriptor, new IndexMetadata(index)]);
  }
}

ObjectStoreMetadata.prototype = {
  // Serialize to a plain object; indexes are JSON-stringified for the wire.
  toObject() {
    const indexes = [...this._indexes.values()].map(index => index.toObject());
    return {
      name: this._name,
      keyPath: this._keyPath,
      autoIncrement: this._autoIncrement,
      indexes: JSON.stringify(indexes),
    };
  },
};
/**
* Meta data object for a particular indexed db in a host.
*
* @param {string} origin
* The host associated with this indexed db.
* @param {IDBDatabase} db
* The particular indexed db.
* @param {String} storage
* Storage type, either "temporary", "default" or "persistent".
*/
/**
 * Meta data object for a particular indexed db in a host.
 *
 * @param {string} origin
 *        The host associated with this indexed db.
 * @param {IDBDatabase} db
 *        The particular indexed db.
 * @param {String} storage
 *        Storage type, either "temporary", "default" or "persistent".
 */
function DatabaseMetadata(origin, db, storage) {
  this._origin = origin;
  this._name = db.name;
  this._version = db.version;
  // Array of [storeName, ObjectStoreMetadata] pairs. NOTE(review):
  // patchMetadataMapsAndProtos appears to rebuild this as a Map after
  // cross-process transfer, which is what toObject's `.size` relies on.
  this._objectStores = [];
  this.storage = storage;

  if (db.objectStoreNames.length) {
    // Snapshot every store via a single readonly transaction.
    const transaction = db.transaction(db.objectStoreNames, "readonly");
    for (const storeName of transaction.objectStoreNames) {
      const store = transaction.objectStore(storeName);
      this._objectStores.push([storeName, new ObjectStoreMetadata(store)]);
    }
  }
}

DatabaseMetadata.prototype = {
  get objectStores() {
    return this._objectStores;
  },

  // Serialize to a plain object for the wire.
  toObject() {
    return {
      uniqueKey: `${this._name}${SEPARATOR_GUID}${this.storage}`,
      name: this._name,
      storage: this.storage,
      origin: this._origin,
      version: this._version,
      objectStores: this._objectStores.size,
    };
  },
};
StorageActors.createActor({
typeName: "indexedDB",
}, {
// Set up per-target state; database access is delegated to the parent
// process via maybeSetupChildProcess when running in a child process.
initialize(storageActor) {
protocol.Actor.prototype.initialize.call(this, null);
this.storageActor = storageActor;
this.maybeSetupChildProcess();
// Cache of entry counts, keyed by host + db + objectStore + index.
this.objectsSize = {};
this.storageActor = storageActor;
this.onWindowReady = this.onWindowReady.bind(this);
this.onWindowDestroyed = this.onWindowDestroyed.bind(this);
this.storageActor.on("window-ready", this.onWindowReady);
this.storageActor.on("window-destroyed", this.onWindowDestroyed);
},
// Tear down listeners registered in initialize and drop cached state.
destroy() {
this.hostVsStores.clear();
this.objectsSize = null;
this.storageActor.off("window-ready", this.onWindowReady);
this.storageActor.off("window-destroyed", this.onWindowDestroyed);
protocol.Actor.prototype.destroy.call(this);
this.storageActor = null;
},
/**
* Returns a list of currently known hosts for the target window. This list
* contains unique hosts from the window, all inner windows and all permanent
* indexedDB hosts defined inside the browser.
*/
async getHosts() {
// Add internal hosts to this._internalHosts, which will be picked up by
// the this.hosts getter. Because this.hosts is a property on the default
// storage actor and inherited by all storage actors we have to do it this
// way.
this._internalHosts = await this.getInternalHosts();
return this.hosts;
},
/**
* Remove an indexedDB database from given host with a given name.
*/
async removeDatabase(host, name) {
const win = this.storageActor.getWindowFromHost(host);
if (!win) {
return { error: `Window for host ${host} not found` };
}
const principal = win.document.nodePrincipal;
return this.removeDB(host, principal, name);
},
// Clear all records in one object store; `name` is a JSON '[db, store]'.
async removeAll(host, name) {
const [db, store] = JSON.parse(name);
const win = this.storageActor.getWindowFromHost(host);
if (!win) {
return;
}
const principal = win.document.nodePrincipal;
this.clearDBStore(host, principal, db, store);
},
// Delete a single record; `name` is a JSON '[db, store, id]'.
async removeItem(host, name) {
const [db, store, id] = JSON.parse(name);
const win = this.storageActor.getWindowFromHost(host);
if (!win) {
return;
}
const principal = win.document.nodePrincipal;
this.removeDBRecord(host, principal, db, store, id);
},
/**
* This method is overriden and left blank as for indexedDB, this operation
* cannot be performed synchronously. Thus, the preListStores method exists to
* do the same task asynchronously.
*/
populateStoresForHosts() {},
// Names are JSON arrays: one [db, objectStore] entry per store, or a bare
// [db] entry when the database has no object stores.
getNamesForHost(host) {
const names = [];
for (const [dbName, {objectStores}] of this.hostVsStores.get(host)) {
if (objectStores.size) {
for (const objectStore of objectStores.keys()) {
names.push(JSON.stringify([dbName, objectStore]));
}
} else {
names.push(JSON.stringify([dbName]));
}
}
return names;
},
/**
* Returns the total number of entries for various types of requests to
* getStoreObjects for Indexed DB actor.
*
* @param {string} host
* The host for the request.
* @param {array:string} names
* Array of stringified name objects for indexed db actor.
* The request type depends on the length of any parsed entry from this
* array. 0 length refers to request for the whole host. 1 length
* refers to request for a particular db in the host. 2 length refers
* to a particular object store in a db in a host. 3 length refers to
* particular items of an object store in a db in a host.
* @param {object} options
* An options object containing following properties:
* - index {string} The IDBIndex for the object store in the db.
*/
getObjectsSize(host, names, options) {
// In Indexed DB, we are interested in only the first name, as the pattern
// should follow in all entries.
const name = names[0];
const parsedName = JSON.parse(name);
if (parsedName.length == 3) {
// This is the case where specific entries from an object store were
// requested
return names.length;
} else if (parsedName.length == 2) {
// This is the case where all entries from an object store are requested.
const index = options.index;
const [db, objectStore] = parsedName;
if (this.objectsSize[host + db + objectStore + index]) {
return this.objectsSize[host + db + objectStore + index];
}
} else if (parsedName.length == 1) {
// This is the case where details of all object stores in a db are
// requested.
if (this.hostVsStores.has(host) &&
this.hostVsStores.get(host).has(parsedName[0])) {
return this.hostVsStores.get(host).get(parsedName[0]).objectStores.size;
}
} else if (!parsedName || !parsedName.length) {
// This is the case were details of all dbs in a host are requested.
if (this.hostVsStores.has(host)) {
return this.hostVsStores.get(host).size;
}
}
return 0;
},
/**
* Purpose of this method is same as populateStoresForHosts but this is async.
* This exact same operation cannot be performed in populateStoresForHosts
* method, as that method is called in initialize method of the actor, which
* cannot be asynchronous.
*/
async preListStores() {
this.hostVsStores = new Map();
for (const host of await this.getHosts()) {
await this.populateStoresForHost(host);
}
},
// Fetch metadata for every database under `host` and cache it keyed by
// the display name "name (storage)".
async populateStoresForHost(host) {
const storeMap = new Map();
const win = this.storageActor.getWindowFromHost(host);
const principal = this.getPrincipal(win);
const {names} = await this.getDBNamesForHost(host, principal);
for (const {name, storage} of names) {
let metadata = await this.getDBMetaData(host, principal, name, storage);
// Restore Map/prototype structure lost during cross-process transfer.
metadata = indexedDBHelpers.patchMetadataMapsAndProtos(metadata);
storeMap.set(`${name} (${storage})`, metadata);
}
this.hostVsStores.set(host, storeMap);
},
/**
* Returns the over-the-wire implementation of the indexed db entity.
*/
toStoreObject(item) {
if (!item) {
return null;
}
if ("indexes" in item) {
// Object store meta data
return {
objectStore: item.name,
keyPath: item.keyPath,
autoIncrement: item.autoIncrement,
indexes: item.indexes,
};
}
if ("objectStores" in item) {
// DB meta data
return {
uniqueKey: `${item.name} (${item.storage})`,
db: item.name,
storage: item.storage,
origin: item.origin,
version: item.version,
objectStores: item.objectStores,
};
}
let value = JSON.stringify(item.value);
// FIXME: Bug 1318029 - Due to a bug that is thrown whenever a
// LongStringActor string reaches DebuggerServer.LONG_STRING_LENGTH we need
// to trim the value. When the bug is fixed we should stop trimming the
// string here.
const maxLength = DebuggerServer.LONG_STRING_LENGTH - 1;
if (value.length > maxLength) {
value = value.substr(0, maxLength);
}
// Indexed db entry
return {
name: item.name,
value: new LongStringActor(this.conn, value),
};
},
// Protocol form: advertise known hosts and their database/store names.
form() {
const hosts = {};
for (const host of this.hosts) {
hosts[host] = this.getNamesForHost(host);
}
return {
actor: this.actorID,
hosts: hosts,
};
},
// Forward change notifications to the client, dropping deleted databases
// from the local cache first.
onItemUpdated(action, host, path) {
// Database was removed, remove it from stores map
if (action === "deleted" && path.length === 1) {
if (this.hostVsStores.has(host)) {
this.hostVsStores.get(host).delete(path[0]);
}
}
this.storageActor.update(action, "indexedDB", {
[host]: [ JSON.stringify(path) ],
});
},
// In the parent process, bind helpers directly; in a child process, proxy
// every helper call to the parent over the message manager.
maybeSetupChildProcess() {
if (!DebuggerServer.isInChildProcess) {
// Parent process: results come back synchronously, no marshalling.
this.backToChild = (func, rv) => rv;
this.clearDBStore = indexedDBHelpers.clearDBStore;
this.findIDBPathsForHost = indexedDBHelpers.findIDBPathsForHost;
this.findSqlitePathsForHost = indexedDBHelpers.findSqlitePathsForHost;
this.findStorageTypePaths = indexedDBHelpers.findStorageTypePaths;
this.getDBMetaData = indexedDBHelpers.getDBMetaData;
this.getDBNamesForHost = indexedDBHelpers.getDBNamesForHost;
this.getNameFromDatabaseFile = indexedDBHelpers.getNameFromDatabaseFile;
this.getObjectStoreData = indexedDBHelpers.getObjectStoreData;
this.getSanitizedHost = indexedDBHelpers.getSanitizedHost;
this.getValuesForHost = indexedDBHelpers.getValuesForHost;
this.openWithPrincipal = indexedDBHelpers.openWithPrincipal;
this.removeDB = indexedDBHelpers.removeDB;
this.removeDBRecord = indexedDBHelpers.removeDBRecord;
this.splitNameAndStorage = indexedDBHelpers.splitNameAndStorage;
this.getInternalHosts = indexedDBHelpers.getInternalHosts;
return;
}
const mm = this.conn.parentMessageManager;
this.conn.setupInParent({
module: "devtools/server/actors/storage",
setupParent: "setupParentProcessForIndexedDB",
});
// callParentProcessAsync is a hoisted function declaration (defined at the
// bottom of this method), so these bindings are safe.
this.getDBMetaData = callParentProcessAsync.bind(null, "getDBMetaData");
this.splitNameAndStorage = callParentProcessAsync.bind(null, "splitNameAndStorage");
this.getInternalHosts = callParentProcessAsync.bind(null, "getInternalHosts");
this.getDBNamesForHost = callParentProcessAsync.bind(null, "getDBNamesForHost");
this.getValuesForHost = callParentProcessAsync.bind(null, "getValuesForHost");
this.removeDB = callParentProcessAsync.bind(null, "removeDB");
this.removeDBRecord = callParentProcessAsync.bind(null, "removeDBRecord");
this.clearDBStore = callParentProcessAsync.bind(null, "clearDBStore");
mm.addMessageListener("debug:storage-indexedDB-request-child", msg => {
switch (msg.json.method) {
case "backToChild": {
// A parent-process helper finished; resolve the matching pending call.
const [func, rv] = msg.json.args;
const deferred = unresolvedPromises.get(func);
if (deferred) {
unresolvedPromises.delete(func);
deferred.resolve(rv);
}
break;
}
case "onItemUpdated": {
const [action, host, path] = msg.json.args;
this.onItemUpdated(action, host, path);
}
}
});
const unresolvedPromises = new Map();
// Send a request to the parent process and return a promise resolved when
// the corresponding "backToChild" reply arrives.
function callParentProcessAsync(methodName, ...args) {
const deferred = defer();
unresolvedPromises.set(methodName, deferred);
mm.sendAsyncMessage("debug:storage-indexedDB-request-parent", {
method: methodName,
args: args,
});
return deferred.promise;
}
},
// Column definitions for the storage inspector table, depending on the
// level of detail being displayed.
async getFields(subType) {
switch (subType) {
// Detail of database
case "database":
return [
{ name: "objectStore", editable: false },
{ name: "keyPath", editable: false },
{ name: "autoIncrement", editable: false },
{ name: "indexes", editable: false },
];
// Detail of object store
case "object store":
return [
{ name: "name", editable: false },
{ name: "value", editable: false },
];
// Detail of indexedDB for one origin
default:
return [
{ name: "uniqueKey", editable: false, private: true },
{ name: "db", editable: false },
{ name: "storage", editable: false },
{ name: "origin", editable: false },
{ name: "version", editable: false },
{ name: "objectStores", editable: false },
];
}
},
});
var indexedDBHelpers = {
backToChild(...args) {
Services.mm.broadcastAsyncMessage("debug:storage-indexedDB-request-child", {
method: "backToChild",
args: args,
});
},
onItemUpdated(action, host, path) {
Services.mm.broadcastAsyncMessage("debug:storage-indexedDB-request-child", {
method: "onItemUpdated",
args: [ action, host, path ],
});
},
/**
* Fetches and stores all the metadata information for the given database
* `name` for the given `host` with its `principal`. The stored metadata
* information is of `DatabaseMetadata` type.
*/
async getDBMetaData(host, principal, name, storage) {
const request = this.openWithPrincipal(principal, name, storage);
const success = defer();
request.onsuccess = event => {
const db = event.target.result;
const dbData = new DatabaseMetadata(host, db, storage);
db.close();
success.resolve(this.backToChild("getDBMetaData", dbData));
};
request.onerror = ({target}) => {
console.error(
`Error opening indexeddb database ${name} for host ${host}`, target.error);
success.resolve(this.backToChild("getDBMetaData", null));
};
return success.promise;
},
splitNameAndStorage: function(name) {
const lastOpenBracketIndex = name.lastIndexOf("(");
const lastCloseBracketIndex = name.lastIndexOf(")");
const delta = lastCloseBracketIndex - lastOpenBracketIndex - 1;
const storage = name.substr(lastOpenBracketIndex + 1, delta);
name = name.substr(0, lastOpenBracketIndex - 1);
return { storage, name };
},
/**
* Get all "internal" hosts. Internal hosts are database namespaces used by
* the browser.
*/
async getInternalHosts() {
// Return an empty array if the browser toolbox is not enabled.
if (!Services.prefs.getBoolPref(CHROME_ENABLED_PREF) ||
!Services.prefs.getBoolPref(REMOTE_ENABLED_PREF)) {
return this.backToChild("getInternalHosts", []);
}
const profileDir = OS.Constants.Path.profileDir;
const storagePath = OS.Path.join(profileDir, "storage", "permanent");
const iterator = new OS.File.DirectoryIterator(storagePath);
const hosts = [];
await iterator.forEach(entry => {
if (entry.isDir && !SAFE_HOSTS_PREFIXES_REGEX.test(entry.name)) {
hosts.push(entry.name);
}
});
iterator.close();
return this.backToChild("getInternalHosts", hosts);
},
/**
* Opens an indexed db connection for the given `principal` and
* database `name`.
*/
openWithPrincipal: function(principal, name, storage) {
return indexedDBForStorage.openForPrincipal(principal, name,
{ storage: storage });
},
async removeDB(host, principal, dbName) {
const result = new Promise(resolve => {
const {name, storage} = this.splitNameAndStorage(dbName);
const request =
indexedDBForStorage.deleteForPrincipal(principal, name,
{ storage: storage });
request.onsuccess = () => {
resolve({});
this.onItemUpdated("deleted", host, [dbName]);
};
request.onblocked = () => {
console.warn(`Deleting indexedDB database ${name} for host ${host} is blocked`);
resolve({ blocked: true });
};
request.onerror = () => {
const { error } = request;
console.warn(
`Error deleting indexedDB database ${name} for host ${host}: ${error}`);
resolve({ error: error.message });
};
// If the database is blocked repeatedly, the onblocked event will not
// be fired again. To avoid waiting forever, report as blocked if nothing
// else happens after 3 seconds.
setTimeout(() => resolve({ blocked: true }), 3000);
});
return this.backToChild("removeDB", await result);
},
async removeDBRecord(host, principal, dbName, storeName, id) {
let db;
const {name, storage} = this.splitNameAndStorage(dbName);
try {
db = await new Promise((resolve, reject) => {
const request = this.openWithPrincipal(principal, name, storage);
request.onsuccess = ev => resolve(ev.target.result);
request.onerror = ev => reject(ev.target.error);
});
const transaction = db.transaction(storeName, "readwrite");
const store = transaction.objectStore(storeName);
await new Promise((resolve, reject) => {
const request = store.delete(id);
request.onsuccess = () => resolve();
request.onerror = ev => reject(ev.target.error);
});
this.onItemUpdated("deleted", host, [dbName, storeName, id]);
} catch (error) {
const recordPath = [dbName, storeName, id].join("/");
console.error(`Failed to delete indexedDB record: ${recordPath}: ${error}`);
}
if (db) {
db.close();
}
return this.backToChild("removeDBRecord", null);
},
async clearDBStore(host, principal, dbName, storeName) {
let db;
const {name, storage} = this.splitNameAndStorage(dbName);
try {
db = await new Promise((resolve, reject) => {
const request = this.openWithPrincipal(principal, name, storage);
request.onsuccess = ev => resolve(ev.target.result);
request.onerror = ev => reject(ev.target.error);
});
const transaction = db.transaction(storeName, "readwrite");
const store = transaction.objectStore(storeName);
await new Promise((resolve, reject) => {
const request = store.clear();
request.onsuccess = () => resolve();
request.onerror = ev => reject(ev.target.error);
});
this.onItemUpdated("cleared", host, [dbName, storeName]);
} catch (error) {
const storePath = [dbName, storeName].join("/");
console.error(`Failed to clear indexedDB store: ${storePath}: ${error}`);
}
if (db) {
db.close();
}
return this.backToChild("clearDBStore", null);
},
/**
* Fetches all the databases and their metadata for the given `host`.
*/
async getDBNamesForHost(host, principal) {
const sanitizedHost = this.getSanitizedHost(host) + principal.originSuffix;
const profileDir = OS.Constants.Path.profileDir;
const files = [];
const names = [];
const storagePath = OS.Path.join(profileDir, "storage");
// We expect sqlite DB paths to look something like this:
// - PathToProfileDir/storage/default/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// - PathToProfileDir/storage/permanent/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// - PathToProfileDir/storage/temporary/http+++www.example.com/
// idb/1556056096MeysDaabta.sqlite
// The subdirectory inside the storage folder is determined by the storage
// type:
// - default: { storage: "default" } or not specified.
// - permanent: { storage: "persistent" }.
// - temporary: { storage: "temporary" }.
const sqliteFiles = await this.findSqlitePathsForHost(storagePath, sanitizedHost);
for (const file of sqliteFiles) {
const splitPath = OS.Path.split(file).components;
const idbIndex = splitPath.indexOf("idb");
const storage = splitPath[idbIndex - 2];
const relative = file.substr(profileDir.length + 1);
files.push({
file: relative,
storage: storage === "permanent" ? "persistent" : storage,
});
}
if (files.length > 0) {
for (const {file, storage} of files) {
const name = await this.getNameFromDatabaseFile(file);
if (name) {
names.push({
name,
storage,
});
}
}
}
return this.backToChild("getDBNamesForHost", {names});
},
/**
* Find all SQLite files that hold IndexedDB data for a host, such as:
* storage/temporary/http+++www.example.com/idb/1556056096MeysDaabta.sqlite
*/
async findSqlitePathsForHost(storagePath, sanitizedHost) {
const sqlitePaths = [];
const idbPaths = await this.findIDBPathsForHost(storagePath, sanitizedHost);
for (const idbPath of idbPaths) {
const iterator = new OS.File.DirectoryIterator(idbPath);
await iterator.forEach(entry => {
if (!entry.isDir && entry.path.endsWith(".sqlite")) {
sqlitePaths.push(entry.path);
}
});
iterator.close();
}
return sqlitePaths;
},
/**
* Find all paths that hold IndexedDB data for a host, such as:
* storage/temporary/http+++www.example.com/idb
*/
async findIDBPathsForHost(storagePath, sanitizedHost) {
const idbPaths = [];
const typePaths = await this.findStorageTypePaths(storagePath);
for (const typePath of typePaths) {
const idbPath = OS.Path.join(typePath, sanitizedHost, "idb");
if (await OS.File.exists(idbPath)) {
idbPaths.push(idbPath);
}
}
return idbPaths;
},
/**
* Find all the storage types, such as "default", "permanent", or "temporary".
* These names have changed over time, so it seems simpler to look through all types
* that currently exist in the profile.
*/
async findStorageTypePaths(storagePath) {
const iterator = new OS.File.DirectoryIterator(storagePath);
const typePaths = [];
await iterator.forEach(entry => {
if (entry.isDir) {
typePaths.push(entry.path);
}
});
iterator.close();
return typePaths;
},
/**
* Removes any illegal characters from the host name to make it a valid file
* name.
*/
getSanitizedHost(host) {
if (host.startsWith("about:")) {
host = "moz-safe-" + host;
}
return host.replace(ILLEGAL_CHAR_REGEX, "+");
},
/**
* Retrieves the proper indexed db database name from the provided .sqlite
* file location.
*/
async getNameFromDatabaseFile(path) {
let connection = null;
let retryCount = 0;
// Content pages might be having an open transaction for the same indexed db
// which this sqlite file belongs to. In that case, sqlite.openConnection
// will throw. Thus we retry for some time to see if lock is removed.
while (!connection && retryCount++ < 25) {
try {
connection = await Sqlite.openConnection({ path: path });
} catch (ex) {
// Continuously retrying is overkill. Waiting for 100ms before next try
await sleep(100);
}
}
if (!connection) {
return null;
}
const rows = await connection.execute("SELECT name FROM database");
if (rows.length != 1) {
return null;
}
const name = rows[0].getResultByName("name");
await connection.close();
return name;
},
async getValuesForHost(host, name = "null", options,
hostVsStores, principal) {
name = JSON.parse(name);
if (!name || !name.length) {
// This means that details about the db in this particular host are
// requested.
const dbs = [];
if (hostVsStores.has(host)) {
for (let [, db] of hostVsStores.get(host)) {
db = indexedDBHelpers.patchMetadataMapsAndProtos(db);
dbs.push(db.toObject());
}
}
return this.backToChild("getValuesForHost", {dbs: dbs});
}
const [db2, objectStore, id] = name;
if (!objectStore) {
// This means that details about all the object stores in this db are
// requested.
const objectStores = [];
if (hostVsStores.has(host) && hostVsStores.get(host).has(db2)) {
let db = hostVsStores.get(host).get(db2);
db = indexedDBHelpers.patchMetadataMapsAndProtos(db);
const objectStores2 = db.objectStores;
for (const objectStore2 of objectStores2) {
objectStores.push(objectStore2[1].toObject());
}
}
return this.backToChild("getValuesForHost", {objectStores: objectStores});
}
// Get either all entries from the object store, or a particular id
const storage = hostVsStores.get(host).get(db2).storage;
const result = await this.getObjectStoreData(host, principal, db2, storage, {
objectStore: objectStore,
id: id,
index: options.index,
offset: options.offset,
size: options.size,
});
return this.backToChild("getValuesForHost", {result: result});
},
/**
* Returns requested entries (or at most MAX_STORE_OBJECT_COUNT) from a particular
* objectStore from the db in the given host.
*
* @param {string} host
* The given host.
* @param {nsIPrincipal} principal
* The principal of the given document.
* @param {string} dbName
* The name of the indexed db from the above host.
* @param {String} storage
* Storage type, either "temporary", "default" or "persistent".
* @param {Object} requestOptions
* An object in the following format:
* {
* objectStore: The name of the object store from the above db,
* id: Id of the requested entry from the above object
* store. null if all entries from the above object
* store are requested,
* index: Name of the IDBIndex to be iterated on while fetching
* entries. null or "name" if no index is to be
* iterated,
* offset: offset of the entries to be fetched,
* size: The intended size of the entries to be fetched
* }
*/
getObjectStoreData(host, principal, dbName, storage, requestOptions) {
const {name} = this.splitNameAndStorage(dbName);
const request = this.openWithPrincipal(principal, name, storage);
const success = defer();
let {objectStore, id, index, offset, size} = requestOptions;
const data = [];
let db;
if (!size || size > MAX_STORE_OBJECT_COUNT) {
size = MAX_STORE_OBJECT_COUNT;
}
request.onsuccess = event => {
db = event.target.result;
const transaction = db.transaction(objectStore, "readonly");
let source = transaction.objectStore(objectStore);
if (index && index != "name") {
source = source.index(index);
}
source.count().onsuccess = event2 => {
const objectsSize = [];
const count = event2.target.result;
objectsSize.push({
key: host + dbName + objectStore + index,
count: count,
});
if (!offset) {
offset = 0;
} else if (offset > count) {
db.close();
success.resolve([]);
return;
}
if (id) {
source.get(id).onsuccess = event3 => {
db.close();
success.resolve([{name: id, value: event3.target.result}]);
};
} else {
source.openCursor().onsuccess = event4 => {
const cursor = event4.target.result;
if (!cursor || data.length >= size) {
db.close();
success.resolve({
data: data,
objectsSize: objectsSize,
});
return;
}
if (offset-- <= 0) {
data.push({name: cursor.key, value: cursor.value});
}
cursor.continue();
};
}
};
};
request.onerror = () => {
db.close();
success.resolve([]);
};
return success.promise;
},
/**
* When indexedDB metadata is parsed to and from JSON then the object's
* prototype is dropped and any Maps are changed to arrays of arrays. This
* method is used to repair the prototypes and fix any broken Maps.
*/
patchMetadataMapsAndProtos(metadata) {
const md = Object.create(DatabaseMetadata.prototype);
Object.assign(md, metadata);
md._objectStores = new Map(metadata._objectStores);
for (const [name, store] of md._objectStores) {
const obj = Object.create(ObjectStoreMetadata.prototype);
Object.assign(obj, store);
md._objectStores.set(name, obj);
if (typeof store._indexes.length !== "undefined") {
obj._indexes = new Map(store._indexes);
}
for (const [name2, value] of obj._indexes) {
const obj2 = Object.create(IndexMetadata.prototype);
Object.assign(obj2, value);
obj._indexes.set(name2, obj2);
}
}
return md;
},
handleChildRequest(msg) {
const args = msg.data.args;
switch (msg.json.method) {
case "getDBMetaData": {
const [host, principal, name, storage] = args;
return indexedDBHelpers.getDBMetaData(host, principal, name, storage);
}
case "getInternalHosts": {
return indexedDBHelpers.getInternalHosts();
}
case "splitNameAndStorage": {
const [name] = args;
return indexedDBHelpers.splitNameAndStorage(name);
}
case "getDBNamesForHost": {
const [host, principal] = args;
return indexedDBHelpers.getDBNamesForHost(host, principal);
}
case "getValuesForHost": {
const [host, name, options, hostVsStores, principal] = args;
return indexedDBHelpers.getValuesForHost(host, name, options,
hostVsStores, principal);
}
case "removeDB": {
const [host, principal, dbName] = args;
return indexedDBHelpers.removeDB(host, principal, dbName);
}
case "removeDBRecord": {
const [host, principal, db, store, id] = args;
return indexedDBHelpers.removeDBRecord(host, principal, db, store, id);
}
case "clearDBStore": {
const [host, principal, db, store] = args;
return indexedDBHelpers.clearDBStore(host, principal, db, store);
}
default:
console.error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD", msg.json.method);
throw new Error("ERR_DIRECTOR_PARENT_UNKNOWN_METHOD");
}
},
};
/**
* E10S parent/child setup helpers
*/
exports.setupParentProcessForIndexedDB = function({ mm, prefix }) {
// listen for director-script requests from the child process
setMessageManager(mm);
function setMessageManager(newMM) {
if (mm) {
mm.removeMessageListener("debug:storage-indexedDB-request-parent",
indexedDBHelpers.handleChildRequest);
}
mm = newMM;
if (mm) {
mm.addMessageListener("debug:storage-indexedDB-request-parent",
indexedDBHelpers.handleChildRequest);
}
}
return {
onBrowserSwap: setMessageManager,
onDisconnected: () => setMessageManager(null),
};
};
/**
 * General helpers
 *
 * Strips a trailing ":port" and a leading "http://" or "https://" scheme from
 * a URL-ish string; any other scheme is left untouched.
 */
function trimHttpHttpsPort(url) {
  const portMatch = url.match(/(.+):\d+$/);
  let trimmed = portMatch ? portMatch[1] : url;
  if (trimmed.startsWith("http://")) {
    trimmed = trimmed.substring("http://".length);
  } else if (trimmed.startsWith("https://")) {
    trimmed = trimmed.substring("https://".length);
  }
  return trimmed;
}
/**
 * The main Storage Actor.
 *
 * Owns one child actor per registered storage type, tracks every window that
 * belongs to the target (top-level window plus nested iframes), and batches
 * store-change notifications for BATCH_DELAY ms before emitting them to the
 * client.
 */
const StorageActor = protocol.ActorClassWithSpec(specs.storageSpec, {
  typeName: "storage",
  get window() {
    return this.parentActor.window;
  },
  get document() {
    return this.parentActor.window.document;
  },
  get windows() {
    return this.childWindowPool;
  },
  initialize(conn, targetActor) {
    protocol.Actor.prototype.initialize.call(this, conn);
    this.parentActor = targetActor;
    this.childActorPool = new Map();
    this.childWindowPool = new Set();
    // Fetch all the inner iframe windows in this tab.
    this.fetchChildWindows(this.parentActor.docShell);
    // Initialize the registered store types
    for (const [store, ActorConstructor] of storageTypePool) {
      this.childActorPool.set(store, new ActorConstructor(this));
    }
    // Notifications that help us keep track of newly added windows and windows
    // that got removed
    Services.obs.addObserver(this, "content-document-global-created");
    Services.obs.addObserver(this, "inner-window-destroyed");
    this.onPageChange = this.onPageChange.bind(this);
    const handler = targetActor.chromeEventHandler;
    handler.addEventListener("pageshow", this.onPageChange, true);
    handler.addEventListener("pagehide", this.onPageChange, true);
    this.destroyed = false;
    // Accumulates pending store updates between batch flushes; see update().
    this.boundUpdate = {};
  },
  destroy() {
    clearTimeout(this.batchTimer);
    this.batchTimer = null;
    // Remove observers
    Services.obs.removeObserver(this, "content-document-global-created");
    Services.obs.removeObserver(this, "inner-window-destroyed");
    this.destroyed = true;
    // NOTE(review): initialize() registered these listeners on
    // targetActor.chromeEventHandler; confirm parentActor.browser is the same
    // event target, otherwise the listeners leak when the two differ.
    if (this.parentActor.browser) {
      this.parentActor.browser.removeEventListener("pageshow", this.onPageChange, true);
      this.parentActor.browser.removeEventListener("pagehide", this.onPageChange, true);
    }
    // Destroy the registered store types
    for (const actor of this.childActorPool.values()) {
      actor.destroy();
    }
    this.childActorPool.clear();
    this.childWindowPool.clear();
    this.childActorPool = null;
    this.childWindowPool = null;
    this.parentActor = null;
    this.boundUpdate = null;
    this.registeredPool = null;
    this._pendingResponse = null;
    protocol.Actor.prototype.destroy.call(this);
  },
  /**
   * Given a docshell, recursively find out all the child windows from it.
   *
   * @param {nsIDocShell} item
   *        The docshell from which all inner windows need to be extracted.
   */
  fetchChildWindows(item) {
    const docShell = item.QueryInterface(Ci.nsIDocShell)
                         .QueryInterface(Ci.nsIDocShellTreeItem);
    if (!docShell.contentViewer) {
      return null;
    }
    const window = docShell.contentViewer.DOMDocument.defaultView;
    if (window.location.href == "about:blank") {
      // Skip out about:blank windows as Gecko creates them multiple times while
      // creating any global.
      return null;
    }
    this.childWindowPool.add(window);
    for (let i = 0; i < docShell.childCount; i++) {
      const child = docShell.getChildAt(i);
      this.fetchChildWindows(child);
    }
    return null;
  },
  isIncludedInTopLevelWindow(window) {
    return isWindowIncluded(this.window, window);
  },
  /**
   * Look up a tracked window by its inner-window id (an nsISupportsPRUint64).
   */
  getWindowFromInnerWindowID(innerID) {
    innerID = innerID.QueryInterface(Ci.nsISupportsPRUint64).data;
    for (const win of this.childWindowPool.values()) {
      const id = win.windowUtils.currentInnerWindowID;
      if (id == innerID) {
        return win;
      }
    }
    return null;
  },
  /**
   * Look up a tracked window whose origin or document URL equals `host`.
   */
  getWindowFromHost(host) {
    for (const win of this.childWindowPool.values()) {
      const origin = win.document
                        .nodePrincipal
                        .originNoSuffix;
      const url = win.document.URL;
      if (origin === host || url === host) {
        return win;
      }
    }
    return null;
  },
  /**
   * Event handler for any docshell update. This lets us figure out whenever
   * any new window is added, or an existing window is removed.
   */
  observe(subject, topic) {
    if (subject.location &&
        (!subject.location.href || subject.location.href == "about:blank")) {
      return null;
    }
    // We don't want to try to find a top level window for an extension page, as
    // in many cases (e.g. background page), it is not loaded in a tab, and
    // 'isIncludedInTopLevelWindow' throws an error
    if (topic == "content-document-global-created"
        && (subject.location.href.startsWith("moz-extension://")
            || this.isIncludedInTopLevelWindow(subject))) {
      this.childWindowPool.add(subject);
      this.emit("window-ready", subject);
    } else if (topic == "inner-window-destroyed") {
      const window = this.getWindowFromInnerWindowID(subject);
      if (window) {
        this.childWindowPool.delete(window);
        this.emit("window-destroyed", window);
      }
    }
    return null;
  },
  /**
   * Called on "pageshow" or "pagehide" event on the chromeEventHandler of
   * current tab.
   *
   * @param {event} The event object passed to the handler. We are using these
   *        three properties from the event:
   *        - target {document} The document corresponding to the event.
   *        - type {string} Name of the event - "pageshow" or "pagehide".
   *        - persisted {boolean} true if there was no
   *          "content-document-global-created" notification along this event.
   */
  onPageChange({target, type, persisted}) {
    if (this.destroyed) {
      return;
    }
    const window = target.defaultView;
    if (type == "pagehide" && this.childWindowPool.delete(window)) {
      this.emit("window-destroyed", window);
    } else if (type == "pageshow" && persisted && window.location.href &&
               window.location.href != "about:blank" &&
               this.isIncludedInTopLevelWindow(window)) {
      this.childWindowPool.add(window);
      this.emit("window-ready", window);
    }
  },
  /**
   * Lists the available hosts for all the registered storage types.
   *
   * @returns {object} An object containing with the following structure:
   *  - <storageType> : [{
   *      actor: <actorId>,
   *      host: <hostname>
   *    }]
   */
  async listStores() {
    const toReturn = {};
    for (const [name, value] of this.childActorPool) {
      // Only list extensionStorage for the add-on storage panel
      if (name === "extensionStorage"
          && (!value.storageActor.parentActor.addonId)) {
        continue;
      }
      if (value.preListStores) {
        await value.preListStores();
      }
      toReturn[name] = value;
    }
    return toReturn;
  },
  /**
   * This method is called by the registered storage types so as to tell the
   * Storage Actor that there are some changes in the stores. Storage Actor then
   * notifies the client front about these changes at regular (BATCH_DELAY)
   * interval.
   *
   * @param {string} action
   *        The type of change. One of "added", "changed" or "deleted"
   * @param {string} storeType
   *        The storage actor in which this change has occurred.
   * @param {object} data
   *        The update object. This object is of the following format:
   *         - {
   *             <host1>: [<store_names1>, <store_name2>...],
   *             <host2>: [<store_names34>...],
   *           }
   *        Where host1, host2 are the host in which this change happened and
   *        [<store_namesX] is an array of the names of the changed store objects.
   *        Pass an empty array if the host itself was affected: either completely
   *        removed or cleared.
   */
  /* eslint-disable complexity */
  update(action, storeType, data) {
    if (action == "cleared") {
      this.emit("stores-cleared", { [storeType]: data });
      return null;
    }
    if (this.batchTimer) {
      clearTimeout(this.batchTimer);
    }
    if (!this.boundUpdate[action]) {
      this.boundUpdate[action] = {};
    }
    if (!this.boundUpdate[action][storeType]) {
      this.boundUpdate[action][storeType] = {};
    }
    for (const host in data) {
      if (!this.boundUpdate[action][storeType][host]) {
        this.boundUpdate[action][storeType][host] = [];
      }
      for (const name of data[host]) {
        if (!this.boundUpdate[action][storeType][host].includes(name)) {
          this.boundUpdate[action][storeType][host].push(name);
        }
      }
    }
    if (action == "added") {
      // If the same store name was previously deleted or changed, but now is
      // added somehow, dont send the deleted or changed update.
      this.removeNamesFromUpdateList("deleted", storeType, data);
      this.removeNamesFromUpdateList("changed", storeType, data);
    } else if (action == "changed" && this.boundUpdate.added &&
               this.boundUpdate.added[storeType]) {
      // If something got added and changed at the same time, then remove those
      // items from changed instead.
      this.removeNamesFromUpdateList("changed", storeType,
                                     this.boundUpdate.added[storeType]);
    } else if (action == "deleted") {
      // If any item got delete, or a host got delete, no point in sending
      // added or changed update
      this.removeNamesFromUpdateList("added", storeType, data);
      this.removeNamesFromUpdateList("changed", storeType, data);
      for (const host in data) {
        if (data[host].length == 0 && this.boundUpdate.added &&
            this.boundUpdate.added[storeType] &&
            this.boundUpdate.added[storeType][host]) {
          delete this.boundUpdate.added[storeType][host];
        }
        if (data[host].length == 0 && this.boundUpdate.changed &&
            this.boundUpdate.changed[storeType] &&
            this.boundUpdate.changed[storeType][host]) {
          delete this.boundUpdate.changed[storeType][host];
        }
      }
    }
    this.batchTimer = setTimeout(() => {
      clearTimeout(this.batchTimer);
      this.emit("stores-update", this.boundUpdate);
      this.boundUpdate = {};
    }, BATCH_DELAY);
    return null;
  },
  /* eslint-enable complexity */
  /**
   * This method removes data from the this.boundUpdate object in the same
   * manner like this.update() adds data to it.
   *
   * @param {string} action
   *        The type of change. One of "added", "changed" or "deleted"
   * @param {string} storeType
   *        The storage actor for which you want to remove the updates data.
   * @param {object} data
   *        The update object. This object is of the following format:
   *         - {
   *             <host1>: [<store_names1>, <store_name2>...],
   *             <host2>: [<store_names34>...],
   *           }
   *        Where host1, host2 are the hosts which you want to remove and
   *        [<store_namesX] is an array of the names of the store objects.
   */
  removeNamesFromUpdateList(action, storeType, data) {
    for (const host in data) {
      if (this.boundUpdate[action] && this.boundUpdate[action][storeType] &&
          this.boundUpdate[action][storeType][host]) {
        // Iterate the store *names* (the array values). The previous
        // `for...in` yielded the array indices ("0", "1", ...), so
        // indexOf(name) never matched and stale updates were never removed.
        for (const name of data[host]) {
          const index = this.boundUpdate[action][storeType][host].indexOf(name);
          if (index > -1) {
            this.boundUpdate[action][storeType][host].splice(index, 1);
          }
        }
        if (!this.boundUpdate[action][storeType][host].length) {
          delete this.boundUpdate[action][storeType][host];
        }
      }
    }
    return null;
  },
});
exports.StorageActor = StorageActor;
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/* globals browser */
"use strict";
const {
AddonTestUtils,
} = ChromeUtils.import("resource://testing-common/AddonTestUtils.jsm");
const {
FileUtils,
} = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
const {
ExtensionTestUtils,
} = ChromeUtils.import("resource://testing-common/ExtensionXPCShellUtils.jsm");
const {
createAppInfo,
promiseStartupManager,
} = AddonTestUtils;
const LEAVE_UUID_PREF = "extensions.webextensions.keepUuidOnUninstall";
const LEAVE_STORAGE_PREF = "extensions.webextensions.keepStorageOnUninstall";
const EXTENSION_STORAGE_ENABLED_PREF = "devtools.storage.extensionStorage.enabled";
// Bring up the add-on manager and extension test harness for this scope.
AddonTestUtils.init(this);
createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "42");
ExtensionTestUtils.init(this);
// This storage actor is gated behind a pref, so make sure it is enabled first
Services.prefs.setBoolPref(EXTENSION_STORAGE_ENABLED_PREF, true);
// Restore the pref once the whole test file has finished.
registerCleanupFunction(() => {
  Services.prefs.clearUserPref(EXTENSION_STORAGE_ENABLED_PREF);
});
/**
 * Starts up and connects the Debugger server to the DevTools client (both in the main
 * process) by listening over an nsIPipe, so that they can send remote debugging
 * protocol messages to each other.
 *
 * @return {Promise} Resolves with a client object when the debugger has started up.
 */
async function startDebugger() {
  DebuggerServer.init();
  DebuggerServer.registerAllActors();
  const client = new DebuggerClient(DebuggerServer.connectPipe());
  await client.connect();
  return client;
}
/**
 * Set up the equivalent of an `about:debugging` toolbox for a given extension, minus
 * the toolbox.
 *
 * @param {String} id - The id for the extension to be targeted by the toolbox.
 * @return {Object} Resolves with the web extension actor front and target objects when
 *         the debugger has been connected to the extension.
 */
async function setupExtensionDebugging(id) {
  const client = await startDebugger();
  const addonFront = await client.mainRoot.getAddon({id});
  // Connecting spawns a DevTools server in the extension child process.
  const target = await addonFront.connect();
  return {front: addonFront, target};
}
/**
 * Loads and starts up a test extension given the provided extension configuration.
 *
 * @param {Object} extConfig - The extension configuration object
 * @return {ExtensionWrapper} Resolves with an extension object once the
 *         extension has started up.
 */
async function startupExtension(extConfig) {
  const ext = ExtensionTestUtils.loadExtension(extConfig);
  await ext.startup();
  return ext;
}
/**
 * Opens the addon debugger's storage panel
 *
 * @param {String} id - The addon id
 * @return {Object} Resolves with the web extension actor target and extensionStorage
 *         store objects when the panel has been opened.
 */
async function openAddonStoragePanel(id) {
  const {target} = await setupExtensionDebugging(id);
  const storageFront = await target.getFront("storage");
  const stores = await storageFront.listStores();
  return {target, extensionStorage: stores.extensionStorage || null};
}
/**
 * Builds the extension configuration object passed into ExtensionTestUtils.loadExtension
 *
 * @param {Object} options - Options, if any, to add to the configuration
 * @param {Function} options.background - A function comprising the test extension's
 *        background script if provided
 * @param {Object} options.files - An object whose keys correspond to file names and
 *        values map to the file contents
 * @param {Object} options.manifest - An object representing the extension's manifest
 * @return {Object} - The extension configuration object
 */
function getExtensionConfig(options = {}) {
  const {manifest = {}, ...otherOptions} = options;
  // Guarantee the "storage" permission without clobbering any permissions the
  // caller already declared (previously a caller-supplied list was replaced).
  const permissions = [...new Set([...(manifest.permissions || []), "storage"])];
  return {
    manifest: {
      ...manifest,
      permissions,
    },
    useAddonManager: "temporary",
    ...otherOptions,
  };
}
/**
 * An extension script that can be used in any extension context (e.g. as a background
 * script or as an extension page script loaded in a tab). It reacts to test
 * messages, acks each one with "<msg>:done", and reports its origin on startup.
 */
async function extensionScriptWithMessageListener() {
  browser.test.onMessage.addListener(async (msg, ...args) => {
    if (msg === "storage-local-set") {
      await browser.storage.local.set(args[0]);
    } else if (msg === "storage-local-get") {
      const {key, valueType} = args[0];
      const value = (await browser.storage.local.get(key))[key];
      browser.test.assertTrue(typeof value === valueType,
        `Value fetched by extension matches expected value type, ${valueType}`
      );
    } else {
      browser.test.fail(`Unexpected test message: ${msg}`);
    }
    browser.test.sendMessage(`${msg}:done`);
  });
  browser.test.sendMessage("extension-origin", window.location.origin);
}
/**
 * Shared files for a test extension that has no background page but adds storage
 * items via a transient extension page in a tab
 */
const ext_no_bg = {
  files: {
    // Minimal page opened in a tab; its script (below) registers the shared
    // test-message listener in place of a background page.
    "extension_page_in_tab.html": `<!DOCTYPE html>
      <html>
      <head>
        <meta charset="utf-8">
      </head>
      <body>
        <h1>Extension Page in a Tab</h1>
        <script src="extension_page_in_tab.js"></script>
      </body>
      </html>`,
    "extension_page_in_tab.js": extensionScriptWithMessageListener,
  },
};
/**
 * Shutdown procedure common to all tasks.
 *
 * @param {Object} extension - The test extension
 * @param {Object} target - The web extension actor targeted by the DevTools client
 */
async function shutdown(extension, target) {
  // Detach the DevTools target (if one was opened) before unloading.
  if (target) {
    await target.destroy();
  }
  return extension.unload();
}
/**
 * Mocks the missing 'storage/permanent' directory needed by the "indexedDB"
 * storage actor's 'preListStores' method (called when 'listStores' is called). This
 * directory exists in a full browser i.e. mochitest.
 */
function createMissingIndexedDBDirs() {
  const dir = Services.dirsvc.get("ProfD", Ci.nsIFile).clone();
  // Descend into profile-dir/storage/permanent, creating each missing segment.
  for (const segment of ["storage", "permanent"]) {
    dir.append(segment);
    if (!dir.exists()) {
      dir.create(dir.DIRECTORY_TYPE, FileUtils.PERMS_DIRECTORY);
    }
  }
  Assert.ok(dir.exists(), "Should have a 'storage/permanent' dir in the profile dir");
}
add_task(async function setup() {
  // Start the add-on manager, then create the profile storage directories the
  // indexedDB actor expects before any test task runs.
  await promiseStartupManager();
  createMissingIndexedDBDirs();
});
add_task(async function test_extension_store_exists() {
  // The storage panel of an add-on target should expose an extensionStorage
  // store for any extension holding the "storage" permission.
  const ext = await startupExtension(getExtensionConfig());
  const {target, extensionStorage} = await openAddonStoragePanel(ext.id);
  ok(extensionStorage, "Should have an extensionStorage store");
  await shutdown(ext, target);
});
add_task(async function test_extension_origin_matches_debugger_target() {
  async function background() {
    browser.test.sendMessage("extension-origin", window.location.origin);
  }
  const ext = await startupExtension(getExtensionConfig({background}));
  const {target, extensionStorage} = await openAddonStoragePanel(ext.id);
  // The moz-extension:// origin reported by the running extension must appear
  // among the hosts of the extensionStorage store.
  const expectedHost = await ext.awaitMessage("extension-origin");
  ok(expectedHost in extensionStorage.hosts,
    "Should have the expected extension host in the extensionStorage store");
  await shutdown(ext, target);
});
/**
 * Test case: Bg page adds item while storage panel is open.
 * - Load extension with background page.
 * - Open the add-on debugger storage panel.
 * - With the panel still open, add an item from the background page.
 * - The data in the panel should match the item added by the extension.
 */
add_task(async function test_panel_live_updates() {
  const extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener}),
  );
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  const host = await extension.awaitMessage("extension-origin");
  // Nothing has been stored yet, so the panel must start out empty.
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(data, [], "Got the expected results on empty storage.local");
  // Store one value of each JSON-serializable type from the background page.
  extension.sendMessage("storage-local-set", {
    a: 123,
    b: [4, 5],
    c: {d: 678},
    d: true,
    e: "hi",
    f: null,
  });
  await extension.awaitMessage("storage-local-set:done");
  // Values surface in the panel as stringified JSON under the "local" area.
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data, [
      {area: "local", name: "a", value: {str: "123"}, editable: true},
      {area: "local", name: "b", value: {str: "[4,5]"}, editable: true},
      {area: "local", name: "c", value: {str: "{\"d\":678}"}, editable: true},
      {area: "local", name: "d", value: {str: "true"}, editable: true},
      {area: "local", name: "e", value: {str: "hi"}, editable: true},
      {area: "local", name: "f", value: {str: "null"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );
  await shutdown(extension, target);
});
/**
 * Test case: No bg page. Transient page adds item before storage panel opened.
 * - Load extension with no background page.
 * - Open an extension page in a tab that adds a local storage item.
 * - With the extension page still open, open the add-on storage panel.
 * - The data in the storage panel should match the items added by the extension.
 */
add_task(async function test_panel_data_matches_extension_with_transient_page_open() {
  const ext = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  // Stand up the extension page in a tab and add an item from it.
  const pageUrl = ext.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(pageUrl, {extension: ext});
  const host = await ext.awaitMessage("extension-origin");
  ext.sendMessage("storage-local-set", {a: 123});
  await ext.awaitMessage("storage-local-set:done");
  // Open the panel afterwards; it must pick up the pre-existing item.
  const {target, extensionStorage} = await openAddonStoragePanel(ext.id);
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await contentPage.close();
  await shutdown(ext, target);
});
/**
 * Test case: No bg page. Transient page adds item then closes before storage panel opened.
 * - Load extension with no background page.
 * - Open an extension page in a tab that adds a local storage item.
 * - Close all extension pages.
 * - Open the add-on storage panel.
 * - The data in the storage panel should match the item added by the extension.
 */
add_task(async function test_panel_data_matches_extension_with_no_pages_open() {
  const ext = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  const pageUrl = ext.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(pageUrl, {extension: ext});
  const host = await ext.awaitMessage("extension-origin");
  ext.sendMessage("storage-local-set", {a: 123});
  await ext.awaitMessage("storage-local-set:done");
  // Close the only extension page before the panel ever opens.
  await contentPage.close();
  const {target, extensionStorage} = await openAddonStoragePanel(ext.id);
  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  await shutdown(ext, target);
});
/**
 * Test case: No bg page. Storage panel live updates when a transient page adds an item.
 * - Load extension with no background page.
 * - Open the add-on storage panel.
 * - With the storage panel still open, open an extension page in a new tab that adds an
 *   item.
 * - Assert:
 *   - The data in the storage panel should live update to match the item added by the
 *     extension.
 *   - If an extension page adds the same data again, the data in the storage panel should
 *     not change.
 */
add_task(async function test_panel_data_live_updates_for_extension_without_bg_page() {
  const ext = await startupExtension(getExtensionConfig({files: ext_no_bg.files}));
  // Open the panel first, then bring up the transient extension page.
  const {target, extensionStorage} = await openAddonStoragePanel(ext.id);
  const pageUrl = ext.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(pageUrl, {extension: ext});
  const host = await ext.awaitMessage("extension-origin");
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(data, [], "Got the expected results on empty storage.local");
  ext.sendMessage("storage-local-set", {a: 123});
  await ext.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  // Re-adding the same item must not create a duplicate row in the panel.
  ext.sendMessage("storage-local-set", {a: 123});
  await ext.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "The results are unchanged when an extension page adds duplicate items"
  );
  await contentPage.close();
  await shutdown(ext, target);
});
/**
 * Test case: Storage panel shows extension storage data added prior to extension startup
 * - Load extension that adds a storage item
 * - Uninstall the extension
 * - Reinstall the extension
 * - Open the add-on storage panel.
 * - The data in the storage panel should match the data added the first time the extension
 * was installed
 * Related test case: Storage panel shows extension storage data when an extension that has
 * already migrated to the IndexedDB storage backend prior to extension startup adds
 * another storage item.
 * - (Building from previous steps)
 * - The reinstalled extension adds a storage item
 * - The data in the storage panel should live update with both items: the item added from
 * the first and the item added from the reinstall.
 */
add_task(async function test_panel_data_matches_data_added_prior_to_ext_startup() {
  // The pref to leave the addonid->uuid mapping around after uninstall so that we can
  // re-attach to the same storage
  Services.prefs.setBoolPref(LEAVE_UUID_PREF, true);
  // The pref to prevent cleaning up storage on uninstall
  Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, true);
  let extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener}),
  );
  const host = await extension.awaitMessage("extension-origin");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  // Uninstall while both prefs are set so the data and uuid mapping survive.
  await shutdown(extension);
  // Reinstall the same extension
  extension = await startupExtension(
    getExtensionConfig({background: extensionScriptWithMessageListener})
  );
  await extension.awaitMessage("extension-origin");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);
  // The item stored by the first install must still be visible.
  let {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );
  // Related test case
  extension.sendMessage("storage-local-set", {b: 456});
  await extension.awaitMessage("storage-local-set:done");
  data = (await extensionStorage.getStoreObjects(host)).data;
  Assert.deepEqual(
    data,
    [
      {area: "local", name: "a", value: {str: "123"}, editable: true},
      {area: "local", name: "b", value: {str: "456"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );
  Services.prefs.setBoolPref(LEAVE_STORAGE_PREF, false);
  Services.prefs.setBoolPref(LEAVE_UUID_PREF, false);
  await shutdown(extension, target);
});
add_task(function cleanup_for_test_panel_data_matches_data_added_prior_to_ext_startup() {
  // Reset the uninstall-behavior prefs flipped by the previous task.
  for (const pref of [LEAVE_UUID_PREF, LEAVE_STORAGE_PREF]) {
    Services.prefs.clearUserPref(pref);
  }
});
/**
* Test case: Bg page adds an item to storage. With storage panel open, reload extension.
* - Load extension with background page that adds a storage item on message.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item added prior to reloading.
*/
add_task(async function test_panel_live_reload() {
  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  // Build a manifest for a given version; both versions share the same id so
  // the upgrade below is treated as an update of the same extension.
  const makeManifest = version => ({
    version,
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  });

  info("Loading extension version 1.0");
  const extension = await startupExtension(
    getExtensionConfig({
      manifest: makeManifest("1.0"),
      background: extensionScriptWithMessageListener,
    })
  );

  info("Waiting for message from test extension");
  const host = await extension.awaitMessage("extension-origin");

  info("Adding storage item");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");

  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);

  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Update to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest: makeManifest("2.0"),
      background: extensionScriptWithMessageListener,
    }),
  );
  await extension.awaitMessage("extension-origin");

  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );

  await shutdown(extension, target);
});
/**
* Test case: Transient page adds an item to storage. With storage panel open,
* reload extension.
* - Load extension with no background page.
* - Open transient page that adds a storage item on message.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item added prior to reloading.
*/
add_task(async function test_panel_live_reload_for_extension_without_bg_page() {
  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  // Build a manifest for a given version; both versions share the same id so
  // the upgrade below is treated as an update of the same extension.
  const makeManifest = version => ({
    version,
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  });

  info("Loading and starting extension version 1.0");
  const extension = await startupExtension(getExtensionConfig({
    manifest: makeManifest("1.0"),
    files: ext_no_bg.files,
  }));

  info("Opening extension page in a tab");
  const url = extension.extension.baseURI.resolve("extension_page_in_tab.html");
  const contentPage = await ExtensionTestUtils.loadContentPage(url, {extension});
  const host = await extension.awaitMessage("extension-origin");

  info("Waiting for extension page in a tab to add storage item");
  extension.sendMessage("storage-local-set", {a: 123});
  await extension.awaitMessage("storage-local-set:done");
  await contentPage.close();

  info("Opening storage panel");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);

  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Updating extension to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest: makeManifest("2.0"),
      files: ext_no_bg.files,
    })
  );

  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [{area: "local", name: "a", value: {str: "123"}, editable: true}],
    "Got the expected results on populated storage.local"
  );

  await shutdown(extension, target);
});
/**
* Test case: Bg page auto adds item(s). With storage panel open, reload extension.
* - Load extension with background page that automatically adds a storage item on startup.
* - Open the add-on storage panel.
* - With the storage panel still open, reload the extension.
* - The data in the storage panel should match the item(s) added by the reloaded
* extension.
*/
add_task(async function test_panel_live_reload_when_extension_auto_adds_items() {
  // Background script (serialized into the extension): adds two items on
  // startup, then reports its origin back to the test harness.
  async function background() {
    await browser.storage.local.set({a: {b: 123}, c: {d: 456}});
    browser.test.sendMessage("extension-origin", window.location.origin);
  }

  const EXTENSION_ID = "test_local_storage_live_reload@xpcshell.mozilla.org";
  // Build a manifest for a given version; both versions share the same id so
  // the upgrade below is treated as an update of the same extension.
  const makeManifest = version => ({
    version,
    applications: {
      gecko: {
        id: EXTENSION_ID,
      },
    },
  });

  info("Loading and starting extension version 1.0");
  const extension = await startupExtension(
    getExtensionConfig({manifest: makeManifest("1.0"), background})
  );

  info("Waiting for message from test extension");
  const host = await extension.awaitMessage("extension-origin");

  info("Opening storage panel");
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);

  // "Reload" is most similar to an upgrade, as e.g. storage data is preserved
  info("Update to version 2.0");
  await extension.upgrade(
    getExtensionConfig({
      manifest: makeManifest("2.0"),
      background,
    }),
  );
  await extension.awaitMessage("extension-origin");

  const {data} = await extensionStorage.getStoreObjects(host);
  Assert.deepEqual(
    data,
    [
      {area: "local", name: "a", value: {str: "{\"b\":123}"}, editable: true},
      {area: "local", name: "c", value: {str: "{\"d\":456}"}, editable: true},
    ],
    "Got the expected results on populated storage.local"
  );

  await shutdown(extension, target);
});
/*
* This task should be last, as it sets a pref to disable the extensionStorage
* storage actor. Since this pref is set at the beginning of the file, it
* already will be cleared via registerCleanupFunction when the test finishes.
*/
add_task(async function test_extensionStorage_store_disabled_on_pref() {
  // Turn the extensionStorage storage actor off for this (final) task; the
  // pref is restored by the file-level cleanup.
  Services.prefs.setBoolPref(EXTENSION_STORAGE_ENABLED_PREF, false);

  const extension = await startupExtension(getExtensionConfig());
  const {target, extensionStorage} = await openAddonStoragePanel(extension.id);

  const storeIsAbsent = extensionStorage === null;
  ok(
    storeIsAbsent,
    "Should not have an extensionStorage store when pref disabled"
  );

  await shutdown(extension, target);
});
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const protocol = require("devtools/shared/protocol");
const { Arg, RetVal, types } = protocol;
const childSpecs = {};
// Generate and register (in `childSpecs`) the actor spec for one storage
// type.
//
// options.typeName        - name of the storage type, e.g. "cookies".
// options.storeObjectType - protocol type returned by getStoreObjects.
// options.methods         - extra methods specific to this storage type;
//                           entries here take precedence over the common ones.
function createStorageSpec({typeName, storeObjectType, methods}) {
  // Methods every storage type exposes, with the type-specific methods
  // merged on top.
  const allMethods = Object.assign(
    {
      getStoreObjects: {
        request: {
          host: Arg(0),
          names: Arg(1, "nullable:array:string"),
          options: Arg(2, "nullable:json"),
        },
        response: RetVal(storeObjectType),
      },
      getFields: {
        request: {
          subType: Arg(0, "nullable:string"),
        },
        response: {
          value: RetVal("json"),
        },
      },
    },
    methods
  );
  childSpecs[typeName] = protocol.generateActorSpec({
    typeName,
    methods: allMethods,
  });
}
// Cookies store object: one entry per cookie as displayed in the panel.
types.addDictType("cookieobject", {
  // Identifier for this cookie entry (field name implies uniqueness).
  uniqueKey: "string",
  name: "string",
  // longstring: cookie values may exceed the normal string packet size.
  value: "longstring",
  path: "nullable:string",
  host: "string",
  hostOnly: "boolean",
  isSecure: "boolean",
  isHttpOnly: "boolean",
  creationTime: "number",
  lastAccessed: "number",
  expires: "number",
});
// Array of cookie store objects, plus paging information (total matching
// items and the offset of this page).
types.addDictType("cookiestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:cookieobject",
});
// Methods shared by the storage types that support editing and removing
// individual items; mixed into their specs via createStorageSpec.
const editRemoveMethods = {
  // NOTE: when merged by createStorageSpec, this getFields (no subType arg)
  // replaces the default getFields defined there.
  getFields: {
    request: {},
    response: {
      value: RetVal("json"),
    },
  },
  // Edit an existing item; `data` is a json bag with the edit details.
  editItem: {
    request: {
      data: Arg(0, "json"),
    },
    response: {},
  },
  // Remove the item called `name` under `host`.
  removeItem: {
    request: {
      host: Arg(0, "string"),
      name: Arg(1, "string"),
    },
    response: {},
  },
};
// Cookies actor spec: the shared edit/remove methods plus the cookie-only
// add/removeAll/removeAllSessionCookies methods, merged into one map.
createStorageSpec({
  typeName: "cookies",
  storeObjectType: "cookiestoreobject",
  methods: Object.assign({}, editRemoveMethods, {
    addItem: {
      request: {
        guid: Arg(0, "string"),
      },
      response: {},
    },
    removeAll: {
      request: {
        host: Arg(0, "string"),
        domain: Arg(1, "nullable:string"),
      },
      response: {},
    },
    removeAllSessionCookies: {
      request: {
        host: Arg(0, "string"),
        domain: Arg(1, "nullable:string"),
      },
      response: {},
    },
  }),
});
// Local Storage / Session Storage store object: a simple name/value pair.
types.addDictType("storageobject", {
  name: "string",
  value: "longstring",
});
// Array of Local Storage / Session Storage store objects, with paging info.
types.addDictType("storagestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:storageobject",
});
// Methods shared by localStorage and sessionStorage: the common edit/remove
// methods plus addItem and removeAll, merged into one map.
const storageMethods = Object.assign({}, editRemoveMethods, {
  addItem: {
    request: {
      guid: Arg(0, "string"),
      host: Arg(1, "nullable:string"),
    },
    response: {},
  },
  removeAll: {
    request: {
      host: Arg(0, "string"),
    },
    response: {},
  },
});
// localStorage and sessionStorage share the same store object shape and the
// same methods; generate a spec for each.
for (const typeName of ["localStorage", "sessionStorage"]) {
  createStorageSpec({
    typeName,
    storeObjectType: "storagestoreobject",
    methods: storageMethods,
  });
}
// extensionStorage store object: a name/value pair (the value is serialized
// to a string on the server side — see the getStoreObjects results asserted
// in the tests, e.g. {str: "{\"b\":123}"}).
types.addDictType("extensionobject", {
  name: "nullable:string",
  value: "nullable:longstring",
});
// Array of extensionStorage store objects, with paging info.
types.addDictType("extensionstoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:extensionobject",
});
// extensionStorage actor spec. No extra methods: only the common
// getStoreObjects/getFields from createStorageSpec, i.e. read-only.
createStorageSpec({
  typeName: "extensionStorage",
  storeObjectType: "extensionstoreobject",
  methods: {},
});
// Cache store object.
// Keys are unquoted for consistency with every other addDictType call in
// this file (the originals were quoted; behavior is identical).
types.addDictType("cacheobject", {
  url: "string",
  status: "string",
});
// Array of Cache store objects, with paging info.
types.addDictType("cachestoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:cacheobject",
});
// Cache storage spec
createStorageSpec({
  typeName: "Cache",
  storeObjectType: "cachestoreobject",
  methods: {
    // Both removal methods take a host and a name; the server-side actor
    // decides the removal granularity (presumably whole cache vs. single
    // entry — confirm against the Cache actor implementation).
    removeAll: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
    removeItem: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
  },
});
// Indexed DB store object
// This is a union on idb object, db metadata object and object store metadata
// object — hence almost every field is nullable: only the fields relevant to
// the particular level (db / object store / entry) are populated.
types.addDictType("idbobject", {
  uniqueKey: "string",
  name: "nullable:string",
  db: "nullable:string",
  objectStore: "nullable:string",
  origin: "nullable:string",
  version: "nullable:number",
  storage: "nullable:string",
  objectStores: "nullable:number",
  keyPath: "nullable:string",
  autoIncrement: "nullable:boolean",
  indexes: "nullable:string",
  value: "nullable:longstring",
});
// Array of Indexed DB store objects, with paging info.
types.addDictType("idbstoreobject", {
  total: "number",
  offset: "number",
  data: "array:nullable:idbobject",
});
// Result of Indexed DB delete operation: can block or throw error
types.addDictType("idbdeleteresult", {
  blocked: "nullable:boolean",
  error: "nullable:string",
});
createStorageSpec({
  typeName: "indexedDB",
  storeObjectType: "idbstoreobject",
  methods: {
    // Delete a whole database; the result may report that the deletion was
    // blocked or failed (see "idbdeleteresult").
    removeDatabase: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: RetVal("idbdeleteresult"),
    },
    // Clear entries matching host/name.
    removeAll: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
    // Remove a single entry matching host/name.
    removeItem: {
      request: {
        host: Arg(0, "string"),
        name: Arg(1, "string"),
      },
      response: {},
    },
  },
});
// Update notification object, carried by the "stores-update" event.
types.addDictType("storeUpdateObject", {
  changed: "nullable:json",
  deleted: "nullable:json",
  added: "nullable:json",
});
// Generate a type definition for an object with actors for all storage
// types: each property maps a storage typeName to an actor of that same type
// (e.g. {cookies: "cookies", ...}).
const storeListTypes = {};
for (const type of Object.keys(childSpecs)) {
  storeListTypes[type] = type;
}
types.addDictType("storelist", storeListTypes);
exports.childSpecs = childSpecs;
// Top-level storage actor spec: a single actor that hands out one child
// actor per storage type via listStores.
exports.storageSpec = protocol.generateActorSpec({
  typeName: "storage",
  /**
   * List of event notifications that the server can send to the client.
   *
   * - stores-update : When any store object in any storage type changes.
   * - stores-cleared : When all the store objects are removed.
   * - stores-reloaded : When all stores are reloaded. This generally means
   * that we should refetch everything again.
   */
  events: {
    "stores-update": {
      type: "storesUpdate",
      data: Arg(0, "storeUpdateObject"),
    },
    "stores-cleared": {
      type: "storesCleared",
      data: Arg(0, "json"),
    },
    "stores-reloaded": {
      type: "storesReloaded",
      data: Arg(0, "json"),
    },
  },
  methods: {
    // Returns a "storelist": one actor per registered storage type.
    listStores: {
      request: {},
      response: RetVal("storelist"),
    },
  },
});
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment