Skip to content

Instantly share code, notes, and snippets.

@caoglish
Last active July 29, 2024 06:40
Show Gist options
  • Save caoglish/01b38db2b676502ca6b894becac947e8 to your computer and use it in GitHub Desktop.
Save caoglish/01b38db2b676502ca6b894becac947e8 to your computer and use it in GitHub Desktop.
indexedDB exporter and importer for read it later extension (all tables in one JSON file)
// ==UserScript==
// @name IndexedDB Exporter
// @namespace http://tampermonkey.net/
// @version 0.1
// @description Export all IndexedDB tables into a single JSON file
// @author You
// @match *://*/*
// @grant none
// ==/UserScript==
(function() {
'use strict';

// Floating button (top-right of the page) that triggers the export.
const exportButton = document.createElement('button');
exportButton.textContent = 'Export IndexedDB';
exportButton.style.position = 'fixed';
exportButton.style.top = '10px';
exportButton.style.right = '10px';
exportButton.style.zIndex = '1000'; // CSS property values are strings; avoid implicit number coercion
document.body.appendChild(exportButton);
exportButton.addEventListener('click', exportIndexedDB, false);

/**
 * Opens the 'read-it-later' database, reads every object store, and
 * downloads the combined contents as one pretty-printed JSON file
 * keyed by store name (`<dbName>_export.json`).
 */
async function exportIndexedDB() {
    const dbName = 'read-it-later';
    const openRequest = indexedDB.open(dbName);

    openRequest.onsuccess = async function(event) {
        const db = event.target.result;
        try {
            const exportData = {};
            // Snapshot each store in its own read-only transaction.
            for (const storeName of Array.from(db.objectStoreNames)) {
                const transaction = db.transaction(storeName, 'readonly');
                const objectStore = transaction.objectStore(storeName);
                exportData[storeName] = await getAllData(objectStore);
            }

            const jsonString = JSON.stringify(exportData, null, 2);
            const blob = new Blob([jsonString], { type: 'application/json' });
            const url = URL.createObjectURL(blob);
            const link = document.createElement('a');
            link.href = url;
            link.download = `${dbName}_export.json`;
            document.body.appendChild(link);
            link.click();
            document.body.removeChild(link);
            // Release the blob URL so repeated exports do not leak memory.
            URL.revokeObjectURL(url);
        } catch (error) {
            // A failed store read previously surfaced only as an unhandled
            // promise rejection; report it explicitly instead.
            console.error('Error exporting database:', error);
        } finally {
            // Close the connection so it does not block future version upgrades.
            db.close();
        }
    };

    openRequest.onerror = function(event) {
        console.error('Error opening database:', event.target.error);
    };
}

/**
 * Promisifies IDBObjectStore.getAll().
 * @param {IDBObjectStore} objectStore - store opened in a live transaction
 * @returns {Promise<Array>} all records in the store
 */
function getAllData(objectStore) {
    return new Promise((resolve, reject) => {
        const getAllRequest = objectStore.getAll();
        getAllRequest.onsuccess = (event) => resolve(event.target.result);
        getAllRequest.onerror = (event) => reject(event.target.error);
    });
}
})();
// ==UserScript==
// @name IndexedDB Importer
// @namespace http://tampermonkey.net/
// @version 0.1
// @description Import data from a single JSON file into IndexedDB
// @author You
// @match *://*/*
// @grant none
// ==/UserScript==
(function() {
'use strict';

// Floating file picker (top-right of the page) that triggers the import.
const fileInput = document.createElement('input');
fileInput.type = 'file';
fileInput.id = 'fileInput';
fileInput.style.position = 'fixed';
fileInput.style.top = '10px';
fileInput.style.right = '10px';
fileInput.style.zIndex = '1000'; // CSS property values are strings; avoid implicit number coercion
document.body.appendChild(fileInput);
fileInput.addEventListener('change', handleFileSelect, false);

/**
 * Parses the selected JSON export file and writes its contents into the
 * 'read-it-later' database, creating any missing object stores first via
 * a version upgrade.
 * @param {Event} event - 'change' event fired by the file input
 */
async function handleFileSelect(event) {
    const file = event.target.files[0];
    if (!file) {
        // The user cancelled the picker; nothing to import.
        return;
    }

    const dbName = 'read-it-later';
    let data;
    try {
        data = await readFileAsJson(file);
    } catch (error) {
        // Invalid JSON or a read failure previously left an unhandled
        // promise rejection; report it and bail out.
        console.error('Error reading import file:', error);
        return;
    }

    const openRequest = indexedDB.open(dbName);
    openRequest.onsuccess = function(event) {
        const db = event.target.result;
        const existingStores = Array.from(db.objectStoreNames);
        const storesToCreate = Object.keys(data).filter((storeName) => !existingStores.includes(storeName));

        if (storesToCreate.length > 0) {
            // Object stores can only be created inside an 'upgradeneeded'
            // handler, so close this connection and reopen with a bumped version.
            const newVersion = db.version + 1;
            db.close();
            const upgradeRequest = indexedDB.open(dbName, newVersion);
            upgradeRequest.onupgradeneeded = function(event) {
                const upgradeDb = event.target.result;
                storesToCreate.forEach((storeName) => {
                    upgradeDb.createObjectStore(storeName, { keyPath: 'id', autoIncrement: true });
                });
            };
            upgradeRequest.onsuccess = function(event) {
                importData(event.target.result, data);
            };
            upgradeRequest.onerror = function(event) {
                console.error('Error upgrading database:', event.target.error);
            };
        } else {
            // All stores already exist; write the data directly.
            importData(db, data);
        }
    };
    openRequest.onerror = function(event) {
        console.error('Error opening database:', event.target.error);
    };
}

/**
 * Upserts every record of every store in `data` into `db`, one
 * read-write transaction per store.
 * @param {IDBDatabase} db - open connection that already has all the stores
 * @param {Object<string, Array>} data - records keyed by object-store name
 */
function importData(db, data) {
    for (const storeName in data) {
        const transaction = db.transaction(storeName, 'readwrite');
        const objectStore = transaction.objectStore(storeName);
        // put() (rather than add()) overwrites records whose key already exists.
        data[storeName].forEach((item) => {
            objectStore.put(item);
        });
        transaction.oncomplete = function() {
            console.log(`Data imported into ${storeName} successfully.`);
        };
        transaction.onerror = function(event) {
            console.error(`Error importing data into ${storeName}:`, event.target.error);
        };
    }
}

/**
 * Promisifies FileReader to read `file` as text and parse it as JSON.
 * @param {File} file - file chosen by the user
 * @returns {Promise<any>} the parsed JSON value; rejects on read or parse failure
 */
function readFileAsJson(file) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = (event) => {
            try {
                resolve(JSON.parse(event.target.result));
            } catch (error) {
                reject(error);
            }
        };
        reader.onerror = (event) => reject(event.target.error);
        reader.readAsText(file);
    });
}
})();
@caoglish
Copy link
Author

The Read It Later extension does not have an exporter or importer: read-it-later.

I made these scripts to export and import the data, for backup or migration.

As the Read It Later page runs under a chrome-extension domain (where Tampermonkey scripts cannot be injected automatically), the scripts need to be run manually in the console on the Read It Later page.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment