Programmatically edit browser history
Hi Firefox,
A site I visited recently moved to another URL. Is there a way to edit my browser history so I can still keep track of which pages I have visited?
For example:
if I have visited
https://site-old.com/topic1/page1.html
https://site-old.com/topic1/page2.html
I want my browser history to also contain
https://site-new.com/topic1/page1.html
https://site-new.com/topic1/page2.html
That way I can see that I have already visited page1 and page2 on topic1, since the links would be colored purple.
Would I be able to script something in the developer console to achieve that, or would I need to write an add-on?
thanks!
All Replies (3)
First, I use Python to save the history into a JSON file:
import os
import sqlite3
import json  # needed for json.dump below
from urllib.parse import urlparse

# fill in your own values
user = "<user>"
profile_dir = "<profile_dir>"
source_site = "<source_site>"
target_site = "<target_site>"

# tip: close Firefox (or work on a copy of places.sqlite) to avoid "database is locked" errors
data_path = f"C:\\Users\\{user}\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\{profile_dir}"
history_db = os.path.join(data_path, 'places.sqlite')
c = sqlite3.connect(history_db)

# the following is from trial and error - super messy and requires cleanup... but it works
cursor = c.cursor()
# only title and url are needed for the JSON output
select_statement = f"select title, url from moz_places where url like 'https://{source_site}%';"
cursor.execute(select_statement)
query_results = cursor.fetchall()

result = []
file_counter = 0
for title, url in query_results:
    # swap the host, keeping scheme, path and query intact
    parsed = urlparse(url)
    replaced_raw = parsed._replace(netloc=target_site)
    replaced = replaced_raw.geturl()
    site_name = replaced_raw.scheme + "://" + replaced_raw.hostname
    item = {
        "site": site_name,
        "title": title,
        "url": replaced[len(site_name):]
    }
    result.append(item)
    # write the entries out in chunks of 200 per file
    if len(result) == 200:
        with open(f"result{str(file_counter).rjust(5, '0')}.json", "w") as out:
            json.dump(result, out)
        result = []
        file_counter = file_counter + 1

# write whatever is left over
with open(f"result{str(file_counter).rjust(5, '0')}.json", "w") as out:
    json.dump(result, out)
Then paste the contents of one of the JSON files into the console window on the new site (pushState only accepts same-origin URLs) and assign it to a variable named pages:
pages = <ctrl + v>

pages.forEach(page => {
  const site = page.site
  const title = page.title
  const url = page.url
  history.pushState(site, title, url)
})
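For reference, after pasting one file's contents, pages should end up as an array shaped roughly like this (the titles and values below are only illustrative, reusing the example URLs from the question):

pages = [
  { "site": "https://site-new.com", "title": "Topic 1 - Page 1", "url": "/topic1/page1.html" },
  { "site": "https://site-new.com", "title": "Topic 1 - Page 2", "url": "/topic1/page2.html" }
]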
Updated as follows:
Note that each run in the console can only insert 200 entries into history, otherwise we get "Too many calls to Location or History APIs within a short timeframe." I tried to find a configuration setting that lifts this limit, but I could not.
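A possible workaround, if you would rather paste everything once instead of feeding in one 200-entry file at a time, is to pause between batches so the rate limiter can reset. This is only a sketch: the limit does not seem to be documented, so the batch size of 200 and the 10-second pause below are assumptions.

// sketch: push history entries in batches, pausing between batches,
// on the assumption that the "Too many calls to Location or History APIs
// within a short timeframe" limit is enforced per time window
async function pushInBatches(pages, batchSize = 200, pauseMs = 10000) {
  for (let i = 0; i < pages.length; i += batchSize) {
    for (const page of pages.slice(i, i + batchSize)) {
      history.pushState(page.site, page.title, page.url)
    }
    if (i + batchSize < pages.length) {
      // wait before the next batch
      await new Promise(resolve => setTimeout(resolve, pauseMs))
    }
  }
}

// usage in the console, after assigning the combined array to pages:
// await pushInBatches(pages)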
Chosen Solution
First, I use Python to save the history into a JSON file:
import os
import sqlite3
import json  # needed for json.dump below
from urllib.parse import urlparse

# fill in your own values
user = "<user>"
profile_dir = "<profile_dir>"
source_site = "<source_site>"
target_site = "<target_site>"

# tip: close Firefox (or work on a copy of places.sqlite) to avoid "database is locked" errors
data_path = f"C:\\Users\\{user}\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\{profile_dir}"
history_db = os.path.join(data_path, 'places.sqlite')
c = sqlite3.connect(history_db)

# the following is from trial and error - super messy and requires cleanup... but it works
cursor = c.cursor()
# only title and url are needed for the JSON output
select_statement = f"select title, url from moz_places where url like 'https://{source_site}%';"
cursor.execute(select_statement)
query_results = cursor.fetchall()

result = []
file_counter = 0
for title, url in query_results:
    # swap the host, keeping scheme, path and query intact
    parsed = urlparse(url)
    replaced_raw = parsed._replace(netloc=target_site)
    replaced = replaced_raw.geturl()
    site_name = replaced_raw.scheme + "://" + replaced_raw.hostname
    item = {
        "site": site_name,
        "title": title,
        "url": replaced[len(site_name):]
    }
    result.append(item)
    # write the entries out in chunks of 300 per file
    if len(result) == 300:
        with open(f"result{str(file_counter).rjust(5, '0')}.json", "w") as out:
            json.dump(result, out)
        result = []
        file_counter = file_counter + 1

# write whatever is left over
with open(f"result{str(file_counter).rjust(5, '0')}.json", "w") as out:
    json.dump(result, out)
Then paste the contents of one of the JSON files into the console window on the new site (pushState only accepts same-origin URLs) and assign it to a variable named pages:
pages = <ctrl + v>

pages.forEach(page => {
  const site = page.site
  const title = page.title
  const url = page.url
  history.pushState(site, title, url)
})