2016-03-15 08:51:06 -05:00
|
|
|
#!/usr/bin/env python3
|
2015-11-01 14:08:45 -06:00
|
|
|
#
|
|
|
|
# Bookmark management utility
|
|
|
|
#
|
|
|
|
# Copyright (C) 2015 Arun Prakash Jana <engineerarun@gmail.com>
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2015-12-19 14:16:24 -06:00
|
|
|
# along with buku. If not, see <http://www.gnu.org/licenses/>.
|
2015-11-01 14:04:41 -06:00
|
|
|
|
2015-11-04 08:11:16 -06:00
|
|
|
import sys
|
2015-11-06 14:26:08 -06:00
|
|
|
import os
|
2015-11-01 14:04:41 -06:00
|
|
|
import sqlite3
|
2015-11-04 08:11:16 -06:00
|
|
|
from getopt import getopt, GetoptError
|
2015-11-06 13:26:20 -06:00
|
|
|
import readline
|
2015-11-06 13:59:57 -06:00
|
|
|
import webbrowser
|
2015-11-06 16:32:08 -06:00
|
|
|
import html.parser as HTMLParser
|
2015-11-07 07:29:38 -06:00
|
|
|
from http.client import HTTPConnection
|
2015-11-06 17:07:15 -06:00
|
|
|
from http.client import HTTPSConnection
|
2015-11-07 13:19:59 -06:00
|
|
|
from urllib.parse import urljoin, unquote
|
2016-03-16 10:10:55 -05:00
|
|
|
import signal
|
2015-11-04 08:11:16 -06:00
|
|
|
|
2015-12-19 10:23:29 -06:00
|
|
|
# Import libraries needed for encryption
# If any of them is unavailable (notably the third-party PyCrypto package),
# fall through and disable the -k/-l encryption features instead of failing.
try:
    import getpass
    import hashlib
    from Crypto.Cipher import AES
    from Crypto import Random
    import struct

    # Everything imported fine: encryption support is available.
    no_crypto = False
    BLOCKSIZE = 65536       # read size used when hashing the DB file
    SALT_SIZE = 32          # 256-bit salt prepended to the encrypted file
    CHUNKSIZE = 0x80000     # Read/write 512 KB chunks
except ImportError:
    # PyCrypto (or a stdlib module) is missing: run without encryption.
    no_crypto = True
|
2015-12-19 10:23:29 -06:00
|
|
|
|
|
|
|
|
2015-11-04 13:07:10 -06:00
|
|
|
# Globals — one flag/value per command-line option, set while parsing argv
addurl = False       # -a: add a new bookmark
addindex = None      # -i: index to insert the new record at
online = False       # -w: fetch title data from the web
delete = False       # -d / -D: delete record(s)
openurl = None       # -o: index of the URL to open in a browser
show = False         # -p / -P: show record(s)
showindex = None     # -p: index of the record to show
showOpt = 0          # -x: output format for -P (1: URL only, 2: URL and tag)
showTags = False     # -g: list all unique tags
search = False       # -s: search matching ANY keyword
searchAll = False    # -S: search matching ALL keywords
entry = None         # index argument captured from the command line
update = False       # -u: update an existing record
debug = False        # -z: verbose/debug output
titleData = None     # page title collected by the HTML parser
titleManual = None   # -m: manually supplied title
refresh = False      # -R: refresh all bookmarks
replace = False      # -r: replace a tag
encrypt = False      # -l: encrypt the DB file
decrypt = False      # -k: decrypt the DB file
iterations = 8       # -t: hash iterations used to derive the key (was int(8); the int() call was redundant)
2015-11-04 13:07:10 -06:00
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
2015-12-19 10:23:29 -06:00
|
|
|
|
2015-12-19 14:16:24 -06:00
|
|
|
# Show usage of buku and exit
def usage():
    """Print the help text, version and license info, then exit with status 1."""
    helptext = (
        "Usage: OPTIONS [URL] [TAGS] [KEYWORDS ...]\n",
        "Bookmark manager. Your private Google.\n",
        "Options",
        " -a URL tag 1, tag 2, ... add URL as bookmark with comma separated tags",
        " -d N delete entry at DB index N (from -P), move last entry to N",
        " -D delete ALL bookmarks",
        " -g show all tags (sorted alphabetically)",
        " -i N insert entry at DB index N, useful to fill deleted index",
        " -k decrypt (unlock) database file",
        " -l encrypt (lock) database file",
        " -m manually add or update the title offline",
        " -o N open URL at DB index N in browser",
        " -p N show details of bookmark record at DB index N",
        " -P show all bookmarks along with index from DB",
        " -R refresh all bookmarks, tags retained",
        " -r oldtag [newtag] replace oldtag with newtag in DB, deletes oldtag if newtag empty",
        " -s keyword(s) search all bookmarks for a (partial) tag or any keyword",
        " -S keyword(s) search all bookmarks for a (partial) tag or all keywords",
        " -t N use N (> 0) hash iterations to generate key, works with -k, -l",
        " -u N update all fields of entry at DB index N",
        " -w fetch title info from web, works with -a, -i, -u",
        " -x N works with -P, N=1: show only URL, N=2: show URL and tag",
        " -z show debug information",
        " any other option shows help and exits buku\n",
        "Keys",
        " 1-N open Nth search result in browser. Enter exits buku.\n",
        "Version 1.8",
        "Copyright (C) 2015 Arun Prakash Jana <engineerarun@gmail.com>",
        "License: GPLv3",
        "Webpage: https://github.com/jarun/buku",
    )
    for line in helptext:
        print(line)
    sys.exit(1)
|
2015-11-01 14:04:41 -06:00
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2016-03-23 09:58:21 -05:00
|
|
|
# Get page response data
def getPageResp(url, redir=False):
    """Open a GET request for url and return (response, connection).

    Returns (None, None) if the URL is neither http:// nor https://.
    When redir is False the path component is extracted from the URL;
    when True (following a redirect) the URL is requested as given.
    The caller is responsible for closing the returned connection.
    """
    # Fix: the original used url.find(...) >= 0, which matched the scheme
    # anywhere in the string — e.g. "http://a?u=https://b" was treated as
    # an HTTPS URL and sliced at the wrong offset. startswith() checks the
    # prefix only.
    if url.startswith("https://"):      # Secure connection
        server = url[8:]
        conn_class = HTTPSConnection
    elif url.startswith("http://"):     # Insecure connection
        server = url[7:]
        conn_class = HTTPConnection
    else:
        return (None, None)

    # Split "host/path": the request path replaces url unless we are
    # following a redirect (then the full redirect target is used as-is).
    marker = server.find("/")
    if marker > 0:
        if redir == False:
            url = server[marker:]
        server = server[:marker]

    urlconn = conn_class(server, timeout=30)

    if debug:
        print("server: [%s]" % server)
        print("URL: [%s]" % unquote(url))

    urlconn.request("GET", unquote(url))
    resp = urlconn.getresponse()
    return (resp, urlconn)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Fetch title from URL
def fetchTitle(url):
    """Fetch the page at url and return its title text, or '' on failure.

    Follows at most one 301/302 redirect. The title itself is delivered
    through the module-global titleData, which getTitleData() fills in
    while parsing the response body.
    """
    global titleData

    urlconn = None

    try:
        resp, urlconn = getPageResp(url, False)
        if resp is None:
            return ''

        if resp.status != 200:
            # Handle first redirection
            if resp.status in (301,302,):
                if debug:
                    print("Location header: %s" % resp.getheader('location', ''))

                # Resolve a relative Location header against the original URL
                redirurl = urljoin(url, resp.getheader('location', ''))
                # presumably a Google rate-limit/captcha interstitial — TODO confirm
                if redirurl.find("sorry/IndexRedirect?") >= 0:
                    print("ERROR: Connection blocked due to unusual activity.")
                else:
                    if debug:
                        print("Trying to fetch redirected URL.")
                    # Close the first connection before opening the redirect target
                    urlconn.close()
                    resp, urlconn = getPageResp(redirurl, True)
                    if resp is not None:
                        if resp.status != 200:
                            print("ERROR on retry:", str(resp.status), ": ", resp.reason)
                        else:
                            getTitleData(resp)
            else: # if resp.status in (301,302,):
                print("ERROR:", str(resp.status), ": ", resp.reason)
        else: # if resp.status != 200:
            getTitleData(resp)
    except Exception as e:
        print("Exception: %s" % e)
    finally:
        # Always release the connection, whichever path was taken
        if urlconn is not None:
            urlconn.close()

    if titleData is None:
        return ''
    # Collapse the title to a single trimmed line
    return titleData.strip().replace("\n","")
|
2016-03-22 15:23:46 -05:00
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
# Initialize the database connection
# Create bookmarks table if not existing
def initdb():
    """Open (creating if necessary) the bookmarks DB and return (conn, cur).

    The DB lives at ~/.cache/buku/bookmarks.db. Exits if only the
    encrypted variant of the file exists (it must be unlocked first).
    """
    # Fix: os.environ.get('HOME') returns None when HOME is unset, which
    # crashes os.path.join. expanduser('~') falls back to the pw database.
    dbpath = os.path.join(os.path.expanduser('~'), '.cache', 'buku')
    if not os.path.exists(dbpath):
        os.makedirs(dbpath)

    dbfile = os.path.join(dbpath, 'bookmarks.db')

    encpath = os.path.join(dbpath, 'bookmarks.db.enc')
    # Notify if DB file needs to be decrypted first
    if os.path.exists(encpath) and not os.path.exists(dbfile):
        print("Unlock database first")
        sys.exit(1)

    # Show info on first creation
    if no_crypto == False and not os.path.exists(dbfile):
        print("DB file is being created. You may want to encrypt it later.")

    # Create a connection
    conn = sqlite3.connect(dbfile)
    cur = conn.cursor()

    # Create table if it doesn't exist
    cur.execute('''CREATE TABLE if not exists bookmarks \
(id integer PRIMARY KEY, URL text NOT NULL UNIQUE, metadata text, tags text)''')
    conn.commit()
    return (conn, cur)
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2015-11-10 05:20:30 -06:00
|
|
|
# Add a new bookmark or update an existing record at index
def AddUpdateEntry(conn, cur, keywords, index):
    """Insert (index is None) or update (index set) a bookmark record.

    keywords[0] is the URL; keywords[1:] are the comma-separated tags as
    typed on the command line. The title comes from the -m option
    (titleManual), from the web (-w / online), or stays empty.
    """
    global titleManual
    global online

    tags = ','   # tags are stored with a leading and trailing comma: ',a,b,'
    meta = ''
    url = keywords[0]

    # Cleanse and get the tags
    if len(keywords) > 1:
        for tag in keywords[1:]:
            # Normalize: keep at most one trailing comma on each token
            if tag[-1] == ',':
                tag = tag.strip(',') + ','
            else:
                tag = tag.strip(',')

            # Token was only commas — nothing to add
            if tag == ',':
                continue

            # Join multi-word tags with spaces; ',' ends the previous tag
            if tags[-1] == ',':
                tags += tag
            else:
                tags += ' ' + tag

    # Guarantee the trailing comma delimiter
    if tags[-1] != ',':
        tags += ','

    if titleManual != None:
        meta = titleManual
    elif online == True:
        meta = fetchTitle(url)
        if meta == '':
            print("\x1B[91mTitle: []\x1B[0m")
        else:
            print("Title: [%s]" % meta)

    if index == None: # Insert a new entry
        try:
            if addindex == None: # addindex is index number to insert record at
                cur.execute('INSERT INTO bookmarks(URL, metadata, tags) VALUES (?, ?, ?)', (url, meta, tags,))
            else:
                cur.execute('INSERT INTO bookmarks(id, URL, metadata, tags) VALUES (?, ?, ?, ?)', (int(addindex), url, meta, tags,))
            conn.commit()
            print("Added at index %d" % cur.lastrowid)
        except sqlite3.IntegrityError:
            # Either the URL or the explicit index already exists
            for row in cur.execute("SELECT id from bookmarks where URL LIKE ?", (url,)):
                print("URL already exists at index %s" % row[0])
                return

            # No duplicate URL found, so the id collision caused the error
            print("Index %s exists" % addindex)
    else: # Update an existing entry
        try:
            cur.execute("UPDATE bookmarks SET URL = ?, metadata = ?, tags = ? WHERE id = ?", (url, meta, tags, int(index),))
            conn.commit()
            if cur.rowcount == 1:
                print("Updated index %d" % int(index))
            else:
                print("No matching index")
        except sqlite3.IntegrityError:
            print("URL already exists")
|
2015-11-05 09:18:51 -06:00
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2016-03-07 07:14:36 -06:00
|
|
|
# Search the database for a tag or matching URL or Title info
def searchdb(cur, keywords):
    """Search bookmarks for keywords, print matches, and offer to open them.

    The full keyword string is always matched against tags; individual
    keywords are matched against URL and title — all of them (searchAll,
    -S) or any of them (-s). After printing, an interactive prompt lets
    the user open results by number; a non-number exits.
    """
    # Join the keywords back into a single space-separated tag query
    searchtag = ''
    for token in keywords:
        searchtag += token + " "
    searchtag = searchtag[0:-1]

    arguments = []
    arguments.append(searchtag)
    placeholder = "'%' || ? || '%'"
    query = "SELECT id, url, metadata, tags FROM bookmarks WHERE tags LIKE (%s)" % placeholder
    if searchAll == True: # Match all keywords in URL or Title
        query += " OR ("
        for token in keywords:
            query += "URL LIKE (%s) AND " % (placeholder)
            arguments.append(token)

        # Drop the trailing " AND " before closing the group
        query = query[:-5] + ") OR ("

        for token in keywords:
            query += "metadata LIKE (%s) AND " % (placeholder)
            arguments.append(token)

        query = query[:-5] + ")"

    else: # Match any keyword in URL or Title
        for token in keywords:
            query += " OR URL LIKE (%s) OR metadata LIKE (%s)" % (placeholder, placeholder)
            arguments.append(token)
            arguments.append(token)

    if debug:
        print("\"%s\", (%s)" % (query, arguments))

    count = 0
    results = []   # URLs in display order, indexed by result number - 1
    for row in cur.execute(query, arguments):
        results.append(row[1])
        count += 1
        print("\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m (%d)\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (count, row[1], row[0], row[2], row[3][1:-1]))

    if count == 0:
        return

    print("")

    # Interactive loop: open results by number until non-number/EOF
    while True:
        try:
            nav = input("Result number to open: ")
        except EOFError:
            return

        if is_int(nav):
            index = int(nav) - 1
            if index < 0 or index >= count:
                print("Index out of bound")
                continue

            try:
                openurl = unquote(results[int(nav) - 1])
                browser_open(openurl)
            except Exception as e:
                print("Exception: %s" % e)
        else:
            # Any non-numeric input (including plain Enter) exits
            break
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2016-03-20 00:39:07 -05:00
|
|
|
# Move last row to empty position to compact DB
def compactDB(conn, cur, index):
    """Fill the hole at `index` by relocating the highest-id row there."""
    cur.execute('SELECT MAX(id) from bookmarks')
    for (max_id,) in cur.fetchall():
        if max_id > index:
            # Copy the last record, drop it, and re-insert it at the hole
            cur.execute('SELECT id, URL, metadata, tags FROM bookmarks WHERE id = ?', (max_id,))
            for last in cur.fetchall():
                cur.execute('DELETE FROM bookmarks WHERE id = ?', (last[0],))
                conn.commit()
                cur.execute('INSERT INTO bookmarks(id, URL, metadata, tags) VALUES (?, ?, ?, ?)',
                            (index, last[1], last[2], last[3],))
                conn.commit()
                print("Index %d moved to %d" % (last[0], index))
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
# Delete a single record or remove the table
def cleardb(conn, cur, index):
    """Delete one bookmark (index set) or all of them (index is None).

    Deleting everything drops the table after an interactive 'y'
    confirmation. Deleting one record compacts the DB afterwards so
    indices stay contiguous.
    """
    if index == None: # Remove the table
        resp = input("ALL bookmarks will be removed. Enter \x1b[1my\x1b[21m to confirm: ")
        if resp != 'y':
            print("No bookmarks deleted")
            return

        cur.execute('DROP TABLE if exists bookmarks')
        conn.commit()
        print("All bookmarks deleted")
    else: # Remove a single entry
        try:
            cur.execute('DELETE FROM bookmarks WHERE id = ?', (int(index),))
            conn.commit()
            if cur.rowcount == 1:
                print("Removed index %d" % int(index))
                # Move the last record into the freed slot
                compactDB(conn, cur, int(index))
            else:
                print("No matching index")
        except IndexError:
            print("Index out of bound")
|
2015-11-05 09:18:51 -06:00
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
|
|
|
# Print all records in the table
def printdb(cur, index):
    """Print every bookmark (index is None) or just the one at index.

    The module-global showOpt selects the -P output format:
    1 prints id and URL, 2 adds tags, anything else the full record.
    """
    global showOpt

    if index is None:  # Show all entries
        for rec in cur.execute('SELECT * FROM bookmarks'):
            if showOpt == 1:
                print("%s %s" % (rec[0], rec[1]))
            elif showOpt == 2:
                print("%s %s %s" % (rec[0], rec[1], rec[3][1:-1]))
            else:
                print("\x1B[1m\x1B[93m%s. \x1B[0m\x1B[92m%s\x1B[0m\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (rec[0], rec[1], rec[2], rec[3][1:-1]))
        return

    # Show record at index
    try:
        for rec in cur.execute("SELECT * FROM bookmarks WHERE id = ?", (int(index),)):
            print("\x1B[1m\x1B[93m%s. \x1B[0m\x1B[92m%s\x1B[0m\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (rec[0], rec[1], rec[2], rec[3][1:-1]))
            return
        print("No matching index")
    except IndexError:
        print("Index out of bound")
|
2015-11-05 09:18:51 -06:00
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2016-03-19 13:08:18 -05:00
|
|
|
# Show all unique tags ordered alphabetically
def showUniqueTags(cur):
    """Print every distinct tag in the DB, numbered, case-insensitively sorted."""
    seen = []
    for (tagstr,) in cur.execute('SELECT DISTINCT tags FROM bookmarks'):
        # ',' alone means the record has no tags
        if tagstr == ',':
            continue
        for tag in tagstr.strip(',').split(','):
            if tag not in seen:
                seen.append(tag)

    for num, tag in enumerate(sorted(seen, key=str.lower), start=1):
        print("%6d. %s" % (num, tag))
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-03-19 10:55:13 -05:00
|
|
|
# Replace or delete tags in DB
def replaceTags(conn, cur, orig, new):
    """Replace tag `orig` with `new` in every record; delete it if `new` is empty."""
    changed = False
    removing = False

    # Tags are stored comma-delimited: ',a,b,' — wrap for exact matching
    oldtag = ',' + orig + ','
    newtag = new.strip(',')
    if newtag == '':
        newtag = ','
        removing = True
    else:
        newtag = ',' + newtag + ','

    if oldtag == newtag:
        print("Tags are same.")
        return

    cur.execute("SELECT id, tags FROM bookmarks WHERE tags LIKE ?", ('%' + oldtag + '%',))
    for rec_id, tagstr in cur.fetchall():
        # If the record already carries the new tag, just drop the old one
        if removing == False and tagstr.find(newtag) >= 0:
            newtag = ','

        cur.execute("UPDATE bookmarks SET tags = ? WHERE id = ?", (tagstr.replace(oldtag, newtag), rec_id,))
        print("Updated index %d" % rec_id)
        changed = True

    if changed:
        conn.commit()
|
2016-03-19 10:55:13 -05:00
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-10 05:20:30 -06:00
|
|
|
# Fetch index and open URL in browser
def fetchopen(index):
    """Look up the bookmark at `index` and open its URL in the browser.

    NOTE(review): relies on the module-global `cur` (assigned in the main
    section), not on a parameter — callers must have initialized the DB.
    """
    try:
        for row in cur.execute("SELECT URL FROM bookmarks WHERE id = ?", (int(index),)):
            url = unquote(row[0])
            browser_open(url)
            return
        # Loop body never ran: no record with this id
        print("No matching index")
    except IndexError:
        print("Index out of bound")
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
# Check if a value is a digit
def is_int(string):
    """Return True if `string` converts cleanly to int, else False."""
    try:
        int(string)
        return True
    except (TypeError, ValueError):
        # Narrowed from a bare `except`, which also swallowed
        # KeyboardInterrupt/SystemExit. int() raises ValueError for
        # non-numeric strings and TypeError for non-string/number types.
        return False
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
|
|
|
|
2015-11-10 17:07:14 -06:00
|
|
|
# Fetch titleData from GET response
def getTitleData(resp):
    """Parse the response body and stash its <title> in global titleData.

    The charset comes from the Content-Type header, defaulting to utf-8.
    BMHTMLParser sets titleData as a side effect of feed().
    """
    charset = ''
    charset = resp.headers.get_content_charset()
    if charset == None:
        charset = 'utf-8'
    if debug:
        print("charset: %s" % charset)

    parser = BMHTMLParser()
    try:
        if charset == 'utf-8':
            # Be lenient about mojibake when we only guessed the charset
            parser.feed(resp.read().decode(charset, "replace"))
        else:
            parser.feed(resp.read().decode(charset))
    except Exception as e:
        # The parser aborts itself once the title is found (see
        # BMHTMLParser), so exceptions here are only worth a debug note.
        if debug:
            print("Exception: %s" % e)
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
# Parse HTML page for Title info
class BMHTMLParser(HTMLParser.HTMLParser):
    """HTML parser that captures the first non-empty <title> text.

    The collected title is published through the module-global titleData.
    Parsing is deliberately aborted (via reset()) as soon as the title is
    complete, so the rest of the document is never processed.
    """
    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.inTitle = False   # True while between <title> and </title>
        self.data = ""         # accumulated title text
        self.lasttag = None    # name of the most recently opened tag

    def handle_starttag(self, tag, attrs):
        self.inTitle = False
        if tag == "title":
            self.inTitle = True
            self.lasttag = tag

    def handle_endtag(self, tag):
        global titleData

        if tag == "title":
            self.inTitle = False
            if self.data != "":
                titleData = self.data
                # Resetting mid-feed stops further parsing; any exception
                # this causes is swallowed by the caller (getTitleData).
                self.reset() # We have received title data, exit parsing

    def handle_data(self, data):
        # Titles may arrive in several chunks (entity refs split them)
        if self.lasttag == "title" and self.inTitle == True:
            self.data += data
|
2015-11-06 16:32:08 -06:00
|
|
|
|
|
|
|
|
2015-11-08 12:56:52 -06:00
|
|
|
|
2015-11-10 03:11:05 -06:00
|
|
|
# Open a URL in browser
def browser_open(url):
    """Open url in the default browser with the browser's noise suppressed.

    stdout/stderr are temporarily redirected to /dev/null so the browser
    process cannot spam the terminal; they are restored afterwards.
    """
    url = url.replace("%22", "\"")

    # Park the real stdout/stderr and point fds 1/2 at /dev/null
    _stderr = os.dup(2)
    os.close(2)
    _stdout = os.dup(1)
    os.close(1)
    fd = os.open(os.devnull, os.O_RDWR)
    os.dup2(fd, 2)
    os.dup2(fd, 1)
    err = None
    try:
        webbrowser.open(url)
    except Exception as e:
        # Fix: the original printed here, while stdout was still pointed
        # at /dev/null, so the message was silently discarded. Remember
        # the error and report it after the fds are restored.
        err = e
    finally:
        os.close(fd)
        os.dup2(_stderr, 2)
        os.dup2(_stdout, 1)

    if err is not None:
        print("Browser Exception: %s" % err)
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-12-19 13:55:05 -06:00
|
|
|
# Get the SHA256 hash of a file
def get_filehash(filepath):
    """Return the SHA256 digest (bytes) of the file at filepath."""
    hasher = hashlib.sha256()
    with open(filepath, 'rb') as fp:
        chunk = fp.read(BLOCKSIZE)
        while chunk:
            hasher.update(chunk)
            chunk = fp.read(BLOCKSIZE)
    return hasher.digest()
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-12-19 10:23:29 -06:00
|
|
|
# Encrypt the bookmarks database file
def encrypt_file():
    """Encrypt ~/.cache/buku/bookmarks.db to bookmarks.db.enc and exit.

    File layout: original size (<Q), 32-byte salt, 16-byte IV, 32-byte
    SHA256 of the plaintext DB, then AES-256-CBC ciphertext. The key is
    sha256(password + salt) iterated `iterations` times. The flat DB
    file is removed on success.
    """
    dbpath = os.path.join(os.environ.get('HOME'), '.cache', 'buku', 'bookmarks.db')
    encpath = dbpath + '.enc'
    if not os.path.exists(dbpath):
        print("%s missing. Already encrypted?" % dbpath)
        sys.exit(1)

    # If both encrypted file and flat file exist, error out
    if os.path.exists(dbpath) and os.path.exists(encpath):
        print("ERROR: Both encrypted and flat DB files exist!")
        sys.exit(1)

    password = ''
    password = getpass.getpass()
    passconfirm = getpass.getpass()
    if password == '':
        print("Empty password");
        sys.exit(1)
    if password != passconfirm:
        print("Passwords don't match");
        sys.exit(1)

    # Get SHA256 hash of DB file
    dbhash = get_filehash(dbpath)

    # Generate random 256-bit salt and key
    salt = Random.get_random_bytes(SALT_SIZE)
    key = (password + salt.decode('utf-8', "replace")).encode('utf-8')
    for i in range(iterations):
        key = hashlib.sha256(key).digest()

    iv = Random.get_random_bytes(16)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    filesize = os.path.getsize(dbpath)

    with open(dbpath, 'rb') as infile:
        with open(encpath, 'wb') as outfile:
            outfile.write(struct.pack('<Q', filesize))
            outfile.write(salt)
            outfile.write(iv)

            # Embed DB file hash in encrypted file
            outfile.write(dbhash)

            while True:
                chunk = infile.read(CHUNKSIZE)
                if len(chunk) == 0:
                    break
                elif len(chunk) % 16 != 0:
                    # Fix: chunk is bytes (file opened 'rb'); padding with
                    # the str ' ' raised TypeError on Python 3 whenever the
                    # file size was not a multiple of the AES block size.
                    chunk += b' ' * (16 - len(chunk) % 16)

                outfile.write(cipher.encrypt(chunk))

    os.remove(dbpath)
    print("File encrypted")
    sys.exit(0)
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-12-19 11:15:17 -06:00
|
|
|
# Decrypt the bookmarks database file
def decrypt_file():
    """Decrypt bookmarks.db.enc back to bookmarks.db and exit.

    Reads the header written by encrypt_file() (size, salt, IV, SHA256
    of the original DB), derives the key the same way, decrypts, then
    verifies the hash before removing the encrypted file.
    """
    dbpath = os.path.join(os.environ.get('HOME'), '.cache', 'buku', 'bookmarks.db')
    encpath = dbpath + '.enc'
    if not os.path.exists(encpath):
        print("Error: %s missing" % encpath)
        sys.exit(1)

    # If both encrypted file and flat file exist, error out
    if os.path.exists(dbpath) and os.path.exists(encpath):
        print("ERROR: Both encrypted and flat DB files exist!")
        sys.exit(1)

    password = ''
    password = getpass.getpass()
    if password == '':
        print("Decryption failed");
        sys.exit(1)

    with open(encpath, 'rb') as infile:
        # Original plaintext size, for stripping the AES padding
        origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]

        # Read 256-bit salt and generate key
        salt = infile.read(32)
        key = (password + salt.decode('utf-8', "replace")).encode('utf-8')
        for i in range(iterations):
            key = hashlib.sha256(key).digest()

        iv = infile.read(16)
        cipher = AES.new(key, AES.MODE_CBC, iv)

        # Get original DB file's SHA256 hash from encrypted file
        enchash = infile.read(32)

        with open(dbpath, 'wb') as outfile:
            while True:
                chunk = infile.read(CHUNKSIZE)
                if len(chunk) == 0:
                    break;

                outfile.write(cipher.decrypt(chunk))

            # Drop the space padding added during encryption
            outfile.truncate(origsize)

    # Match hash of generated file with that of original DB file
    dbhash = get_filehash(dbpath)
    if dbhash != enchash:
        # Wrong password (or corrupt file): discard the bad plaintext
        os.remove(dbpath)
        print("Decryption failed");
    else:
        os.remove(encpath)
        print("File decrypted")

    sys.exit(0)
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-03-16 10:10:55 -05:00
|
|
|
# SIGINT handler
def sigint_handler(signum, frame):
    """Abort on Ctrl-C with a short notice and exit status 1."""
    sys.stderr.write('\nInterrupted.\n')
    sys.exit(1)

signal.signal(signal.SIGINT, sigint_handler)
|
|
|
|
|
|
|
|
|
|
|
|
|
2015-11-01 14:04:41 -06:00
|
|
|
# Main starts here
# ----------------
# Parsed options and positional arguments, filled in by getopt below
optlist = keywords = None

# No arguments at all: show usage and exit
if len(sys.argv) < 2:
    usage()
2015-11-08 12:56:52 -06:00
|
|
|
# Check cmdline options
#
# Parses the command line into the action flags declared earlier in the
# file (addurl, update, delete, refresh, ...). Add, update and delete are
# mutually exclusive; any conflict or malformed argument falls through to
# usage(), which exits.
try:
    optlist, keywords = getopt(sys.argv[1:], "d:i:m:o:p:t:u:x:aDgklPRrsSwz")
    if len(optlist) < 1:
        usage()

    for opt in optlist:
        # -a: add a new bookmark
        if opt[0] == "-a":
            if update == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            addurl = True
        # -d index: delete the bookmark at index (must be a positive integer)
        elif opt[0] == "-d":
            if addurl == True or update == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            entry = opt[1]
            if int(entry) <= 0:
                usage()

            delete = True
        # -D: delete ALL bookmarks
        elif opt[0] == "-D":
            if addurl == True or update == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            delete = True
        # -g: list unique tags
        elif opt[0] == "-g":
            showTags = True
        # -i index: add a bookmark at a specific index
        elif opt[0] == "-i":
            if update == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            addindex = opt[1]
            if int(addindex) <= 0:
                usage()

            addurl = True
        # -k: decrypt the DB file (requires PyCrypto)
        elif opt[0] == "-k":
            if no_crypto == True:
                print("Error: PyCrypto missing")
                # Missing dependency is an error: exit non-zero
                sys.exit(1)

            decrypt = True
        # -l: encrypt the DB file (requires PyCrypto)
        elif opt[0] == "-l":
            if no_crypto == True:
                print("Error: PyCrypto missing")
                # Missing dependency is an error: exit non-zero
                sys.exit(1)

            encrypt = True
        # -m title: set the title manually instead of fetching it
        elif opt[0] == "-m":
            titleManual = opt[1]
        # -o index: open the bookmark at index in the browser
        elif opt[0] == "-o":
            if not opt[1].isdigit():
                usage()

            openurl = opt[1]
            if int(openurl) <= 0:
                usage()
        # -p index: print the bookmark at index
        elif opt[0] == "-p":
            if not opt[1].isdigit():
                usage()

            showindex = opt[1]
            if int(showindex) <= 0:
                usage()

            show = True
        # -P: print all bookmarks
        elif opt[0] == "-P":
            show = True
        # -R: refresh titles of all bookmarks from the web
        elif opt[0] == "-R":
            if addurl == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            online = True
            refresh = True
        # -r: replace a tag in the DB
        elif opt[0] == "-r":
            replace = True
        # -s: search bookmarks for ANY keyword
        elif opt[0] == "-s":
            search = True
        # -S: search bookmarks matching ALL keywords
        elif opt[0] == "-S":
            searchAll = True
            search = True
        # -t n: number of hash iterations used to derive the encryption key
        elif opt[0] == "-t":
            if not opt[1].isdigit():
                usage()

            iterations = int(opt[1])
            if iterations <= 0:
                usage()
        # -u index: update the bookmark at index
        elif opt[0] == "-u":
            if addurl == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            entry = opt[1]
            if int(entry) <= 0:
                usage()

            update = True
        # -w: fetch the title from the web while adding/updating
        elif opt[0] == "-w":
            online = True
        # -x n: output format selector (1 or 2)
        elif opt[0] == "-x":
            if not opt[1].isdigit():
                usage()

            showOpt = int(opt[1])
            if showOpt < 1 or showOpt > 2:
                usage()
        # -z: enable debug output
        elif opt[0] == "-z":
            debug = True
except GetoptError as e:
    print("buku:", e)
    sys.exit(1)
|
2016-03-03 11:49:43 -06:00
|
|
|
# A single-index update and a full-DB refresh cannot be combined
if update and refresh:
    print("You can either update a single index or refresh full DB at once.\n")
    usage()

# Fetching the title online conflicts with a manually supplied title
if online and titleManual is not None:
    print("You can either fetch title from web or add/update title manually.\n")
    usage()

# Encryption/decryption requests take priority over every other action
if encrypt:
    encrypt_file()
if decrypt:
    decrypt_file()
2015-11-08 12:56:52 -06:00
|
|
|
# Initialize the database and get the connection and cursor handles
conn, cur = initdb()
2016-03-19 10:55:13 -05:00
|
|
|
# Replace (or, with a single argument, delete) a tag across the DB
if replace:
    argcount = len(keywords)

    if addurl or update or delete:
        # Tag surgery cannot be mixed with record add/update/delete
        print("Tag replace doesn't work with add or update or delete.\n")
        conn.close()
        usage()
    elif argcount < 1 or argcount > 2:
        print("Tag replace accepts 1 or 2 arguments\n")
        conn.close()
        usage()
    elif argcount == 1:
        # One argument: remove the tag entirely
        replaceTags(conn, cur, keywords[0], "")
    else:
        # Two arguments: replace the first tag with the second
        replaceTags(conn, cur, keywords[0], keywords[1])
2015-11-08 12:56:52 -06:00
|
|
|
# Add a new record or update an existing one
if addurl or update:
    if not keywords:
        # Nothing to add or update with: bail out
        conn.close()
        usage()

    AddUpdateEntry(conn, cur, keywords, entry)
2015-11-13 05:12:12 -06:00
|
|
|
# Refresh every bookmark in the DB by re-running add/update on it
if refresh:
    cur.execute("SELECT id, url, tags FROM bookmarks")
    for index, url, tags in cur.fetchall():
        # Tags are stored with a leading ',' in the DB; strip it before reuse
        cleaned = tags[1:] if tags != '' else tags
        AddUpdateEntry(conn, cur, [url, cleaned], index)
        print("")
2015-11-08 12:56:52 -06:00
|
|
|
# Search tags, URLs and title info for the given keywords
if search:
    if not keywords:
        # No keywords supplied: nothing to search for
        conn.close()
        usage()

    searchdb(cur, keywords)
2015-11-08 12:56:52 -06:00
|
|
|
# Print records: all of them, or the single index stored in showindex
if show:
    printdb(cur, showindex)

# List every unique tag in the DB
if showTags:
    showUniqueTags(cur)
2015-11-10 05:20:30 -06:00
|
|
|
# Open the requested bookmark in the browser
if openurl is not None:
    fetchopen(openurl)

# Remove a single record or all records
if delete:
    cleardb(conn, cur, entry)

# Close the connection before exiting
conn.close()