#!/usr/bin/env python3
#
# Bookmark management utility
#
# Copyright (C) 2015 Arun Prakash Jana <engineerarun@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with buku. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import sqlite3
import argparse
import readline
import webbrowser
import html.parser as HTMLParser
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urljoin, quote, unquote
import gzip
import io
import signal
# Import libraries needed for encryption
try:
import getpass
import hashlib
from Crypto.Cipher import AES
from Crypto import Random
import struct
no_crypto = False
BLOCKSIZE = 65536
SALT_SIZE = 32
CHUNKSIZE = 0x80000 # Read/write 512 KB chunks
except ImportError:
no_crypto = True
# Globals
update = False # Update a bookmark in DB
tagManual = None # Tags for update command
titleManual = None # Manually add a title offline
description = None # Description of the bookmark
tagsearch = False # Search bookmarks by tag
titleData = None # Title fetched from a page
jsonOutput = False # Output json formatted result
showOpt = 0 # Modify show. 1: show only URL, 2: show URL and tag
debug = False # Enable debug logs
pipeargs = [] # Holds arguments piped to the program
_VERSION_ = '2.1' # Program version
class BMHTMLParser(HTMLParser.HTMLParser):
"""Class to parse and fetch the title from a HTML page, if available"""
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
self.inTitle = False
self.data = ""
self.lasttag = None
def handle_starttag(self, tag, attrs):
self.inTitle = False
if tag == "title":
self.inTitle = True
self.lasttag = tag
def handle_endtag(self, tag):
global titleData
if tag == "title":
self.inTitle = False
if self.data != "":
titleData = self.data
self.reset() # We have received title data, exit parsing
def handle_data(self, data):
if self.lasttag == "title" and self.inTitle == True:
self.data += data
def error(self, message):
pass
class BukuDb:
def __init__(self, *args, **kwargs):
conn, cur = BukuDb.initdb()
self.conn = conn
self.cur = cur
@staticmethod
def get_dbfile_path():
"""Determine the DB file path:
if $XDG_DATA_HOME is defined, use it
else if $HOME exists, use it
else use the current directory
"""
data_home = os.environ.get('XDG_DATA_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
data_home = '.'
else:
data_home = os.path.join(os.environ.get('HOME'), '.local', 'share')
return os.path.join(data_home, 'buku')
@staticmethod
def move_legacy_dbfile():
"""Move database file from earlier path used in versions <= 1.8
to new path. Errors out if both the old and new DB files exist.
"""
olddbpath = os.path.join(os.environ.get('HOME'), '.cache', 'buku')
olddbfile = os.path.join(olddbpath, 'bookmarks.db')
if not os.path.exists(olddbfile):
return
newdbpath = BukuDb.get_dbfile_path()
newdbfile = os.path.join(newdbpath, 'bookmarks.db')
if os.path.exists(newdbfile):
print("Both old (%s) and new (%s) databases exist, need manual action" % (olddbfile, newdbfile))
sys.exit(1)
if not os.path.exists(newdbpath):
os.makedirs(newdbpath)
os.rename(olddbfile, newdbfile)
print("Database was moved from old (%s) to new (%s) location.\n" % (olddbfile, newdbfile))
os.rmdir(olddbpath)
@staticmethod
def initdb():
"""Initialize the database connection. Create DB file and/or bookmarks table
if they don't exist. Alert on encryption options on first execution.
Returns: connection, cursor
"""
dbpath = BukuDb.get_dbfile_path()
if not os.path.exists(dbpath):
os.makedirs(dbpath)
dbfile = os.path.join(dbpath, 'bookmarks.db')
encpath = os.path.join(dbpath, 'bookmarks.db.enc')
# Notify if DB file needs to be decrypted first
if os.path.exists(encpath) and not os.path.exists(dbfile):
print("Unlock database first")
sys.exit(1)
# Show info on first creation
if no_crypto == False and not os.path.exists(dbfile):
print("DB file is being created. You may want to encrypt it later.")
try:
# Create a connection
conn = sqlite3.connect(dbfile)
cur = conn.cursor()
# Create table if it doesn't exist
cur.execute('''CREATE TABLE if not exists bookmarks \
(id integer PRIMARY KEY, URL text NOT NULL UNIQUE, metadata text default \'\', tags text default \',\', desc text default \'\')''')
conn.commit()
except Exception as e:
print("\x1b[1mEXCEPTION\x1b[21m [initdb]: (%s) %s" % (type(e).__name__, e))
sys.exit(1)
# Add description column in existing DB (from version 2.1)
try:
cur.execute("""ALTER TABLE bookmarks ADD COLUMN desc text default \'\'""")
conn.commit()
except:
pass
return (conn, cur)
def get_bookmark_index(self, url):
"""Check if URL already exists in DB
Params: URL to search
Returns: DB index if URL found, else -1
"""
self.cur.execute("SELECT id FROM bookmarks WHERE URL = ?", (url,))
resultset = self.cur.fetchall()
if len(resultset) == 0:
return -1
return resultset[0][0]
def add_bookmark(self, url, tag_manual=None, title_manual=None, desc=None):
"""Add a new bookmark
:param url: url to bookmark
:param tag_manual: string of comma-separated tags to add manually
:param title_manual: string title to add manually
:param desc: string description
"""
# Ensure that the URL does not exist in DB already
id = self.get_bookmark_index(url)
if id != -1:
print("URL already exists at index %d" % id)
return
# Process title
if title_manual is not None:
meta = title_manual
else:
meta = network_handler(url)
if meta == '':
print("\x1B[91mTitle: []\x1B[0m")
else:
print("Title: [%s]" % meta)
# Process tags
if tag_manual is None:
tag_manual = ','
# Process description
if desc is None:
desc = ''
try:
self.cur.execute('INSERT INTO bookmarks(URL, metadata, tags, desc) VALUES (?, ?, ?, ?)', (url, meta, tag_manual, desc))
self.conn.commit()
print("Added at index %d\n" % self.cur.lastrowid)
self.printdb(self.cur.lastrowid)
except Exception as e:
print("\x1b[1mEXCEPTION\x1b[21m [add_bookmark]: (%s) %s" % (type(e).__name__, e))
def update_bookmark(self, index, url='', tag_manual=None, title_manual=None, desc=None):
""" Update an existing record at index
:param index: int position to update
:param url: address
:param tag_manual: string of comma-separated tags to add manually
:param title_manual: string title to add manually
:param desc: string description
:return:
"""
arguments = []
query = "UPDATE bookmarks SET"
to_update = False
# Update URL if passed as argument
if url != '':
query += " URL = ?,"
arguments.append(url)
to_update = True
# Update tags if passed as argument
if tag_manual is not None:
query += " tags = ?,"
arguments.append(tag_manual)
to_update = True
# Update description if passed as an argument
if desc is not None:
query += " desc = ?,"
arguments.append(desc)
to_update = True
# Update title
#
# 1. if -t has no arguments, delete existing title
# 2. if -t has arguments, update existing title
# 3. if -t option is omitted at cmdline:
# if URL is passed, update the title from web using the URL
# 4. if no other argument (url, tag, comment) is passed, update title from web using the URL in DB
meta = None
if title_manual is not None:
meta = title_manual
elif url != '':
meta = network_handler(url)
if meta == '':
print("\x1B[91mTitle: []\x1B[0m")
else:
print("Title: [%s]" % meta)
elif not to_update:
self.refreshdb(index)
self.printdb(index)
if meta is not None:
query += " metadata = ?,"
arguments.append(meta)
to_update = True
if not to_update: # Nothing to update
return
query = query[:-1] + " WHERE id = ?"
arguments.append(index)
if debug:
print("query: [%s], args: [%s]" % (query, arguments))
try:
self.cur.execute(query, arguments)
self.conn.commit()
if self.cur.rowcount == 1:
print("Updated index %d\n" % index)
self.printdb(index)
else:
print("No matching index")
except sqlite3.IntegrityError:
print("URL already exists")
def refreshdb(self, index):
"""Refresh ALL records in the database. Fetch title for each
bookmark from the web and update the records. Doesn't udpate
the record if title is empty.
This API doesn't change DB index, URL or tags of a bookmark.
Params: index of record to update, or 0 for all records
"""
global titleManual
if index == 0:
self.cur.execute("SELECT id, url FROM bookmarks ORDER BY id ASC")
else:
self.cur.execute("SELECT id, url FROM bookmarks WHERE id = ?", (index,))
resultset = self.cur.fetchall()
if titleManual is None:
for row in resultset:
title = network_handler(row[1])
if title == '':
print("\x1B[91mTitle: []")
print("\x1b[1mNOT updating index %d\x1b[21m\x1B[0m\n" % row[0])
continue
else:
print("Title: [%s]" % title)
self.cur.execute("UPDATE bookmarks SET metadata = ? WHERE id = ?", (title, row[0],))
self.conn.commit()
print("Updated index %d\n" % row[0])
else:
title = titleManual
for row in resultset:
self.cur.execute("UPDATE bookmarks SET metadata = ? WHERE id = ?", (title, row[0],))
self.conn.commit()
print("Updated index %d\n" % row[0])
def searchdb(self, keywords, all_keywords=False):
"""Search the database for an entries with tags or URL
or title info matching keywords and list those.
Params: keywords to search, search any or all keywords
"""
global jsonOutput
arguments = []
placeholder = "'%' || ? || '%'"
query = "SELECT id, url, metadata, tags, desc FROM bookmarks WHERE"
if all_keywords == True: # Match ALL keywords in tags, URL, title or description
for token in keywords:
query += " (tags LIKE (%s) OR URL LIKE (%s) OR metadata LIKE (%s) OR desc LIKE (%s)) AND" % (placeholder, placeholder, placeholder, placeholder)
arguments.append(token)
arguments.append(token)
arguments.append(token)
arguments.append(token)
query = query[:-4]
else: # Match ANY keyword in tags, URL, title or description
for token in keywords:
query += " tags LIKE (%s) OR URL LIKE (%s) OR metadata LIKE (%s) OR desc LIKE (%s) OR" % (placeholder, placeholder, placeholder, placeholder)
arguments.append(token)
arguments.append(token)
arguments.append(token)
arguments.append(token)
query = query[:-3]
if debug:
print("\"%s\", (%s)" % (query, arguments))
self.cur.execute(query, arguments)
results = self.cur.fetchall()
if len(results) == 0:
return
if jsonOutput == False:
prompt(results)
else:
print(format_json(results))
def search_by_tag(self, tag):
"""Search and list bookmarks with a tag
Params: tag to search
"""
global jsonOutput
self.cur.execute("SELECT id, url, metadata, tags, desc FROM bookmarks WHERE tags LIKE '%' || ? || '%'", (tag,))
results = self.cur.fetchall()
if len(results) == 0:
return
if jsonOutput == False:
prompt(results)
else:
print(format_json(results))
def compactdb(self, index):
"""When an entry at index is deleted, move the last
entry in DB to index, if index is lesser.
Params: index of deleted entry
"""
self.cur.execute('SELECT MAX(id) from bookmarks')
results = self.cur.fetchall()
if len(results) == 1 and results[0][0] is None: # Return if the last index was just deleted
return
for row in results:
if row[0] > index:
self.cur.execute('SELECT id, URL, metadata, tags, desc FROM bookmarks WHERE id = ?', (row[0],))
results = self.cur.fetchall()
for row in results:
self.cur.execute('DELETE FROM bookmarks WHERE id = ?', (row[0],))
self.conn.commit()
self.cur.execute('INSERT INTO bookmarks(id, URL, metadata, tags, desc) VALUES (?, ?, ?, ?, ?)', (index, row[1], row[2], row[3], row[4],))
self.conn.commit()
print("Index %d moved to %d" % (row[0], index))
def cleardb(self, index):
"""Delete a single record or remove the table if index is None
Params: index to delete
"""
if index == 0: # Remove the table
resp = input("ALL bookmarks will be removed. Enter \x1b[1my\x1b[21m to confirm: ")
if resp != 'y':
print("No bookmarks deleted")
return
self.cur.execute('DROP TABLE if exists bookmarks')
self.conn.commit()
print("All bookmarks deleted")
else: # Remove a single entry
try:
self.cur.execute('DELETE FROM bookmarks WHERE id = ?', (index,))
self.conn.commit()
if self.cur.rowcount == 1:
print("Removed index %d" % index)
self.compactdb(index)
else:
print("No matching index")
except IndexError:
print("Index out of bound")
def printdb(self, index, empty=False):
"""Print bookmark details at index or all bookmarks if index is None
Print only bookmarks with blank title or tag if empty is True
Note: URL is printed on top because title may be blank
Params: index to print, flag to show only bookmarks with no title or tags
"""
global showOpt
global jsonOutput
resultset = None
if index == 0: # Show all entries
if empty == False:
self.cur.execute('SELECT * FROM bookmarks')
resultset = self.cur.fetchall()
else:
self.cur.execute("SELECT * FROM bookmarks WHERE metadata = '' OR tags = ','")
resultset = self.cur.fetchall()
print("\x1b[1m%d records found\x1b[21m\n" % len(resultset))
if jsonOutput == False:
if showOpt == 0:
for row in resultset:
print_record(row)
elif showOpt == 1:
for row in resultset:
print("%s %s" % (row[0], row[1]))
elif showOpt == 2:
for row in resultset:
print("%s %s %s" % (row[0], row[1], row[3][1:-1]))
else:
print(format_json(resultset))
else: # Show record at index
try:
resultset = self.cur.execute("SELECT * FROM bookmarks WHERE id = ?", (index,))
except IndexError:
print("Index out of bound")
return
if jsonOutput == False:
for row in resultset:
print_record(row)
return
print("No matching index")
else:
print(format_json(resultset, True))
def list_tags(self):
"""Print all unique tags ordered alphabetically
"""
count = 1
Tags = []
uniqueTags = []
for row in self.cur.execute('SELECT DISTINCT tags FROM bookmarks'):
if row[0] == ',':
continue
Tags.extend(row[0].strip(',').split(','))
for tag in Tags:
if tag not in uniqueTags:
uniqueTags.append(tag)
Tags = sorted(uniqueTags, key=str.lower)
for tag in Tags:
print("%6d. %s" % (count, tag))
count += 1
def replace_tag(self, orig, new=None):
"""Replace orig tags with new tags in DB for all records.
Remove the orig tag if the new tag is empty.
Params: original and new tags
"""
update = False
delete = False
newtags = ','
orig = ',' + orig + ','
if new is None:
delete = True
else:
for tag in new:
if tag[-1] == ',':
tag = tag.strip(',') + ',' # if delimiter is present, maintain it
else:
tag = tag.strip(',') # a token in a multi-word tag
if tag == ',':
continue
if newtags[-1] == ',':
newtags += tag
else:
newtags += ' ' + tag
if newtags[-1] != ',':
newtags += ','
if newtags == ',':
delete = True
if orig == newtags:
print("Tags are same.")
return
self.cur.execute("SELECT id, tags FROM bookmarks WHERE tags LIKE ?", ('%' + orig + '%',))
results = self.cur.fetchall()
for row in results:
if delete == False:
# Check if tag newtags is already added
if row[1].find(newtags) >= 0:
newtags = ','
newtags = row[1].replace(orig, newtags)
self.cur.execute("UPDATE bookmarks SET tags = ? WHERE id = ?", (newtags, row[0],))
print("Updated index %d" % row[0])
update = True
if update:
self.conn.commit()
def browse_by_index(self, index):
"""Open URL at index in browser
Params: index
"""
try:
for row in self.cur.execute("SELECT URL FROM bookmarks WHERE id = ?", (index,)):
url = unquote(row[0])
browser_open(url)
return
print("No matching index")
except IndexError:
print("Index out of bound")
def close_quit(self, exitval=0):
"""Close a DB connection and exit"""
if self.conn is not None:
try:
self.cur.close()
self.conn.close()
except: # we don't really care about errors, we're closing down anyway
pass
sys.exit(exitval)
def connect_server(url, fullurl=False, forced=False):
"""Connect to a server and fetch the requested page data.
Supports gzip compression.
If forced is True, for URLs like http://www.domain.com
or http://www.domain.com/ the path is www.domain.com or
www.domain.com/ respectively.
If fullurl is False, for URLs like http://www.domain.com/,
path is /, else www.domain.com/.
Params: URL to fetch, use complete url as path, force flag
Returns: connection, HTTP(S) GET response
"""
if url.find("%20") != -1:
url = unquote(url).replace(" ", "%20")
else:
url = unquote(url)
if debug:
print("unquoted: %s" % url)
if url.find("https://") >= 0: # Secure connection
server = url[8:]
marker = server.find("/")
if marker > 0:
if fullurl == False and forced == False:
url = server[marker:]
server = server[:marker]
elif forced == False: # Handle domain name without trailing /
url = '/'
urlconn = HTTPSConnection(server, timeout=30)
elif url.find("http://") >= 0: # Insecure connection
server = url[7:]
marker = server.find("/")
if marker > 0:
if fullurl == False and forced == False:
url = server[marker:]
server = server[:marker]
elif forced == False:
url = '/'
urlconn = HTTPConnection(server, timeout=30)
else:
printmsg("Not a valid HTTP(S) url", "WARNING")
if url.find(":") == -1:
printmsg("Doesn't appear to be a valid url either", "WARNING")
return (None, None)
if debug:
print("server [%s] url [%s]" % (server, url))
# Handle URLs passed with %xx escape
try:
url.encode('ascii')
except:
url = quote(url)
urlconn.request("GET", url, None, {
"Accept-encoding": "gzip",
})
return (urlconn, urlconn.getresponse())
def get_page_title(resp):
"""Invoke HTML parser and extract title from HTTP response
Params: GET response
"""
data = None
charset = resp.headers.get_content_charset()
if resp.headers.get('Content-Encoding') == 'gzip':
if debug:
print("gzip response")
data = gzip.GzipFile(fileobj=io.BytesIO(resp.read())).read()
else:
data = resp.read()
if charset == None:
charset = 'utf-8'
if debug:
printmsg("Charset missing in response", "WARNING")
if debug:
print("charset: %s" % charset)
parser = BMHTMLParser()
try:
if charset == 'utf-8':
parser.feed(data.decode(charset, "replace"))
else:
parser.feed(data.decode(charset))
except Exception as e:
if debug and str(e) != "we should not get here!":
# Suppress Exception due to intentional self.reset() in HTMLParser
print("\x1b[1mEXCEPTION\x1b[21m [get_page_title]: (%s) %s" % (type(e).__name__, e))
def network_handler(url):
"""Handle server connection and redirections
Params: URL to fetch
Returns: page title or empty string, if not found
"""
global titleData
titleData = None
urlconn = None
retry = False
try:
urlconn, resp = connect_server(url, False)
while 1:
if resp is None:
break
elif resp.status == 200:
get_page_title(resp)
break
elif resp.status in [301, 302]:
redirurl = urljoin(url, resp.getheader('location', ''))
printmsg(redirurl, "REDIRECTION")
retry = False # Reset retry, start fresh on redirection
if redirurl.find("sorry/IndexRedirect?") >= 0: # gracefully handle Google blocks
printmsg("Connection blocked due to unusual activity", "ERROR")
break
marker = redirurl.find("redirectUrl=")
if marker != -1:
redirurl = redirurl[marker + 12:]
# break same URL redirection loop
if url == redirurl:
printmsg("Detected repeated redirection to same URL", "ERROR")
break
url = redirurl
urlconn.close()
# Try with complete URL on redirection
urlconn, resp = connect_server(url, True)
elif resp.status == 403 and retry == False:
"""Handle URLs of the form https://www.domain.com or
https://www.domain.com/ which fails when trying to fetch
resource '/', retry with full path.
"""
urlconn.close()
if debug:
print("Received status 403: retrying.")
# Remove trailing /
if url[-1] == '/':
url = url[:-1]
urlconn, resp = connect_server(url, False, True)
retry = True
elif resp.status == 500 and retry == False:
"""Retry on status 500 (Internal Server Error) with truncated
URL. Some servers support truncated request URL on redirection.
"""
urlconn.close()
if debug:
print("Received status 500: retrying.")
urlconn, resp = connect_server(url, False)
retry = True
else:
printmsg(("[" + str(resp.status) + "] " + resp.reason), "ERROR")
break
except Exception as e:
print("\x1b[1mEXCEPTION\x1b[21m [network_handler]: (%s) %s" % (type(e).__name__, e))
finally:
if urlconn is not None:
urlconn.close()
if titleData is None:
return ''
return titleData.strip().replace("\n","")
def parse_tags(keywords=[]):
"""Format and get tag string from tokens"""
# TODO: Simplify this logic
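# Example (hypothetical tokens): parse_tags(['linux', 'kernel,', 'news'])
# returns ',linux kernel,news,' -- comma-separated tokens become separate
# tags, space-separated tokens merge into one tag, duplicates are dropped,
# and the result is sorted case-insensitively and wrapped in delimiters.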
tags = ','
origTags = []
uniqueTags = []
# Cleanse and get the tags
for tag in keywords:
if tag[0] == ',': # delimiter precedes token (e.g. token1 ,token2)
if tags[-1] != ',':
tags += ','
if tag[-1] == ',': # if delimiter is present, maintain it (e.g. token1, token2)
tag = tag.strip(',') + ','
else: # a token in a multi-word tag (e.g. token1 token2)
tag = tag.strip(',')
if tag == ',': # isolated delimiter (e.g. token1 , token2)
if tags[-1] != ',':
tags += tag
continue
if tags[-1] == ',':
tags += tag
else:
tags += ' ' + tag
if tags[-1] != ',':
tags += ','
origTags.extend(tags.strip(',').split(','))
for tag in origTags:
if tag not in uniqueTags:
uniqueTags.append(tag) # Select unique tags
# Sort the tags
sortedTags = sorted(uniqueTags, key=str.lower)
# Wrap with delimiter
return ',' + ','.join(sortedTags) + ','
def prompt(results):
"""Show each matching result from a search and prompt"""
count = 0
for row in results:
count += 1
print_record(row, count)
while True:
try:
nav = input("Result number to open: ")
except EOFError:
return
if is_int(nav):
index = int(nav) - 1
if index < 0 or index >= count:
print("Index out of bound")
continue
try:
browser_open(unquote(results[index][1]))
except Exception as e:
print("\x1b[1mEXCEPTION\x1b[21m [searchdb]: (%s) %s" % (type(e).__name__, e))
else:
break
def print_record(row, count=0):
"""Print a single DB record
Output is formatted differently for search results (count > 0) and plain print (count = 0)
"""
# Print index and URL
if count != 0:
print("\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m\t[%d]" % (count, row[1], row[0]))
else:
print("\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m" % (row[0], row[1]))
# Print title
if row[2] != '':
print(" \x1B[91m>\x1B[0m %s" % row[2])
# Print description
if row[4] != '':
print(" \x1B[91m+\x1B[0m %s" % row[4])
# Print tags
if row[3] != ',':
print(" \x1B[91m#\x1B[0m %s" % row[3][1:-1])
print("")
def format_json(resultset, single=False):
"""Return results in Json format"""
global showOpt
if single == False:
marks = []
for row in resultset:
if showOpt == 1:
record = { 'url': row[1] }
elif showOpt == 2:
record = { 'url': row[1], 'tags': row[3][1:-1] }
else:
record = { 'url': row[1], 'title': row[2], 'comment': row[4], 'tags': row[3][1:-1]}
marks.append(record)
else:
marks = {}
for row in resultset:
if showOpt == 1:
marks['url'] = row[1]
elif showOpt == 2:
marks['title'] = row[2]
marks['tags'] = row[3][1:-1]
else:
marks['url'] = row[1]
marks['title'] = row[2]
marks['comment'] = row[4]
marks['tags'] = row[3][1:-1]
return json.dumps(marks, sort_keys=True, indent=4)
def is_int(string):
"""Check if a string is a digit
Params: string
"""
try:
int(string)
return True
except:
return False
def browser_open(url):
"""Duplicate stdin, stdout (to suppress showing errors
on the terminal) and open URL in default browser
Params: url to open
"""
url = url.replace("%22", "\"")
_stderr = os.dup(2)
os.close(2)
_stdout = os.dup(1)
os.close(1)
fd = os.open(os.devnull, os.O_RDWR)
os.dup2(fd, 2)
os.dup2(fd, 1)
try:
webbrowser.open(url)
except Exception as e:
print("\x1b[1mEXCEPTION\x1b[21m [browser_open]: (%s) %s" % (type(e).__name__, e))
finally:
os.close(fd)
os.dup2(_stderr, 2)
os.dup2(_stdout, 1)
def get_filehash(filepath):
"""Get the SHA256 hash of a file
Params: path to the file
"""
with open(filepath, 'rb') as f:
hasher = hashlib.sha256()
buf = f.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = f.read(BLOCKSIZE)
return hasher.digest()
def encrypt_file(iterations):
"""Encrypt the bookmarks database file"""
dbpath = os.path.join(BukuDb.get_dbfile_path(), 'bookmarks.db')
encpath = dbpath + '.enc'
if not os.path.exists(dbpath):
print("%s missing. Already encrypted?" % dbpath)
sys.exit(1)
# If both encrypted file and flat file exist, error out
if os.path.exists(dbpath) and os.path.exists(encpath):
printmsg("Both encrypted and flat DB files exist!", "ERROR")
sys.exit(1)
password = ''
password = getpass.getpass()
passconfirm = getpass.getpass()
if password == '':
print("Empty password");
sys.exit(1)
if password != passconfirm:
print("Passwords don't match");
sys.exit(1)
# Get SHA256 hash of DB file
dbhash = get_filehash(dbpath)
# Generate random 256-bit salt and key
salt = Random.get_random_bytes(SALT_SIZE)
key = (password + salt.decode('utf-8', "replace")).encode('utf-8')
for i in range(iterations):
key = hashlib.sha256(key).digest()
iv = Random.get_random_bytes(16)
cipher = AES.new(key, AES.MODE_CBC, iv)
filesize = os.path.getsize(dbpath)
with open(dbpath, 'rb') as infile:
with open(encpath, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(salt)
outfile.write(iv)
# Embed DB file hash in encrypted file
outfile.write(dbhash)
while True:
chunk = infile.read(CHUNKSIZE)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += b' ' * (16 - len(chunk) % 16) # pad to the 16-byte AES block size with space bytes
outfile.write(cipher.encrypt(chunk))
os.remove(dbpath)
print("File encrypted")
sys.exit(0)
def decrypt_file(iterations):
"""Decrypt the bookmarks database file"""
dbpath = os.path.join(BukuDb.get_dbfile_path(), 'bookmarks.db')
encpath = dbpath + '.enc'
if not os.path.exists(encpath):
printmsg((encpath + " missing"), "ERROR")
sys.exit(1)
# If both encrypted file and flat file exist, error out
if os.path.exists(dbpath) and os.path.exists(encpath):
printmsg("Both encrypted and flat DB files exist!", "ERROR")
sys.exit(1)
password = ''
password = getpass.getpass()
if password == '':
printmsg("Decryption failed", "ERROR");
sys.exit(1)
with open(encpath, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
# Read 256-bit salt and generate key
salt = infile.read(32)
key = (password + salt.decode('utf-8', "replace")).encode('utf-8')
for i in range(iterations):
key = hashlib.sha256(key).digest()
iv = infile.read(16)
cipher = AES.new(key, AES.MODE_CBC, iv)
# Get original DB file's SHA256 hash from encrypted file
enchash = infile.read(32)
with open(dbpath, 'wb') as outfile:
while True:
chunk = infile.read(CHUNKSIZE)
if len(chunk) == 0:
break
outfile.write(cipher.decrypt(chunk))
outfile.truncate(origsize)
# Match hash of generated file with that of original DB file
dbhash = get_filehash(dbpath)
if dbhash != enchash:
os.remove(dbpath)
printmsg("Decryption failed", "ERROR");
sys.exit(1)
else:
os.remove(encpath)
print("File decrypted")
def sigint_handler(signum, frame):
"""Custom SIGINT handler"""
print('\nInterrupted.', file=sys.stderr)
sys.exit(1)
signal.signal(signal.SIGINT, sigint_handler)
def printmsg(msg, level=None):
"""Print a message in 2 parts, with the level in bold
Params: msg, level
"""
if level is not None:
print("\x1b[1m%s: \x1b[21m%s\x1B[0m" % (level, msg))
else:
print("%s" % msg)
class CustomUpdateAction(argparse.Action):
"""Class to capture if optional param 'update'
is actually used, even without arguments
"""
def __call__(self, parser, args, values, option_string=None):
global update
update = True
# NOTE: the following converts a None argument to an empty array []
setattr(args, self.dest, values)
class CustomTagAction(argparse.Action):
"""Class to capture if optional param 'tag'
is actually used, even without arguments
"""
def __call__(self, parser, args, values, option_string=None):
global tagManual
tagManual = [',',]
setattr(args, self.dest, values)
class CustomTitleAction(argparse.Action):
"""Class to capture if optional param 'title'
is actually used, even without arguments
"""
def __call__(self, parser, args, values, option_string=None):
global titleManual
titleManual = ''
setattr(args, self.dest, values)
class CustomDescAction(argparse.Action):
"""Class to capture if optional param 'comment'
is actually used, even without arguments
"""
def __call__(self, parser, args, values, option_string=None):
global description
description = ''
setattr(args, self.dest, values)
class CustomTagSearchAction(argparse.Action):
"""Class to capture if optional param 'stag'
is actually used, even without arguments
"""
def __call__(self, parser, args, values, option_string=None):
global tagsearch
tagsearch = True
setattr(args, self.dest, values)
class ExtendedArgumentParser(argparse.ArgumentParser):
"""Extend classic argument parser"""
# Print additional help and info
@staticmethod
def print_extended_help(file=None):
file.write('''
prompt keys:
1-N open the Nth search result in web browser
Enter exit buku
Version %s
Copyright (C) 2015-2016 Arun Prakash Jana <engineerarun@gmail.com>
License: GPLv3
Webpage: https://github.com/jarun/buku
''' % _VERSION_)
# Help
def print_help(self, file=None):
super(ExtendedArgumentParser, self).print_help(file)
self.print_extended_help(file)
"""main starts here"""
# Handle piped input
def main(argv=sys.argv):
if not sys.stdin.isatty():
pipeargs.extend(sys.argv)
for s in sys.stdin.readlines():
pipeargs.extend(s.split())
if __name__ == "__main__":
try:
main(sys.argv)
except KeyboardInterrupt:
pass
# If piped input, set argument vector
if len(pipeargs) > 0:
sys.argv = pipeargs
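# Example (hypothetical invocation): 'echo "-s linux" | buku' behaves as if
# buku were invoked as 'buku -s linux'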
# Setup custom argument parser
argparser = ExtendedArgumentParser(
description='A private command-line bookmark manager. Your mini web!',
formatter_class=argparse.RawTextHelpFormatter,
usage='''buku [-a URL [tags ...]] [-u [N]] [-d [N]]
[--url keyword] [--tag [...]] [-t [...]] [-c [...]]
[-s keyword [...]] [-S keyword [...]] [--st [...]]
[-k [N]] [-l [N]] [-p [N]] [-f N]
[-r oldtag [newtag ...]] [-j] [-o N] [-z] [-h]''',
add_help=False
)
# General options
general_group = argparser.add_argument_group(title="general options",
description='''-a, --add URL [tags ...]
bookmark URL with comma-separated tags
-u, --update [N] update fields of bookmark at DB index N
refresh all titles, if no arguments
refresh title of bookmark at N, if only
N is specified without any edit options
-d, --delete [N] delete bookmark at DB index N
delete all bookmarks, if no arguments
-h, --help show this information''')
general_group.add_argument('-a', '--add', nargs='+', dest='addurl', metavar=('URL', 'tags'), help=argparse.SUPPRESS)
general_group.add_argument('-u', '--update', nargs='*', dest='update', action=CustomUpdateAction, metavar=('N', 'URL tags'), help=argparse.SUPPRESS)
general_group.add_argument('-d', '--delete', nargs='?', dest='delete', type=int, const=0, metavar='N', help=argparse.SUPPRESS)
general_group.add_argument('-h', '--help', dest='help', action='store_true', help=argparse.SUPPRESS)
# Edit options
edit_group=argparser.add_argument_group(title="edit options",
description='''--url keyword specify url, works with -u only
--tag [...] set comma-separated tags, works with -a, -u
clears tag, if no arguments
-t, --title [...] manually set title, works with -a, -u
if no arguments:
-a: do not set title, -u: clear title
-c, --comment [...] description of the bookmark, works with
-a, -u; clears comment, if no arguments''')
edit_group.add_argument('--url', nargs=1, dest='url', metavar='url', help=argparse.SUPPRESS)
edit_group.add_argument('--tag', nargs='*', dest='tag', action=CustomTagAction, metavar='tag', help=argparse.SUPPRESS)
edit_group.add_argument('-t', '--title', nargs='*', dest='title', action=CustomTitleAction, metavar='title', help=argparse.SUPPRESS)
edit_group.add_argument('-c', '--comment', nargs='*', dest='desc', type=str, action=CustomDescAction, metavar='desc', help=argparse.SUPPRESS)
# Search options
search_group=argparser.add_argument_group(title="search options",
description='''-s, --sany keyword [...]
search bookmarks for ANY matching keyword
-S, --sall keyword [...]
search bookmarks with ALL keywords
special keyword -
"blank": list entries with empty title/tag
--st, --stag [...] search bookmarks by tag
list all tags alphabetically, if no arguments''')
search_group.add_argument('-s', '--sany', nargs='+', metavar='keyword', help=argparse.SUPPRESS)
search_group.add_argument('-S', '--sall', nargs='+', metavar='keyword', help=argparse.SUPPRESS)
search_group.add_argument('--st', '--stag', nargs='*', dest='stag', action=CustomTagSearchAction, metavar='keyword', help=argparse.SUPPRESS)
# Encryption options
crypto_group=argparser.add_argument_group(title="encryption options",
description='''-l, --lock [N] encrypt DB file with N (> 0, default 8)
hash iterations to generate key
-k, --unlock [N] decrypt DB file with N (> 0, default 8)
hash iterations to generate key''')
crypto_group.add_argument('-k', '--unlock', nargs='?', dest='decrypt', type=int, const=8, metavar='N', help=argparse.SUPPRESS)
crypto_group.add_argument('-l', '--lock', nargs='?', dest='encrypt', type=int, const=8, metavar='N', help=argparse.SUPPRESS)
# Power toys
power_group=argparser.add_argument_group(title="power toys",
description='''-p, --print [N] show details of bookmark at DB index N
show all bookmarks, if no arguments
-f, --format N modify -p output
N=1: show only URL, N=2: show URL and tag
-r, --replace oldtag [newtag ...]
replace oldtag with newtag in all bookmarks
delete oldtag, if no newtag
-j, --json JSON formatted output, for -p, -s, -S, --st
-o, --open N open bookmark at DB index N in web browser
-z, --debug show debug information and additional logs''')
power_group.add_argument('-p', '--print', nargs='?', dest='printindex', type=int, const=0, metavar='N', help=argparse.SUPPRESS)
power_group.add_argument('-f', '--format', dest='showOpt', type=int, choices=[1, 2], metavar='N', help=argparse.SUPPRESS)
power_group.add_argument('-r', '--replace', nargs='+', dest='replace', metavar=('oldtag', 'newtag'), help=argparse.SUPPRESS)
power_group.add_argument('-j', '--json', dest='jsonOutput', action='store_true', help=argparse.SUPPRESS)
power_group.add_argument('-o', '--open', dest='openurl', type=int, metavar='N', help=argparse.SUPPRESS)
power_group.add_argument('-z', '--debug', dest='debug', action='store_true', help=argparse.SUPPRESS)
# Show help and exit if no arguments
if len(sys.argv) < 2:
argparser.print_help(sys.stderr)
sys.exit(1)
# Parse the arguments
args = argparser.parse_args()
# Show help and exit if help requested
if args.help == True:
argparser.print_help(sys.stderr)
sys.exit(0)
# Assign the values to globals
if args.showOpt is not None:
showOpt = args.showOpt
if tagManual is not None and len(args.tag) > 0:
tagManual = args.tag
if titleManual is not None and len(args.title) > 0:
titleManual = " ".join(args.title)
if description is not None and len(args.desc) > 0:
description = " ".join(args.desc)
if args.jsonOutput:
import json
jsonOutput = args.jsonOutput
debug = args.debug
# Show version in debug logs
if debug:
print("Version %s" % _VERSION_)
# Move pre-1.9 database to new location
BukuDb.move_legacy_dbfile()
# Handle encrypt/decrypt options at top priority
if args.encrypt is not None:
if no_crypto:
printmsg("PyCrypto missing", "ERROR")
sys.exit(1)
if args.encrypt < 1:
printmsg("Iterations must be >= 1", "ERROR")
sys.exit(1)
encrypt_file(args.encrypt)
if args.decrypt is not None:
if no_crypto:
printmsg("PyCrypto missing", "ERROR")
sys.exit(1)
if args.decrypt < 1:
printmsg("Decryption failed", "ERROR");
sys.exit(1)
decrypt_file(args.decrypt)
# Initialize the database and get handles
bdb = BukuDb()
# Add a record
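# Example (hypothetical invocation): 'buku -a http://example.com linux, news'
# adds the URL with tags ',linux,news,' and a title fetched from the web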
if args.addurl is not None:
# Parse tags into a comma-separated string
tags = ','
keywords = args.addurl
if tagManual is not None and not (tagManual[0] == ',' and len(tagManual) == 1):
keywords = args.addurl + [','] + tagManual
if len(keywords) > 1:
tags = parse_tags(keywords[1:])
bdb.add_bookmark(args.addurl[0], tags, titleManual, description)
# Update record
if update == True:
if len(args.update) == 0:
bdb.refreshdb(0)
elif not args.update[0].isdigit():
printmsg("Index must be a number >= 0", "ERROR")
bdb.close_quit(1)
elif int(args.update[0]) == 0:
bdb.refreshdb(0)
else:
if args.url is not None:
new_url = args.url[0]
else:
new_url = ''
# Parse tags into a comma-separated string
tags = ','
if tagManual is not None and not (tagManual[0] == ',' and len(tagManual) == 1):
tags = parse_tags(tagManual)
bdb.update_bookmark(int(args.update[0]), new_url, tags, titleManual, description)
# Delete record(s)
if args.delete is not None:
if args.delete < 0:
printmsg("Index must be >= 0", "ERROR")
bdb.close_quit(1)
bdb.cleardb(args.delete)
# Search URLs, titles, tags for any keyword
if args.sany is not None:
bdb.searchdb(args.sany)
# Search URLs, titles, tags with all keywords
if args.sall is not None:
if args.sall[0] == 'blank' and len(args.sall) == 1:
bdb.printdb(0, True)
else:
bdb.searchdb(args.sall, True)
# Search bookmarks by tag
if tagsearch == True:
if len(args.stag) > 0:
tag = ',' + " ".join(args.stag) + ','
bdb.search_by_tag(tag)
else:
bdb.list_tags()
# Print all records
if args.printindex is not None:
if args.printindex < 0:
printmsg("Index must be >= 0", "ERROR")
bdb.close_quit(1)
bdb.printdb(args.printindex)
# Replace a tag in DB
if args.replace is not None:
if len(args.replace) == 1:
bdb.replace_tag(args.replace[0])
else:
bdb.replace_tag(args.replace[0], args.replace[1:])
# Open URL in browser
if args.openurl is not None:
if args.openurl < 1:
printmsg("Index must be >= 1", "ERROR")
bdb.close_quit(1)
bdb.browse_by_index(args.openurl)
# Close DB connection and quit
bdb.close_quit(0)